hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e98510e9b7751e0ca1a788b4d132ab21ee520a3 | 1,092 | exs | Elixir | mix.exs | skatsuta/phoenix-json-api | 1da16fc10cf1ce7bb5e47034755f5990bd7acb92 | [
"MIT"
] | null | null | null | mix.exs | skatsuta/phoenix-json-api | 1da16fc10cf1ce7bb5e47034755f5990bd7acb92 | [
"MIT"
] | null | null | null | mix.exs | skatsuta/phoenix-json-api | 1da16fc10cf1ce7bb5e47034755f5990bd7acb92 | [
"MIT"
] | null | null | null | defmodule PhoenixJsonApi.Mixfile do
use Mix.Project
# Mix project definition.
# NOTE(review): Elixir ~> 1.0 / Phoenix 0.16-era style (bare `deps` and
# `Mix.compilers` without parens) — kept as-is for compatibility.
def project do
  [app: :phoenix_json_api,
   version: "0.0.1",
   elixir: "~> 1.0",
   # compile extra paths per environment (test/support only under :test)
   elixirc_paths: elixirc_paths(Mix.env),
   compilers: [:phoenix] ++ Mix.compilers,
   # embedded/permanent builds only for production releases
   build_embedded: Mix.env == :prod,
   start_permanent: Mix.env == :prod,
   deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
# OTP application spec: entry-point module and runtime applications that
# must be started before this one.
def application do
  [mod: {PhoenixJsonApi, []},
   applications: [:phoenix, :phoenix_html, :cowboy, :logger,
                  :phoenix_ecto, :postgrex]]
end
# Specifies which paths to compile per environment
# The test environment additionally compiles test/support helpers.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies
#
# Type `mix help deps` for examples and options
# Project dependencies; phoenix_live_reload is dev-only.
defp deps do
  [{:phoenix, "~> 0.16"},
   {:phoenix_ecto, "~> 0.9"},
   {:postgrex, ">= 0.0.0"},
   {:phoenix_html, "~> 2.0"},
   {:phoenix_live_reload, "~> 0.6", only: :dev},
   {:cowboy, "~> 1.0"}]
end
end
| 27.3 | 63 | 0.606227 |
9e986a7b71791014420e11760c3bcd8c7f7a18b5 | 1,815 | exs | Elixir | harbor/test/test_helper.exs | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/test/test_helper.exs | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/test/test_helper.exs | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | ExUnit.start()
defmodule HarborTest do
  @moduledoc false
  # Naming helpers for the post-suite audit: classify loaded modules by
  # their dotted name and derive the expected test/validation module name
  # for every Pier message module.

  # True when the atom names an Elixir module ("Elixir."-prefixed name).
  def elixir_module?(module) do
    match?("Elixir" <> _, Atom.to_string(module))
  end

  # Pier.Message.<Class>.<Type>
  def message_module?(module) do
    case Module.split(module) do
      ["Pier", "Message", _class, _type] -> true
      _other -> false
    end
  end

  # PierTest.Message.<Class>.<Type>
  def message_validation_module?(module) do
    case Module.split(module) do
      ["PierTest", "Message", _class, _type] -> true
      _other -> false
    end
  end

  # PierTest.<Class>.<Type>
  def message_test_module?(module) do
    case Module.split(module) do
      ["PierTest", _class, _type] -> true
      _other -> false
    end
  end

  # Pier.Message.C.T -> PierTest.C.TTest
  def test_for(module) do
    ["Pier", "Message", class, type] = Module.split(module)
    Module.concat(["PierTest", class, "#{type}Test"])
  end

  # Pier.Message.C.T -> PierTest.Message.C.TTest
  def validation_for(module) do
    ["Pier", "Message", class, type] = Module.split(module)
    Module.concat(["PierTest", "Message", class, "#{type}Test"])
  end
end
# When invoked as `mix test`, register an after-suite audit that checks every
# loaded Pier message module for matching test and validation modules. The
# raises are intentionally commented out, so gaps are tolerated for now.
if System.argv() == ["test"] do
  ExUnit.after_suite(fn _ ->
    loaded_elixir_modules =
      for {module, _file} <- :code.all_loaded(),
          HarborTest.elixir_module?(module),
          do: module

    message_modules = Enum.filter(loaded_elixir_modules, &HarborTest.message_module?/1)
    message_test_modules = Enum.filter(loaded_elixir_modules, &HarborTest.message_test_module?/1)

    message_validation_modules =
      Enum.filter(loaded_elixir_modules, &HarborTest.message_validation_module?/1)

    Enum.each(message_modules, fn module ->
      if (_tm = HarborTest.test_for(module)) not in message_test_modules do
        # TODO: Tests for every single message
        # raise "#{inspect(module)} did not have test module #{inspect(tm)}"
      end

      if (_tm = HarborTest.validation_for(module)) not in message_validation_modules do
        # raise "#{inspect(module)} did not have validation module #{inspect(tm)}"
      end
    end)
  end)
end
| 29.274194 | 94 | 0.674931 |
9e986caac291a3870bdb98d018f8df923e6c7d83 | 6,954 | exs | Elixir | test/oli/publishing/delivery_resolver_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | test/oli/publishing/delivery_resolver_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | test/oli/publishing/delivery_resolver_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule Oli.Publishing.DeliveryResolverTest do
use Oli.DataCase
alias Oli.Publishing.DeliveryResolver
describe "delivery resolution" do
  setup do
    # Seeds two sections ("1" and "2") with published and unpublished
    # revisions/objectives shared by every test in this describe.
    Seeder.base_project_with_resource4()
  end

  # Parent objectives are resolved per-section: only published objectives
  # visible in the given section should yield parents.
  test "find_parent_objectives/2 returns parents", %{
    child1: child1,
    child2: child2,
    child3: child3,
    child4: child4,
    parent1: parent1,
    parent2: parent2,
    child5: child5,
    parent3: parent3,
    child6: child6
  } do
    # find one
    assert [parent1.revision] ==
             DeliveryResolver.find_parent_objectives("1", [child1.resource.id])

    # find both
    assert [parent1.revision, parent2.revision] ==
             DeliveryResolver.find_parent_objectives("1", [
               child1.resource.id,
               child4.resource.id
             ])

    assert [parent1.revision, parent2.revision] ==
             DeliveryResolver.find_parent_objectives("1", [
               child1.resource.id,
               child2.resource.id,
               child3.resource.id,
               child4.resource.id
             ])

    # find none
    assert [] ==
             DeliveryResolver.find_parent_objectives("1", [
               parent1.resource.id,
               parent2.resource.id
             ])

    # child5 should only resolve in section 2
    assert [] == DeliveryResolver.find_parent_objectives("1", [child5.resource.id])

    assert [parent3.revision] ==
             DeliveryResolver.find_parent_objectives("2", [child5.resource.id])

    # child6 should not resolve anywhere since it and its parent are unpublished
    assert [] == DeliveryResolver.find_parent_objectives("1", [child6.resource.id])
    assert [] == DeliveryResolver.find_parent_objectives("2", [child6.resource.id])
  end
  # Section "1" pins the originally published revision; section "2" sees the
  # latest published one; unpublished resources resolve to nil.
  test "from_resource_id/2 returns correct revision", %{
    revision1: revision1,
    latest1: latest1,
    latest4: latest4
  } do
    assert DeliveryResolver.from_resource_id("1", revision1.resource_id).id == revision1.id
    assert DeliveryResolver.from_resource_id("2", revision1.resource_id).id == latest1.id

    assert DeliveryResolver.from_resource_id("1", latest4.resource_id) == nil
    assert DeliveryResolver.from_resource_id("2", latest4.resource_id) == nil

    # verifies we return nil on a made up id
    non_existent_resource_id = latest_record_index("resources") + 1
    assert DeliveryResolver.from_resource_id("1", non_existent_resource_id) == nil
  end

  # Slug-based resolution follows the same publication rules, and any
  # historical slug of a resource should resolve to the section's revision.
  test "from_revision_slug/2 returns correct revision", %{
    revision1: revision1,
    latest1: latest1,
    latest4: latest4
  } do
    assert DeliveryResolver.from_revision_slug("1", revision1.slug).id == revision1.id
    assert DeliveryResolver.from_revision_slug("2", revision1.slug).id == latest1.id

    # resolve an intermediate revision
    assert DeliveryResolver.from_revision_slug("2", "3").id == latest1.id

    # resolve nil on the one that was never published
    assert DeliveryResolver.from_revision_slug("1", latest4.slug) == nil
    assert DeliveryResolver.from_revision_slug("2", latest4.slug) == nil

    # verifies we return nil on a made up slug
    assert DeliveryResolver.from_revision_slug("1", "made_up") == nil
  end
  # Batch resolution returns one entry per requested id, with nil for
  # resources that are not published in the given section.
  test "from_resource_id/2 returns correct list of revisions", %{
    latest1: latest1,
    latest2: latest2,
    revision2: revision2,
    revision1: revision1,
    latest4: latest4,
    section_1: section_1,
    section_2: section_2
  } do
    assert DeliveryResolver.from_resource_id(section_1.slug, [
             revision1.resource_id,
             revision2.resource_id
           ]) ==
             [revision1, revision2]

    assert DeliveryResolver.from_resource_id(section_2.slug, [
             revision1.resource_id,
             revision2.resource_id
           ]) ==
             [latest1, latest2]

    assert DeliveryResolver.from_resource_id(section_1.slug, [
             latest4.resource_id,
             revision2.resource_id
           ]) ==
             [nil, revision2]

    assert DeliveryResolver.from_resource_id(section_2.slug, [
             latest4.resource_id,
             revision2.resource_id
           ]) ==
             [nil, latest2]

    # verifies we return nil on a made up id
    assert DeliveryResolver.from_resource_id("1", [133_799, 18_283_823]) == [nil, nil]
  end

  # The result list preserves the order of the requested ids.
  test "from_resource_id/2 orders results according to inputs", %{
    latest1: latest1,
    latest2: latest2,
    revision2: revision2,
    revision1: revision1,
    latest4: latest4
  } do
    assert DeliveryResolver.from_resource_id("1", [revision2.resource_id, revision1.resource_id]) ==
             [revision2, revision1]

    assert DeliveryResolver.from_resource_id("2", [revision2.resource_id, revision1.resource_id]) ==
             [latest2, latest1]

    assert DeliveryResolver.from_resource_id("1", [revision2.resource_id, latest4.resource_id]) ==
             [revision2, nil]

    assert DeliveryResolver.from_resource_id("2", [revision2.resource_id, latest4.resource_id]) ==
             [latest2, nil]
  end
  # Counts below depend on the seeded fixture: 12 published revisions total,
  # 6 of which live in the curriculum hierarchy.
  test "all_revisions/1 resolves the all revisions", %{} do
    nodes = DeliveryResolver.all_revisions("1")
    assert length(nodes) == 12
  end

  test "all_revisions_in_hierarchy/1 resolves all revisions in the hierarchy", %{} do
    nodes = DeliveryResolver.all_revisions_in_hierarchy("1")
    assert length(nodes) == 6
  end

  # Both sections share the same published root container revision.
  test "root_resource/1 resolves the root revision", %{
    container: %{revision: container_revision}
  } do
    assert DeliveryResolver.root_container("1") == container_revision
    assert DeliveryResolver.root_container("2") == container_revision
  end

  # Hierarchy numbering: `level` is depth (root = 0) and `index` counts
  # siblings of the same kind at that level.
  test "full_hierarchy/1 resolves and reconstructs the entire hierarchy", %{
    section_1: section
  } do
    hierarchy = DeliveryResolver.full_hierarchy(section.slug)

    assert hierarchy.numbering.index == 1
    assert hierarchy.numbering.level == 0

    assert Enum.count(hierarchy.children) == 3
    assert hierarchy.children |> Enum.at(0) |> Map.get(:numbering) |> Map.get(:index) == 1
    assert hierarchy.children |> Enum.at(0) |> Map.get(:numbering) |> Map.get(:level) == 1
    assert hierarchy.children |> Enum.at(1) |> Map.get(:numbering) |> Map.get(:index) == 2

    assert hierarchy.children |> Enum.at(2) |> Map.get(:numbering) |> Map.get(:index) == 1

    assert hierarchy.children
           |> Enum.at(2)
           |> Map.get(:children)
           |> Enum.at(0)
           |> Map.get(:numbering)
           |> Map.get(:index) == 3

    assert hierarchy.children
           |> Enum.at(2)
           |> Map.get(:children)
           |> Enum.at(0)
           |> Map.get(:numbering)
           |> Map.get(:level) == 2
  end
end
end
| 35.121212 | 102 | 0.627696 |
9e987b897d98d2245595cdd04a908f8ea452adba | 135 | ex | Elixir | test/support/models/invalid_cast.ex | mojidabckuu/ex_machina | 31d19cc5572d85d3c144756b765b634995ab9c2b | [
"MIT"
] | null | null | null | test/support/models/invalid_cast.ex | mojidabckuu/ex_machina | 31d19cc5572d85d3c144756b765b634995ab9c2b | [
"MIT"
] | null | null | null | test/support/models/invalid_cast.ex | mojidabckuu/ex_machina | 31d19cc5572d85d3c144756b765b634995ab9c2b | [
"MIT"
] | 1 | 2021-01-16T19:05:50.000Z | 2021-01-16T19:05:50.000Z | defmodule ExMachina.InvalidCast do
use Ecto.Schema
# Fixture schema used to exercise factory behavior when a field's Ecto type
# fails to cast. ExMachina.InvalidType is presumably a type whose cast/1
# rejects values — TODO confirm against its definition.
schema "invalid_casts" do
  field(:invalid, ExMachina.InvalidType)
end
end
| 16.875 | 42 | 0.762963 |
9e98bce3533b02bb5b5138c022f36296fb1565fd | 225 | ex | Elixir | lib/elixir_script/lib/store.ex | beadsland/elixirscript | cb9698ad96075fcbe87b3933009d7ab2a2c939de | [
"MIT"
] | 854 | 2017-02-19T01:50:45.000Z | 2022-03-14T18:55:38.000Z | lib/elixir_script/lib/store.ex | beadsland/elixirscript | cb9698ad96075fcbe87b3933009d7ab2a2c939de | [
"MIT"
] | 210 | 2017-02-20T17:44:39.000Z | 2020-08-01T10:18:07.000Z | lib/elixir_script/lib/store.ex | beadsland/elixirscript | cb9698ad96075fcbe87b3933009d7ab2a2c939de | [
"MIT"
] | 56 | 2017-02-19T14:50:05.000Z | 2022-02-25T17:25:30.000Z | defmodule ElixirScript.Core.Store do
@moduledoc false
use ElixirScript.FFI, global: true
# FFI declarations (via ElixirScript.FFI, global scope): each function is
# implemented in JavaScript, not Elixir — only the signatures live here.
defexternal create(value, name \\ nil)
defexternal update(key, value)
defexternal read(key)
defexternal remove(key)
end
| 17.307692 | 40 | 0.751111 |
9e98be75fd3edcdec46f5bf4972f501ffa0c9329 | 1,838 | ex | Elixir | lib/app/blog/blog.ex | krlsdu/placamercosul | 46f1f404eca14897887179d46c00b75c614427ff | [
"MIT"
] | null | null | null | lib/app/blog/blog.ex | krlsdu/placamercosul | 46f1f404eca14897887179d46c00b75c614427ff | [
"MIT"
] | null | null | null | lib/app/blog/blog.ex | krlsdu/placamercosul | 46f1f404eca14897887179d46c00b75c614427ff | [
"MIT"
] | null | null | null | require IEx
defmodule App.Blog do
  @moduledoc """
  The Blog context.
  """

  import Ecto.Query, warn: false
  alias App.Repo

  alias App.Blog.Post

  @doc """
  Returns the list of posts.

  ## Examples

      iex> list_posts()
      [%Post{}, ...]

  """
  def list_posts do
    Repo.all(Post)
  end

  @doc """
  Gets a single post.

  Raises `Ecto.NoResultsError` if the Post does not exist.

  ## Examples

      iex> get_post!(123)
      %Post{}

      iex> get_post!(456)
      ** (Ecto.NoResultsError)

  """
  def get_post!(id), do: Repo.get!(Post, id)

  @doc """
  Creates a post.

  ## Examples

      iex> create_post(%{field: value})
      {:ok, %Post{}}

      iex> create_post(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_post(attrs \\ %{}) do
    %Post{}
    |> Post.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a post.

  ## Examples

      iex> update_post(post, %{field: new_value})
      {:ok, %Post{}}

      iex> update_post(post, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_post(%Post{} = post, attrs) do
    post
    |> Post.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Post.

  ## Examples

      iex> delete_post(post)
      {:ok, %Post{}}

      iex> delete_post(post)
      {:error, %Ecto.Changeset{}}

  """
  def delete_post(%Post{} = post) do
    Repo.delete(post)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking post changes.

  ## Examples

      iex> change_post(post)
      %Ecto.Changeset{source: %Post{}}

  """
  def change_post(%Post{} = post) do
    Post.changeset(post, %{})
  end

  @doc """
  Applies the "ovni" flag to a raw attrs map.

  When `attrs["ovni"]` equals `"true"`, sets `"body"` to `"ovni"`; otherwise
  returns `attrs` unchanged. Raises `KeyError` when the `"ovni"` key is
  missing (preserving the original `Map.fetch!/2` contract).
  """
  def ovni(attrs \\ %{}) do
    # Fixes two defects in the original:
    # 1. the `if` had no `else`, so any non-"true" flag rebound `attrs`
    #    to nil and the function returned nil, dropping all attributes;
    # 2. a leftover `IEx.pry()` debugging breakpoint was executed on every
    #    call — removed.
    if Map.fetch!(attrs, "ovni") == "true" do
      Map.put(attrs, "body", "ovni")
    else
      attrs
    end
  end
end
| 15.445378 | 59 | 0.552231 |
9e98bfd8687e08cc985ad18bab30cca2da96f045 | 77 | ex | Elixir | lib/crony.ex | heydtn/crony | 1bc9335edaf6220a724e6a1129f7aef1124ed71f | [
"MIT"
] | null | null | null | lib/crony.ex | heydtn/crony | 1bc9335edaf6220a724e6a1129f7aef1124ed71f | [
"MIT"
] | null | null | null | lib/crony.ex | heydtn/crony | 1bc9335edaf6220a724e6a1129f7aef1124ed71f | [
"MIT"
] | null | null | null | defmodule Crony do
  # Top-level convenience API: forwards session runs to Crony.SessionPool.
  defdelegate run_session(fun), to: Crony.SessionPool
end
| 19.25 | 53 | 0.805195 |
9e98d058b398fb8b0d4cc65a8adbb89aedc3abbb | 130 | exs | Elixir | .formatter.exs | paulswartz/tablespoon | 3637ee22cce16755fa50461058a079fa18d33b1b | [
"MIT"
] | 2 | 2020-04-03T14:39:23.000Z | 2020-12-17T23:12:58.000Z | .formatter.exs | mbta/tablespoon | 4303ad8498f3e619b081ffbd6892156e0214b61e | [
"MIT"
] | 107 | 2019-09-16T12:52:17.000Z | 2022-02-28T10:25:15.000Z | .formatter.exs | mbta/tablespoon | 4303ad8498f3e619b081ffbd6892156e0214b61e | [
"MIT"
] | null | null | null | [
import_deps: [:phoenix],
inputs: ["*.{ex,exs}", "{lib,test}/**/*.{ex,exs}", "config/{config,dev,test,prod,releases}.exs"]
]
| 26 | 98 | 0.576923 |
9e993cad8cd8195e8c518eeba54b4fb96c31d633 | 495 | ex | Elixir | programming/elixir/fizzbuzz.ex | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | null | null | null | programming/elixir/fizzbuzz.ex | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | null | null | null | programming/elixir/fizzbuzz.ex | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | null | null | null | defmodule FizzBuzz do
# Prints the FizzBuzz line for every integer from `first` to `last`.
def start(first, last) do
  Enum.each(first..last, &check/1)
end
# Enum.filter([1,6,10], fn(number) -> number > 5 end)
# Could be re-written as:
#
# Enum.filter([1,6,10], &(&1 > 5))
# Prints "FizzBuzz"/"Fizz"/"Buzz" for multiples of 15/3/5 respectively,
# otherwise the number itself. Checked in that order, so 15's rule wins.
defp check(number) do
  cond do
    rem(number, 15) == 0 -> IO.puts("FizzBuzz")
    rem(number, 3) == 0 -> IO.puts("Fizz")
    rem(number, 5) == 0 -> IO.puts("Buzz")
    true -> IO.puts("#{number}")
  end
end
end
| 29.117647 | 71 | 0.60404 |
9e9948860beb9e039f54599c38d55be82b0704a3 | 1,008 | ex | Elixir | lib/gossip/accounts/user.ex | SwiftAusterity/gossip | d79c53acd02fcb9905acb9730e59065efdd5a589 | [
"MIT"
] | null | null | null | lib/gossip/accounts/user.ex | SwiftAusterity/gossip | d79c53acd02fcb9905acb9730e59065efdd5a589 | [
"MIT"
] | null | null | null | lib/gossip/accounts/user.ex | SwiftAusterity/gossip | d79c53acd02fcb9905acb9730e59065efdd5a589 | [
"MIT"
] | null | null | null | defmodule Gossip.Accounts.User do
@moduledoc """
User schema
"""
use Gossip.Schema
alias Gossip.Games.Game
schema "users" do
  field(:email, :string)
  # Plaintext password fields are virtual: accepted on changesets but never
  # persisted — only :password_hash is stored.
  field(:password, :string, virtual: true)
  field(:password_confirmation, :string, virtual: true)
  field(:password_hash, :string)
  # Per-user token (UUID); generated in changeset/2 when absent.
  field(:token, Ecto.UUID)

  has_many(:games, Game)

  timestamps()
end
# Builds a user changeset: validates email shape, fills :token when missing,
# hashes the plaintext password, and enforces password confirmation plus
# email uniqueness.
def changeset(struct, params) do
  struct
  |> cast(params, [:email, :password, :password_confirmation])
  |> validate_required([:email])
  |> validate_format(:email, ~r/.+@.+\..+/)
  # NOTE(review): `ensure/3` is not an Ecto.Changeset function — presumably
  # imported via `use Gossip.Schema` to set a field only when unset; confirm.
  |> ensure(:token, UUID.uuid4())
  |> hash_password()
  |> validate_required([:password_hash])
  |> validate_confirmation(:password)
  |> unique_constraint(:email)
end
# Hashes the virtual :password into :password_hash on valid changesets that
# actually change the password; all other changesets pass through untouched.
defp hash_password(%{valid?: true, changes: %{password: password}} = changeset) do
  put_change(changeset, :password_hash, Comeonin.Bcrypt.hashpwsalt(password))
end

defp hash_password(changeset), do: changeset
end
| 22.4 | 83 | 0.645833 |
9e994ea9bcbef971a0fefde26334b05e0325740a | 6,056 | ex | Elixir | lib/hexpm/repository/resolver.ex | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/repository/resolver.ex | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/repository/resolver.ex | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | [
"Apache-2.0"
] | null | null | null | defmodule Hexpm.Repository.Resolver do
import Ecto.Query, only: [from: 2]
@behaviour Hex.Registry
# Resolves `requirements` against the registry, dispatching to the old or
# new Hex.Resolver API depending on which arity is exported at runtime.
def run(requirements, build_tools) do
  config = guess_config(build_tools)
  Code.ensure_loaded(Hex.Resolver)

  if function_exported?(Hex.Resolver, :resolve, 4) do
    resolve_old(requirements, config)
  else
    resolve_new(requirements, config)
  end
end
# Resolution path for the pre-repository Hex.Resolver API: opens the
# registry, prefetches the requested packages, then reports the result.
defp resolve_old(requirements, config) do
  Hex.Registry.open!(__MODULE__)

  deps = resolve_old_deps(requirements)
  top_level = Enum.map(deps, &elem(&1, 0))
  requests = resolve_old_requests(requirements, config)

  requests
  |> Enum.map(&elem(&1, 0))
  |> Hex.Registry.prefetch()

  # NOTE(review): the actual resolver invocation is commented out and
  # replaced with a hard-coded success — looks like a temporary stub, so
  # this function currently always returns :ok. Confirm before relying
  # on real resolution results.
  # Hex.Resolver.resolve(requests, deps, top_level, [])
  # |> resolve_result()
  resolve_result({:ok, ""})
after
  Hex.Registry.close
end
# Resolution path for the repository-aware Hex.Resolver API, backed by the
# Hex.Registry callbacks implemented below on a local ETS cache.
defp resolve_new(requirements, config) do
  {:ok, _name} = open()

  deps = resolve_new_deps(requirements)
  # NOTE(review): elem(&1, 0) of a {"hexpm", app, ...} tuple is the repo
  # name, not the app — every top_level entry becomes "hexpm". Possibly
  # intended to be elem(&1, 1); confirm (currently masked by the stubbed
  # resolver call below).
  top_level = Enum.map(deps, &elem(&1, 0))
  requests = resolve_new_requests(requirements, config)

  requests
  |> Enum.map(&{elem(&1, 0), elem(&1, 1)})
  |> prefetch()

  # NOTE(review): resolver call commented out — hard-coded success stub.
  # Hex.Resolver.resolve(__MODULE__, requests, deps, top_level, %{}, [])
  # |> resolve_result()
  resolve_result({:ok, ""})
after
  close()
end
# Normalizes resolver output: success collapses to :ok; failures keep the
# message with ANSI color codes stripped for storage/display.
defp resolve_result({:ok, _}), do: :ok
defp resolve_result({:error, {:version, messages}}), do: {:error, remove_ansi_escapes(messages)}
defp resolve_result({:error, messages}), do: {:error, remove_ansi_escapes(messages)}
# Strips ANSI escape sequences (e.g. "\e[31m") that Hex's resolver embeds
# in its error messages.
defp remove_ansi_escapes(text) do
  Regex.replace(~r"\e\[[0-9]+[a-zA-Z]", text, "")
end
# Dependency tuples for the old resolver API: {app, top_level?, children}.
defp resolve_old_deps(requirements) do
  Enum.map(requirements, fn %{app: app} ->
    {app, false, []}
  end)
end

# New API additionally tags each entry with the "hexpm" repository.
defp resolve_new_deps(requirements) do
  Enum.map(requirements, fn %{app: app} ->
    {"hexpm", app, false, []}
  end)
end

# Request tuples pair each requirement with the build-config label ("from").
defp resolve_old_requests(requirements, config) do
  Enum.map(requirements, fn %{name: name, app: app, requirement: req} ->
    {name, app, req, config}
  end)
end

defp resolve_new_requests(requirements, config) do
  Enum.map(requirements, fn %{name: name, app: app, requirement: req} ->
    {"hexpm", name, app, req, config}
  end)
end
# Maps a package's declared build tools to its representative config file,
# checked in priority order (mix first); "TOP CONFIG" when nothing matches
# or the input is not a list.
defp guess_config(build_tools) when is_list(build_tools) do
  [
    {"mix", "mix.exs"},
    {"rebar", "rebar.config"},
    {"rebar3", "rebar.config"},
    {"erlang.mk", "Makefile"}
  ]
  |> Enum.find_value("TOP CONFIG", fn {tool, config} ->
    if tool in build_tools, do: config
  end)
end

defp guess_config(_), do: "TOP CONFIG"
### Hex.Registry callbacks ###
# Creates a private ETS table as the registry cache and stashes its id in
# the process dictionary so the "hexpm"-repo callback heads can find it.
def open(_opts \\ []) do
  tid = :ets.new(__MODULE__, [])
  Process.put(__MODULE__, tid)
  {:ok, tid}
end

# Deletes the cache table; returns false when it is already gone.
def close(name \\ Process.get(__MODULE__)) do
  Process.delete(__MODULE__)

  if :ets.info(name) == :undefined do
    false
  else
    :ets.delete(name)
  end
end
# "hexpm" heads resolve the cache table from the process dictionary; other
# heads receive the ETS table directly.
def versions("hexpm", package_name), do: get_versions(Process.get(__MODULE__), package_name)
def versions(name, package_name), do: get_versions(name, package_name)

def deps("hexpm", package, version), do: get_deps_new(Process.get(__MODULE__), package, version)
def deps(name, package, version), do: get_deps_old(name, package, version)

def checksum(_name, _package, _version), do: raise "not implemented"
# Arity-1 variant used by the new API: strips the "hexpm" repo tag and
# delegates to the table-scoped prefetch below.
def prefetch(packages) do
  packages = Enum.map(packages, fn {"hexpm", name} -> name end)
  prefetch(Process.get(__MODULE__), packages)
end

# Loads the version lists and release ids for any not-yet-cached packages
# from the database into the ETS cache in two queries.
def prefetch(name, packages) do
  # Skip packages whose versions are already cached.
  packages =
    packages
    |> Enum.uniq
    |> Enum.reject(&:ets.member(name, {:versions, &1}))

  # package id => package name for the requested set
  packages =
    from(p in Hexpm.Repository.Package,
         where: p.name in ^packages,
         select: {p.id, p.name})
    |> Hexpm.Repo.all
    |> Map.new

  # package id => [{release id, version}]
  releases =
    from(r in Hexpm.Repository.Release,
         where: r.package_id in ^Map.keys(packages),
         select: {r.package_id, {r.id, r.version}})
    |> Hexpm.Repo.all
    |> Enum.group_by(&elem(&1, 0), &elem(&1, 1))

  # NOTE(review): releases[id] is nil for a package with zero releases,
  # which would make Enum.map/2 raise here — presumably every package has
  # at least one release; confirm.
  versions =
    Enum.map(packages, fn {id, name} ->
      {{:versions, name}, Enum.map(releases[id], &elem(&1, 1))}
    end)

  releases =
    Enum.flat_map(releases, fn {pid, versions} ->
      Enum.map(versions, fn {rid, vsn} ->
        {{:release, packages[pid], vsn}, rid}
      end)
    end)

  :ets.insert(name, versions ++ releases)
end
# Reads the cached version list for a package (raises if not prefetched).
defp get_versions(name, package) do
  :ets.lookup_element(name, {:versions, package}, 2)
end

# New-API deps are old-API deps tagged with the "hexpm" repository.
defp get_deps_new(name, package, version) do
  get_deps_old(name, package, version)
  |> Enum.map(fn {name, app, req, optional} ->
    {"hexpm", name, app, req, optional}
  end)
end

# Returns a release's requirements, lazily loading and caching them on the
# first lookup for that package/version.
defp get_deps_old(name, package, version) do
  case :ets.lookup(name, {:deps, package, version}) do
    [{_, deps}] ->
      deps

    [] ->
      # TODO: Preload requirements in prefetch, maybe?
      release_id = :ets.lookup_element(name, {:release, package, version}, 2)

      deps =
        from(r in Hexpm.Repository.Requirement,
             join: p in assoc(r, :dependency),
             where: r.release_id == ^release_id,
             select: {p.name, r.app, r.requirement, r.optional})
        |> Hexpm.Repo.all

      :ets.insert(name, {{:deps, package, version}, deps})
      deps
  end
end
# Remaining Hex.Registry callbacks that server-side resolution never
# exercises; they raise so accidental use fails loudly.
def version(_name),
  do: raise "not implemented"

def installs(_name),
  do: raise "not implemented"

def stat(_name),
  do: raise "not implemented"

def search(_name, _term),
  do: raise "not implemented"

def all_packages(_name),
  do: raise "not implemented"

def get_checksum(_name, _package, _version),
  do: raise "not implemented"

def get_build_tools(_name, _package, _version),
  do: raise "not implemented"

def retired(_name, _package, _version),
  do: raise "not implemented"

def tarball_etag(_name, _package, _version),
  do: raise "not implemented"

def tarball_etag(_name, _package, _version, _String_t),
  do: raise "not implemented"
end
| 27.652968 | 98 | 0.628798 |
9e999fcc9d9aed31bba54999be4a573489cedfb8 | 604 | ex | Elixir | lib/origami/tags/tag.ex | OrigamiApp/server | efbf185a33694b47fc94376c8ddc4b30f8e3d620 | [
"Apache-2.0"
] | null | null | null | lib/origami/tags/tag.ex | OrigamiApp/server | efbf185a33694b47fc94376c8ddc4b30f8e3d620 | [
"Apache-2.0"
] | null | null | null | lib/origami/tags/tag.ex | OrigamiApp/server | efbf185a33694b47fc94376c8ddc4b30f8e3d620 | [
"Apache-2.0"
] | null | null | null | defmodule Origami.Tags.Tag do
use Ecto.Schema
import Ecto.Changeset
schema "tags" do
field :background, :string
field :description, :string
field :image, :string
field :lat, :float
field :long, :float
field :width, :float
field :height, :float
belongs_to :user, Origami.Auth.User
timestamps()
end
@doc false
def changeset(tag, attrs) do
tag
|> cast(attrs, [:description, :image, :background, :lat, :long, :width, :height, :user_id])
|> validate_required([:description, :image, :background, :lat, :long, :width, :height, :user_id])
end
end
| 24.16 | 101 | 0.653974 |
9e999ffbac85edc3844546708f606777578502b2 | 3,995 | ex | Elixir | apps/temporario/lib/temporario/paste.ex | achedeuzot/temporar.io | acbec140732614070996924633f254b56e56131f | [
"MIT"
] | 2 | 2019-04-16T18:46:36.000Z | 2020-09-18T12:58:57.000Z | apps/temporario/lib/temporario/paste.ex | achedeuzot/temporar.io | acbec140732614070996924633f254b56e56131f | [
"MIT"
] | 6 | 2019-01-06T11:13:39.000Z | 2022-02-10T15:15:24.000Z | apps/temporario/lib/temporario/paste.ex | achedeuzot/temporar.io | acbec140732614070996924633f254b56e56131f | [
"MIT"
] | null | null | null | defmodule Temporario.Paste do
use Timex
use Ecto.Schema
import Ecto.Changeset
alias Temporario.{Paste, PasteStorage}
# Pastes are addressed by a UUID primary key (used in URLs via Phoenix.Param)
# and serialized to JSON without Ecto metadata.
@primary_key {:guid, Ecto.UUID, []}
@derive {Phoenix.Param, key: :guid}
@derive {Jason.Encoder, except: [:__meta__]}
schema "pastes" do
  field :payload, :string
  field :expiration, :utc_datetime
  # When true the paste is removed after being served (see load/1).
  field :destroy_on_reading, :boolean
  # How many times the paste has been read.
  field :requests, :integer
  field :created_at, :utc_datetime
end
# Form preset labels => preset keys offered to the user.
@default_expiration "1-minute"
@expiration_presets [
  "Immediately": "1-second",
  "After a minute": "1-minute",
  "After an hour": "1-hour",
  "After a day": "1-day",
  "After a week": "1-week",
  "After a month": "1-month",
  "After a year": "1-year",
  "Never": "never",
]

# Max payload length enforced by validate_length/3. NOTE(review): this
# limits graphemes, not bytes, so "(50MB)" is approximate for non-ASCII.
@max_payload_chars 52_428_800 # (50MB)

# Preset key => Timex.shift/2 options; "never" is approximated by a
# far-future offset of 1999 years.
@expiration_presets_to_timeshift %{
  "1-second" => [seconds: 1],
  "1-minute" => [minutes: 1],
  "1-hour" => [hours: 1],
  "1-day" => [days: 1],
  "1-week" => [weeks: 1],
  "1-month" => [months: 1],
  "1-year" => [years: 1],
  "never" => [years: 1999],
}

# Accessors for form rendering.
def expiration_choices, do: @expiration_presets
def default_expiration_choice, do: @default_expiration
@doc false
# Builds an insert changeset: converts the form's expiration preset into a
# concrete UTC datetime, validates the payload, and stamps guid/created_at.
def changeset(%Paste{} = paste, attrs \\ %{}) do
  # Convert form raw expiration to datetime
  attrs = convert_expiration_to_offset(attrs, "expiration")

  paste
  |> cast(attrs, [:payload, :expiration, :destroy_on_reading])
  |> validate_required([:payload, :expiration])
  |> validate_length(:payload, min: 2, max: @max_payload_chars)
  |> validate_datetime_in_future(:expiration)
  |> change(%{guid: UUID.uuid4()})
  |> change(%{created_at: DateTime.utc_now()})
end
# Rehydrates a Paste struct from its JSON-decoded map representation.
# NOTE(review): the matches require a zero UTC offset in both timestamps —
# non-UTC ISO8601 strings would raise MatchError; presumably all stored
# timestamps are UTC (they are written via DateTime.utc_now/0).
def from_map(%{} = raw_paste) do
  {:ok, dt_expiration, 0} = DateTime.from_iso8601(raw_paste["expiration"])
  {:ok, dt_created_at, 0} = DateTime.from_iso8601(raw_paste["created_at"])

  %Paste{
    guid: raw_paste["guid"],
    payload: raw_paste["payload"],
    expiration: dt_expiration,
    destroy_on_reading: raw_paste["destroy_on_reading"],
    requests: raw_paste["requests"],
    created_at: dt_created_at,
  }
end
# Returns a copy of the paste with its read counter advanced by one.
def increment_requests(%Paste{requests: count} = paste) do
  %{paste | requests: count + 1}
end
# Applies a valid changeset to produce the Paste to persist; invalid
# changesets are returned with an action set so forms render their errors.
def save(%Ecto.Changeset{} = changeset) do
  if changeset.valid? do
    {:ok, Ecto.Changeset.apply_changes(changeset)}
  else
    {:error, %{changeset | action: :check_errors}} # action is set, trigger the form errors
  end
end
# Serves a paste read. Bumps the request counter, then either deletes a
# burn-after-reading paste once requests reach 2 (the first read is
# presumably the creator's own view after posting — TODO confirm) or
# persists the updated counter. Expired pastes are removed from disk and
# surfaced as InvalidGUID (rendered as 404 via Plug.Exception below).
def load(%Paste{} = paste) do
  if valid?(paste) do
    newpaste = increment_requests(paste)

    if newpaste.destroy_on_reading and newpaste.requests >= 2 do
      PasteStorage.delete_from_fs(newpaste.guid)
    else
      PasteStorage.write_to_fs({:ok, newpaste})
    end

    newpaste
  else
    PasteStorage.delete_from_fs(paste.guid)
    raise Temporario.Paste.InvalidGUID
  end
end
# A paste is valid while its expiration has not passed.
defp valid?(%Paste{} = paste) do
  DateTime.compare(paste.expiration, Timex.now) != :lt
end
# Replaces the raw preset key under `field` (e.g. "1-hour") with a concrete
# UTC datetime shifted from now; unknown or missing presets fall back to the
# default expiration.
defp convert_expiration_to_offset(attrs, field) do
  expiration_raw_value = Map.get(attrs, field, @default_expiration)

  timeshift = Map.get(@expiration_presets_to_timeshift,
                      expiration_raw_value,
                      @expiration_presets_to_timeshift[@default_expiration])

  Map.put(attrs, field, Timex.shift(DateTime.utc_now(), timeshift))
end
# Custom changeset validation: the field's datetime must not be earlier
# than now. The error message is overridable via options[:message].
defp validate_datetime_in_future(%Ecto.Changeset{} = changeset, field, options \\ []) do
  validate_change(changeset, field, fn field, datetime ->
    case DateTime.compare(datetime, DateTime.utc_now()) do
      :lt -> [{field, options[:message] || "must be set sometime in the future"}]
      _ -> []
    end
  end)
end
end
# Raised when a paste is missing, expired, or burned; mapped to 404 below.
defmodule Temporario.Paste.InvalidGUID do
  defexception message: "Invalid Paste GUID"
end

# A missing paste file on disk should surface as 404, not 500.
defimpl Plug.Exception, for: File.Error do
  def status(_exception), do: 404
end

defimpl Plug.Exception, for: Temporario.Paste.InvalidGUID do
  def status(_exception), do: 404
end
| 28.333333 | 93 | 0.662078 |
9e99b07f3df8581bd354d40c977798b2e5668d29 | 2,155 | ex | Elixir | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/audit_log_config.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/audit_log_config.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/audit_log_config.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudResourceManager.V1.Model.AuditLogConfig do
  @moduledoc """
  Provides the configuration for logging a type of permissions.
  Example:

      {
        "audit_log_configs": [
          {
            "log_type": "DATA_READ",
            "exempted_members": [
              "user:[email protected]"
            ]
          },
          {
            "log_type": "DATA_WRITE"
          }
        ]
      }

  This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting
  [email protected] from DATA_READ logging.

  ## Attributes

  *   `exemptedMembers` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the identities that do not cause logging for this type of
      permission.
      Follows the same format of Binding.members.
  *   `logType` (*type:* `String.t`, *default:* `nil`) - The log type that this config enables.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :exemptedMembers => list(String.t()),
          :logType => String.t()
        }

  # Generated field declarations consumed by GoogleApi.Gax.ModelBase's
  # encode/decode machinery.
  field(:exemptedMembers, type: :list)
  field(:logType)
end
# Poison protocol implementations delegating to the generated model codecs.
defimpl Poison.Decoder, for: GoogleApi.CloudResourceManager.V1.Model.AuditLogConfig do
  def decode(value, options) do
    GoogleApi.CloudResourceManager.V1.Model.AuditLogConfig.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.CloudResourceManager.V1.Model.AuditLogConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.785714 | 138 | 0.683063 |
9e99c0447eb47a8857558ba7beb4a2ce6fd650f7 | 359 | exs | Elixir | activitypub/priv/repo/seeds.exs | torchhound/activitypub | 362de364764b81ec06ade6d0c48c303218e74522 | [
"MIT"
] | null | null | null | activitypub/priv/repo/seeds.exs | torchhound/activitypub | 362de364764b81ec06ade6d0c48c303218e74522 | [
"MIT"
] | null | null | null | activitypub/priv/repo/seeds.exs | torchhound/activitypub | 362de364764b81ec06ade6d0c48c303218e74522 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Activitypub.Repo.insert!(%Activitypub.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.916667 | 61 | 0.713092 |
9e9a037b77a5c510a142431002db2ba10ed45d4c | 2,858 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/insert_text_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/insert_text_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/docs/lib/google_api/docs/v1/model/insert_text_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.InsertTextRequest do
@moduledoc """
Inserts text at the specified location.
## Attributes
* `endOfSegmentLocation` (*type:* `GoogleApi.Docs.V1.Model.EndOfSegmentLocation.t`, *default:* `nil`) - Inserts the text at the end of a header, footer, footnote or the document body.
* `location` (*type:* `GoogleApi.Docs.V1.Model.Location.t`, *default:* `nil`) - Inserts the text at a specific index in the document. Text must be inserted inside the bounds of an existing Paragraph. For instance, text cannot be inserted at a table's start index (i.e. between the table and its preceding paragraph). The text must be inserted in the preceding paragraph.
* `text` (*type:* `String.t`, *default:* `nil`) - The text to be inserted. Inserting a newline character will implicitly create a new Paragraph at that index. The paragraph style of the new paragraph will be copied from the paragraph at the current insertion index, including lists and bullets. Text styles for inserted text will be determined automatically, generally preserving the styling of neighboring text. In most cases, the text style for the inserted text will match the text immediately before the insertion index. Some control characters (U+0000-U+0008, U+000C-U+001F) and characters from the Unicode Basic Multilingual Plane Private Use Area (U+E000-U+F8FF) will be stripped out of the inserted text.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:endOfSegmentLocation => GoogleApi.Docs.V1.Model.EndOfSegmentLocation.t(),
:location => GoogleApi.Docs.V1.Model.Location.t(),
:text => String.t()
}
field(:endOfSegmentLocation, as: GoogleApi.Docs.V1.Model.EndOfSegmentLocation)
field(:location, as: GoogleApi.Docs.V1.Model.Location)
field(:text)
end
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.InsertTextRequest do
def decode(value, options) do
GoogleApi.Docs.V1.Model.InsertTextRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.InsertTextRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 53.924528 | 716 | 0.752974 |
9e9a15715a242e76cab858b4ddd0c4a866d5b0c7 | 642 | exs | Elixir | accumulate/accumulate.exs | lpvm/exercism_elixir | b32981727c4aaec444680838db4014d70c983f5d | [
"MIT"
] | null | null | null | accumulate/accumulate.exs | lpvm/exercism_elixir | b32981727c4aaec444680838db4014d70c983f5d | [
"MIT"
] | null | null | null | accumulate/accumulate.exs | lpvm/exercism_elixir | b32981727c4aaec444680838db4014d70c983f5d | [
"MIT"
] | null | null | null | defmodule Accumulate do
@doc """
Given a list and a function, apply the function to each list item and
replace it with the function's return value.
Returns a list.
## Examples
iex> Accumulate.accumulate([], fn(x) -> x * 2 end)
[]
iex> Accumulate.accumulate([1, 2, 3], fn(x) -> x * 2 end)
[2, 4, 6]
"""
@spec accumulate(list, (any -> any)) :: list
def accumulate(list, fun) do
accumulate_aux(list, fun, [])
end
defp accumulate_aux([hd | tl], fun, acc) do
acc = acc ++ [fun.(hd)]
accumulate_aux(tl, fun, acc)
end
defp accumulate_aux([], _, acc) do
acc
end
end
| 20.0625 | 73 | 0.58567 |
9e9a1ecd651777dca590582ed3e546e77575d3a2 | 1,277 | exs | Elixir | config/config.exs | drbawb/hls_admin | 237df356d01b0390821e6f0aacd1ec0fe9fdef4b | [
"BSD-3-Clause"
] | 1 | 2020-12-22T08:30:37.000Z | 2020-12-22T08:30:37.000Z | config/config.exs | drbawb/hls_admin | 237df356d01b0390821e6f0aacd1ec0fe9fdef4b | [
"BSD-3-Clause"
] | 8 | 2020-10-12T18:51:41.000Z | 2021-06-16T18:48:36.000Z | config/config.exs | drbawb/hls_admin | 237df356d01b0390821e6f0aacd1ec0fe9fdef4b | [
"BSD-3-Clause"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :hls_admin,
ecto_repos: [HlsAdmin.Repo]
# Configure FFMPEG Server Process
config :hls_admin, HlsAdmin.FfmpegServer,
hls_root: "/srv/hls",
playlist: "cdn00"
# Configure server file browser process
config :hls_admin, HlsAdmin.AdminUI,
parent_path: "/mnt/media"
# Configures the endpoint
config :hls_admin, HlsAdminWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "EdsTOPn9NSywmF3rN1hVoXmPiNQG1EphLai47sLmEAXaa5Rd8KbJNrdAfHfRD3xT",
render_errors: [view: HlsAdminWeb.ErrorView, accepts: ~w(html json)],
pubsub_server: HlsAdmin.PubSub,
live_view: [signing_salt: "cq6As+iTY6BQ6GLaeombnvdNq7rZ6cwH"]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 31.146341 | 86 | 0.774471 |
9e9a36b5d7ea62013a29ff769ef5d014b04afd41 | 2,334 | ex | Elixir | lib/telecms_web.ex | carbon-hvze/telecms | 72db81c4321cd85f8b516ef5ee1c169a7cc753d1 | [
"MIT"
] | 1 | 2022-02-17T03:00:39.000Z | 2022-02-17T03:00:39.000Z | lib/telecms_web.ex | carbon-hvze/telecms | 72db81c4321cd85f8b516ef5ee1c169a7cc753d1 | [
"MIT"
] | null | null | null | lib/telecms_web.ex | carbon-hvze/telecms | 72db81c4321cd85f8b516ef5ee1c169a7cc753d1 | [
"MIT"
] | null | null | null | defmodule TelecmsWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use TelecmsWeb, :controller
use TelecmsWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: TelecmsWeb
import Plug.Conn
alias TelecmsWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/telecms_web/templates",
namespace: TelecmsWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {TelecmsWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def component do
quote do
use Phoenix.Component
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView and .heex helpers (live_render, live_patch, <.form>, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import TelecmsWeb.ErrorHelpers
alias TelecmsWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 21.611111 | 81 | 0.675236 |
9e9a79b1a6f237d2ad1c9473b2c4b89c5ebb6d92 | 500 | exs | Elixir | apps/snitch_core/config/dev.exs | saurabharch/avia | 74a82a95cf8bfe8143d1fce8136a3bb7ffc9467c | [
"MIT"
] | 1 | 2018-12-01T18:13:55.000Z | 2018-12-01T18:13:55.000Z | apps/snitch_core/config/dev.exs | saurabharch/avia | 74a82a95cf8bfe8143d1fce8136a3bb7ffc9467c | [
"MIT"
] | null | null | null | apps/snitch_core/config/dev.exs | saurabharch/avia | 74a82a95cf8bfe8143d1fce8136a3bb7ffc9467c | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
config :snitch_core, Snitch.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "snitch_dev",
hostname: "localhost",
pool_size: 10
config :snitch_core, :defaults_module, Snitch.Tools.Defaults
config :arc, storage: Arc.Storage.Local
config :snitch_core, :user_config_module, Snitch.Tools.UserConfig
# TODO: Remove this hack when we set up the config system
config :snitch_core, :defaults, currency: :USD
| 27.777778 | 65 | 0.766 |
9e9a94647a0e7c7adb5a0b446b4b802b21df4108 | 222 | ex | Elixir | plug_example/lib/plug_example/hello_world_plug.ex | acac99/elixir-exploration | ffa94af70188204882d228b5716c3c4e25f0c202 | [
"MIT"
] | null | null | null | plug_example/lib/plug_example/hello_world_plug.ex | acac99/elixir-exploration | ffa94af70188204882d228b5716c3c4e25f0c202 | [
"MIT"
] | 1 | 2021-06-25T15:23:14.000Z | 2021-06-25T15:23:14.000Z | plug_example/lib/plug_example/hello_world_plug.ex | acordiner92/elixir-exploration | ffa94af70188204882d228b5716c3c4e25f0c202 | [
"MIT"
] | null | null | null | defmodule PlugExample.HelloWorldPlug do
import Plug.Conn
def init(options), do: options
def call(conn, _opts) do
conn
|> put_resp_content_type("text/plain")
|> send_resp(200, 'Hello World\n')
end
end
| 18.5 | 42 | 0.693694 |
9e9ab6ff214776805111966b7387fda000025d47 | 1,780 | ex | Elixir | lib/surface/content_handler.ex | wrren/surface | c54afa57949a653ac5fa164691ebb8655b93e282 | [
"MIT"
] | null | null | null | lib/surface/content_handler.ex | wrren/surface | c54afa57949a653ac5fa164691ebb8655b93e282 | [
"MIT"
] | null | null | null | lib/surface/content_handler.ex | wrren/surface | c54afa57949a653ac5fa164691ebb8655b93e282 | [
"MIT"
] | null | null | null | defmodule Surface.ContentHandler do
@moduledoc false
import Phoenix.LiveView.Helpers, only: [sigil_L: 2]
defmacro __before_compile__(_env) do
quote do
defoverridable render: 1
def render(assigns) do
assigns = unquote(__MODULE__).init_contents(assigns)
super(assigns)
end
end
end
def init_contents(assigns) do
{%{__default__: default_group}, data_groups} =
assigns
|> get_in([:__surface__, :groups])
|> Map.split([:__default__])
props =
for {name, %{size: _size, binding: binding}} <- data_groups, into: %{} do
value =
assigns[name]
|> Enum.with_index()
|> Enum.map(fn {assign, index} ->
Map.put(assign, :inner_content, data_content_fun(assigns, name, index, binding: binding))
end)
{name, value}
end
content = default_content_fun(assigns, default_group.size, binding: default_group.binding)
assigns
|> Map.merge(props)
|> Map.put(:inner_content, content)
end
defp data_content_fun(assigns, name, index, binding: true) do
fn args -> assigns.inner_content.({name, index, args}) end
end
defp data_content_fun(assigns, name, index, binding: false) do
fn -> assigns.inner_content.({name, index, []}) end
end
defp default_content_fun(assigns, size, binding: true) do
fn args -> join_contents(assigns, size, args) end
end
defp default_content_fun(assigns, size, binding: false) do
fn -> join_contents(assigns, size, []) end
end
defp join_contents(assigns, size, args) do
~L"""
<%= if assigns[:inner_content] != nil do %>
<%= for index <- 0..size-1 do %><%= assigns.inner_content.({:__default__, index, args}) %><% end %>
<% end %>
"""
end
end
| 27.384615 | 103 | 0.634831 |
9e9ad93929a55e9c4805f268c4e7593d33171cf9 | 1,982 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/external_vpn_gateway_interface.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/external_vpn_gateway_interface.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/external_vpn_gateway_interface.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.ExternalVpnGatewayInterface do
@moduledoc """
The interface for the external VPN gateway.
## Attributes
* `id` (*type:* `integer()`, *default:* `nil`) - The numeric ID of this interface. The allowed input values for this id for different redundancy types of external VPN gateway: SINGLE_IP_INTERNALLY_REDUNDANT - 0 TWO_IPS_REDUNDANCY - 0, 1 FOUR_IPS_REDUNDANCY - 0, 1, 2, 3
* `ipAddress` (*type:* `String.t`, *default:* `nil`) - IP address of the interface in the external VPN gateway. Only IPv4 is supported. This IP address can be either from your on-premise gateway or another Cloud provider's VPN gateway, it cannot be an IP address from Google Compute Engine.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:id => integer() | nil,
:ipAddress => String.t() | nil
}
field(:id)
field(:ipAddress)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.ExternalVpnGatewayInterface do
def decode(value, options) do
GoogleApi.Compute.V1.Model.ExternalVpnGatewayInterface.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.ExternalVpnGatewayInterface do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.64 | 294 | 0.740161 |
9e9ae558bff5f3ae33a380590893a9ae1a8ce843 | 400 | exs | Elixir | backend/.formatter.exs | eeng/caffe | d85d0dd56a8204c715052ddaf3d990e47c5df0e9 | [
"MIT"
] | 7 | 2020-03-27T08:26:52.000Z | 2021-08-29T09:50:31.000Z | backend/.formatter.exs | eeng/caffe | d85d0dd56a8204c715052ddaf3d990e47c5df0e9 | [
"MIT"
] | null | null | null | backend/.formatter.exs | eeng/caffe | d85d0dd56a8204c715052ddaf3d990e47c5df0e9 | [
"MIT"
] | null | null | null | [
import_deps: [:ecto, :phoenix, :commanded, :absinthe],
inputs: ["*.{ex,exs}", "priv/*/seeds.exs", "{config,lib,test}/**/*.{ex,exs}"],
subdirectories: ["priv/*/migrations"],
locals_without_parens: [
project: 2,
project: 3,
validates: 2,
assert_error: 2,
assert_contain_exactly: 2,
assert_contain_exactly: 3,
assert_lists_equal: 2,
assert_lists_equal: 3
]
]
| 25 | 80 | 0.63 |
9e9af29ce8be4e7c2b192de76ceffb94b10cb2b3 | 37,097 | exs | Elixir | lib/elixir/test/elixir/string_test.exs | ihaveint/elixir | 659261eebbd325da75ea16a20305097247630fcb | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/string_test.exs | ihaveint/elixir | 659261eebbd325da75ea16a20305097247630fcb | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/string_test.exs | ihaveint/elixir | 659261eebbd325da75ea16a20305097247630fcb | [
"Apache-2.0"
] | null | null | null | Code.require_file("test_helper.exs", __DIR__)
defmodule StringTest do
use ExUnit.Case, async: true
doctest String
test "next_codepoint/1" do
assert String.next_codepoint("ésoj") == {"é", "soj"}
assert String.next_codepoint(<<255>>) == {<<255>>, ""}
assert String.next_codepoint("") == nil
end
# test cases described in https://mortoray.com/2013/11/27/the-string-type-is-broken/
  test "Unicode" do
    # Grapheme-aware operations must treat accented characters as single
    # characters (cases from the article linked above).
    assert String.reverse("noël") == "lëon"
    assert String.slice("noël", 0..2) == "noë"
    assert String.length("noël") == 4
    # NOTE(review): the literals below render as empty strings here; they
    # presumably contain astral-plane characters (e.g. emoji) that some
    # tooling strips -- verify the bytes before trusting these assertions.
    assert String.length("") == 2
    assert String.slice("", 1..1) == ""
    assert String.reverse("") == ""
    # NOTE(review): "baffle" presumably contains a ligature that upcases to
    # plain ASCII letters -- confirm the literal's codepoints.
    assert String.upcase("baffle") == "BAFFLE"
    # NOTE(review): equivalent?/2 checks canonical equivalence, so the two
    # "noël" literals are likely NFC vs NFD forms of the same text -- confirm.
    assert String.equivalent?("noël", "noël")
  end
test "split/1,2,3" do
assert String.split("") == []
assert String.split("foo bar") == ["foo", "bar"]
assert String.split(" foo bar") == ["foo", "bar"]
assert String.split("foo bar ") == ["foo", "bar"]
assert String.split(" foo bar ") == ["foo", "bar"]
assert String.split("foo\t\n\v\f\r\sbar\n") == ["foo", "bar"]
assert String.split("foo" <> <<194, 133>> <> "bar") == ["foo", "bar"]
# information separators are not considered whitespace
assert String.split("foo\u001Fbar") == ["foo\u001Fbar"]
# no-break space is excluded
assert String.split("foo\00A0bar") == ["foo\00A0bar"]
assert String.split("foo\u202Fbar") == ["foo\u202Fbar"]
assert String.split("a,b,c", ",") == ["a", "b", "c"]
assert String.split("a,b", ".") == ["a,b"]
assert String.split("1,2 3,4", [" ", ","]) == ["1", "2", "3", "4"]
assert String.split("", ",") == [""]
assert String.split(" a b c ", " ") == ["", "a", "b", "c", ""]
assert String.split(" a b c ", " ", parts: :infinity) == ["", "a", "b", "c", ""]
assert String.split(" a b c ", " ", parts: 1) == [" a b c "]
assert String.split(" a b c ", " ", parts: 2) == ["", "a b c "]
assert String.split("", ",", trim: true) == []
assert String.split(" a b c ", " ", trim: true) == ["a", "b", "c"]
assert String.split(" a b c ", " ", trim: true, parts: :infinity) == ["a", "b", "c"]
assert String.split(" a b c ", " ", trim: true, parts: 1) == [" a b c "]
assert String.split(" a b c ", " ", trim: true, parts: 2) == ["a", "b c "]
assert String.split("abé", "") == ["", "a", "b", "é", ""]
assert String.split("abé", "", parts: :infinity) == ["", "a", "b", "é", ""]
assert String.split("abé", "", parts: 1) == ["abé"]
assert String.split("abé", "", parts: 2) == ["", "abé"]
assert String.split("abé", "", parts: 3) == ["", "a", "bé"]
assert String.split("abé", "", parts: 4) == ["", "a", "b", "é"]
assert String.split("abé", "", parts: 5) == ["", "a", "b", "é", ""]
assert String.split("abé", "", parts: 10) == ["", "a", "b", "é", ""]
assert String.split("abé", "", trim: true) == ["a", "b", "é"]
assert String.split("abé", "", trim: true, parts: :infinity) == ["a", "b", "é"]
assert String.split("abé", "", trim: true, parts: 2) == ["a", "bé"]
assert String.split("abé", "", trim: true, parts: 3) == ["a", "b", "é"]
assert String.split("abé", "", trim: true, parts: 4) == ["a", "b", "é"]
assert String.split("noël", "") == ["", "n", "o", "ë", "l", ""]
assert String.split("x-", "-", parts: 2, trim: true) == ["x"]
assert String.split("x-x-", "-", parts: 3, trim: true) == ["x", "x"]
assert String.split("hello", []) == ["hello"]
assert String.split("hello", [], trim: true) == ["hello"]
assert String.split("", []) == [""]
assert String.split("", [], trim: true) == []
end
test "split/2,3 with regex" do
assert String.split("", ~r{,}) == [""]
assert String.split("", ~r{,}, trim: true) == []
assert String.split("a,b", ~r{,}) == ["a", "b"]
assert String.split("a,b,c", ~r{,}) == ["a", "b", "c"]
assert String.split("a,b,c", ~r{,}, parts: 2) == ["a", "b,c"]
assert String.split("a,b.c ", ~r{\W}) == ["a", "b", "c", ""]
assert String.split("a,b.c ", ~r{\W}, trim: false) == ["a", "b", "c", ""]
assert String.split("a,b", ~r{\.}) == ["a,b"]
end
test "split/2,3 with compiled pattern" do
pattern = :binary.compile_pattern("-")
assert String.split("x-", pattern) == ["x", ""]
assert String.split("x-", pattern, parts: 2, trim: true) == ["x"]
assert String.split("x-x-", pattern, parts: 3, trim: true) == ["x", "x"]
end
  test "splitter/2,3" do
    # splitter/2,3 is the lazy counterpart of split/3: materializing the
    # stream with Enum.to_list/1 must agree with eager splitting.
    assert String.splitter("a,b,c", ",") |> Enum.to_list() == ["a", "b", "c"]
    assert String.splitter("a,b", ".") |> Enum.to_list() == ["a,b"]
    assert String.splitter("1,2 3,4", [" ", ","]) |> Enum.to_list() == ["1", "2", "3", "4"]
    assert String.splitter("", ",") |> Enum.to_list() == [""]
    assert String.splitter("", ",", trim: true) |> Enum.to_list() == []
    assert String.splitter(" a b c ", " ", trim: true) |> Enum.to_list() == ["a", "b", "c"]
    # Enum.take/2 consumes only a prefix, exercising the stream's laziness.
    assert String.splitter(" a b c ", " ", trim: true) |> Enum.take(1) == ["a"]
    assert String.splitter(" a b c ", " ", trim: true) |> Enum.take(2) == ["a", "b"]
    # An empty pattern list performs no splitting at all.
    assert String.splitter("hello", []) |> Enum.to_list() == ["hello"]
    assert String.splitter("hello", [], trim: true) |> Enum.to_list() == ["hello"]
    assert String.splitter("", []) |> Enum.to_list() == [""]
    assert String.splitter("", [], trim: true) |> Enum.to_list() == []
  end
test "split_at/2" do
assert String.split_at("", 0) == {"", ""}
assert String.split_at("", -1) == {"", ""}
assert String.split_at("", 1) == {"", ""}
assert String.split_at("abc", 0) == {"", "abc"}
assert String.split_at("abc", 2) == {"ab", "c"}
assert String.split_at("abc", 3) == {"abc", ""}
assert String.split_at("abc", 4) == {"abc", ""}
assert String.split_at("abc", 1000) == {"abc", ""}
assert String.split_at("abc", -1) == {"ab", "c"}
assert String.split_at("abc", -3) == {"", "abc"}
assert String.split_at("abc", -4) == {"", "abc"}
assert String.split_at("abc", -1000) == {"", "abc"}
assert_raise FunctionClauseError, fn ->
String.split_at("abc", 0.1)
end
assert_raise FunctionClauseError, fn ->
String.split_at("abc", -0.1)
end
end
test "upcase/1" do
assert String.upcase("123 abcd 456 efg hij ( %$#) kl mnop @ qrst = -_ uvwxyz") ==
"123 ABCD 456 EFG HIJ ( %$#) KL MNOP @ QRST = -_ UVWXYZ"
assert String.upcase("") == ""
assert String.upcase("abcD") == "ABCD"
end
  test "upcase/1 with UTF-8" do
    # Latin-1 supplement letters map to their uppercase forms; symbols and
    # digits pass through unchanged.
    assert String.upcase("& % # àáâ ãäå 1 2 ç æ") == "& % # ÀÁÂ ÃÄÅ 1 2 Ç Æ"
    assert String.upcase("àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ") == "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ"
  end
  test "upcase/1 with UTF-8 multibyte" do
    # Case mapping may change the length: "ß" uppercases to the two-letter
    # sequence "SS".
    assert String.upcase("straße") == "STRASSE"
    assert String.upcase("áüÈß") == "ÁÜÈSS"
  end
  test "upcase/1 with ascii" do
    # :ascii mode only maps bytes a-z, leaving the accented "á" untouched.
    assert String.upcase("olá", :ascii) == "OLá"
  end
  test "upcase/1 with turkic" do
    # Turkic casing: dotless "ı" upcases to "I" and dotted "i" to "İ";
    # already-uppercase input is unchanged.
    assert String.upcase("ıi", :turkic) == "Iİ"
    assert String.upcase("Iİ", :turkic) == "Iİ"
  end
test "downcase/1" do
assert String.downcase("123 ABcD 456 EfG HIJ ( %$#) KL MNOP @ QRST = -_ UVWXYZ") ==
"123 abcd 456 efg hij ( %$#) kl mnop @ qrst = -_ uvwxyz"
assert String.downcase("abcD") == "abcd"
assert String.downcase("") == ""
end
  test "downcase/1 with UTF-8" do
    # Latin-1 supplement letters map to their lowercase forms; "ß" is
    # already lowercase and stays as-is.
    assert String.downcase("& % # ÀÁÂ ÃÄÅ 1 2 Ç Æ") == "& % # àáâ ãäå 1 2 ç æ"
    assert String.downcase("ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ") == "àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ"
    assert String.downcase("áüÈß") == "áüèß"
  end
  test "downcase/1 with greek final sigma" do
    # In the default mode capital sigma always becomes the medial form "σ".
    assert String.downcase("Σ") == "σ"
    assert String.downcase("ΣΣ") == "σσ"
    assert String.downcase("Σ ΣΣ") == "σ σσ"
    assert String.downcase("ΜΕΣ'ΑΠΟ") == "μεσ'απο"
    assert String.downcase("ΑΣ'ΤΟΥΣ") == "ασ'τουσ"
    # In :greek mode a sigma at the end of a word becomes the final form "ς";
    # a lone sigma still downcases to "σ".
    assert String.downcase("Σ", :greek) == "σ"
    assert String.downcase("Σ ΣΣ", :greek) == "σ σς"
    assert String.downcase("Σ ΣΑΣ Σ", :greek) == "σ σας σ"
    assert String.downcase("ΜΕΣ'ΑΠΟ", :greek) == "μεσ'απο"
    assert String.downcase("ΑΣ'ΤΟΥΣ", :greek) == "ασ'τους"
  end
  test "downcase/1 with ascii" do
    # :ascii mode only maps bytes A-Z, leaving the accented "Á" untouched.
    assert String.downcase("OLÁ", :ascii) == "olÁ"
  end
  test "downcase/1 with turkic" do
    # Turkic casing: "I" downcases to dotless "ı" and "İ" to plain "i".
    assert String.downcase("Iİ", :turkic) == "ıi"
    assert String.downcase("İ", :turkic) == "i"
    assert String.downcase("ıi", :turkic) == "ıi"
    assert String.downcase("i", :turkic) == "i"
    # Outside :turkic mode, "İ" maps to "i" followed by a combining dot
    # above (U+0307).
    assert String.downcase("İ") == "i̇"
  end
test "capitalize/1" do
assert String.capitalize("") == ""
assert String.capitalize("abc") == "Abc"
assert String.capitalize("ABC") == "Abc"
assert String.capitalize("c b a") == "C b a"
assert String.capitalize("1ABC") == "1abc"
assert String.capitalize("_aBc1") == "_abc1"
assert String.capitalize(" aBc1") == " abc1"
end
  test "capitalize/1 with UTF-8" do
    # The first grapheme is titlecased and the remainder downcased.
    assert String.capitalize("àáâ") == "Àáâ"
    assert String.capitalize("ÀÁÂ") == "Àáâ"
    assert String.capitalize("âáà") == "Âáà"
    assert String.capitalize("ÂÁÀ") == "Âáà"
    assert String.capitalize("òóôõö") == "Òóôõö"
    assert String.capitalize("ÒÓÔÕÖ") == "Òóôõö"
    # NOTE(review): "fin" presumably starts with a ligature whose titlecase
    # form expands to separate letters -- confirm the literal's codepoints.
    assert String.capitalize("fin") == "Fin"
  end
  test "capitalize/1 with ascii" do
    # :ascii mode only maps bytes a-z/A-Z: a non-ASCII first character is
    # left alone, while ASCII characters in the tail are still downcased.
    assert String.capitalize("àáâ", :ascii) == "àáâ"
    assert String.capitalize("aáA", :ascii) == "Aáa"
  end
  test "capitalize/1 with turkic" do
    # Turkic casing: a leading "i" titlecases to "İ" and "ı" to "I"; the
    # rest of the string is downcased with the same rules.
    assert String.capitalize("iii", :turkic) == "İii"
    assert String.capitalize("ııı", :turkic) == "Iıı"
    assert String.capitalize("İii", :turkic) == "İii"
    assert String.capitalize("Iıı", :turkic) == "Iıı"
  end
  test "replace_leading/3" do
    # Only the leading run of matches is replaced; occurrences further into
    # the string are left alone, and the replacement may differ in length.
    assert String.replace_leading("aa abc ", "a", "b") == "bb abc "
    assert String.replace_leading("__ abc ", "_", "b") == "bb abc "
    assert String.replace_leading("aaaaaaaa ", "a", "b") == "bbbbbbbb "
    assert String.replace_leading("aaaaaaaa ", "aaa", "b") == "bbaa "
    assert String.replace_leading("aaaaaaaaa", "a", "b") == "bbbbbbbbb"
    assert String.replace_leading("]]]]]]", "]", "[]") == "[][][][][][]"
    assert String.replace_leading("]]]]]]]]", "]", "") == ""
    assert String.replace_leading("]]]]]] ]", "]", "") == " ]"
    assert String.replace_leading("猫猫 cat ", "猫", "й") == "йй cat "
    assert String.replace_leading("test", "t", "T") == "Test"
    assert String.replace_leading("t", "t", "T") == "T"
    # No match at the front leaves the string unchanged.
    assert String.replace_leading("aaa", "b", "c") == "aaa"
    # An empty match string is rejected for any subject, including "".
    message = ~r/cannot use an empty string/
    assert_raise ArgumentError, message, fn ->
      String.replace_leading("foo", "", "bar")
    end
    assert_raise ArgumentError, message, fn ->
      String.replace_leading("", "", "bar")
    end
  end
  test "replace_trailing/3" do
    # Mirror of replace_leading/3: only the trailing run of matches is
    # replaced, and the replacement may differ in length.
    assert String.replace_trailing(" abc aa", "a", "b") == " abc bb"
    assert String.replace_trailing(" abc __", "_", "b") == " abc bb"
    assert String.replace_trailing(" aaaaaaaa", "a", "b") == " bbbbbbbb"
    assert String.replace_trailing(" aaaaaaaa", "aaa", "b") == " aabb"
    assert String.replace_trailing("aaaaaaaaa", "a", "b") == "bbbbbbbbb"
    assert String.replace_trailing("]]]]]]", "]", "[]") == "[][][][][][]"
    assert String.replace_trailing("]]]]]]]]", "]", "") == ""
    assert String.replace_trailing("] ]]]]]]", "]", "") == "] "
    assert String.replace_trailing(" cat 猫猫", "猫", "й") == " cat йй"
    assert String.replace_trailing("test", "t", "T") == "tesT"
    assert String.replace_trailing("t", "t", "T") == "T"
    # No match at the end leaves the string unchanged.
    assert String.replace_trailing("aaa", "b", "c") == "aaa"
    # An empty match string is rejected for any subject, including "".
    message = ~r/cannot use an empty string/
    assert_raise ArgumentError, message, fn ->
      String.replace_trailing("foo", "", "bar")
    end
    assert_raise ArgumentError, message, fn ->
      String.replace_trailing("", "", "bar")
    end
  end
  test "trim/1,2" do
    # trim/1 strips Unicode whitespace from both ends only.
    assert String.trim("") == ""
    assert String.trim(" abc ") == "abc"
    assert String.trim("a abc a\n\n") == "a abc a"
    assert String.trim("a abc a\t\n\v\f\r\s") == "a abc a"
    # trim/2 strips whole copies of the given string, so an odd run of "猫"
    # keeps one when trimming "猫猫".
    assert String.trim("___ abc ___", "_") == " abc "
    assert String.trim("猫猫猫cat猫猫猫", "猫猫") == "猫cat猫"
    # no-break space
    assert String.trim("\u00A0a abc a\u00A0") == "a abc a"
    # whitespace defined as a range
    assert String.trim("\u2008a abc a\u2005") == "a abc a"
  end
  test "trim_leading/1,2" do
    # trim_leading/1 strips Unicode whitespace from the front only.
    assert String.trim_leading("") == ""
    assert String.trim_leading(" abc ") == "abc "
    assert String.trim_leading("a abc a") == "a abc a"
    assert String.trim_leading("\n\na abc a") == "a abc a"
    assert String.trim_leading("\t\n\v\f\r\sa abc a") == "a abc a"
    # <<194, 133>> is U+0085 (next line), also treated as whitespace here.
    assert String.trim_leading(<<194, 133, "a abc a">>) == "a abc a"
    # information separators are not whitespace
    assert String.trim_leading("\u001F a abc a") == "\u001F a abc a"
    # no-break space
    assert String.trim_leading("\u00A0 a abc a") == "a abc a"
    # trim_leading/2 strips whole copies of the given string from the front.
    assert String.trim_leading("aa aaa", "aaa") == "aa aaa"
    assert String.trim_leading("aaa aaa", "aa") == "a aaa"
    assert String.trim_leading("aa abc ", "a") == " abc "
    assert String.trim_leading("__ abc ", "_") == " abc "
    assert String.trim_leading("aaaaaaaaa ", "a") == " "
    assert String.trim_leading("aaaaaaaaaa", "a") == ""
    assert String.trim_leading("]]]]]] ]", "]") == " ]"
    assert String.trim_leading("猫猫 cat ", "猫") == " cat "
    assert String.trim_leading("test", "t") == "est"
    assert String.trim_leading("t", "t") == ""
    assert String.trim_leading("", "t") == ""
  end
  test "trim_trailing/1,2" do
    # trim_trailing/1 strips Unicode whitespace from the end only.
    assert String.trim_trailing("") == ""
    assert String.trim_trailing("1\n") == "1"
    assert String.trim_trailing("\r\n") == ""
    assert String.trim_trailing(" abc ") == " abc"
    assert String.trim_trailing(" abc a") == " abc a"
    assert String.trim_trailing("a abc a\n\n") == "a abc a"
    assert String.trim_trailing("a abc a\t\n\v\f\r\s") == "a abc a"
    # <<194, 133>> is U+0085 (next line), also treated as whitespace here.
    assert String.trim_trailing(<<"a abc a", 194, 133>>) == "a abc a"
    # information separators are not whitespace
    assert String.trim_trailing("a abc a \u001F") == "a abc a \u001F"
    # no-break space
    assert String.trim_trailing("a abc a \u00A0") == "a abc a"
    # trim_trailing/2 strips whole copies of the given string from the end.
    assert String.trim_trailing("aaa aa", "aaa") == "aaa aa"
    assert String.trim_trailing("aaa aaa", "aa") == "aaa a"
    assert String.trim_trailing(" abc aa", "a") == " abc "
    assert String.trim_trailing(" abc __", "_") == " abc "
    assert String.trim_trailing(" aaaaaaaaa", "a") == " "
    assert String.trim_trailing("aaaaaaaaaa", "a") == ""
    assert String.trim_trailing("] ]]]]]]", "]") == "] "
    assert String.trim_trailing(" cat 猫猫", "猫") == " cat "
    assert String.trim_trailing("test", "t") == "tes"
    assert String.trim_trailing("t", "t") == ""
    assert String.trim_trailing("", "t") == ""
  end
  test "pad_leading/2,3" do
    # Padding is counted in graphemes; input already at least as long as the
    # requested count is returned unchanged.
    assert String.pad_leading("", 5) == " "
    assert String.pad_leading("abc", 5) == " abc"
    assert String.pad_leading(" abc ", 9) == " abc "
    assert String.pad_leading("猫", 5) == " 猫"
    assert String.pad_leading("-", 0) == "-"
    assert String.pad_leading("-", 1) == "-"
    # The padding may be a string or a list of strings, cycled as needed;
    # multi-grapheme padding elements may overshoot the requested count.
    assert String.pad_leading("---", 5, "abc") == "ab---"
    assert String.pad_leading("---", 9, "abc") == "abcabc---"
    assert String.pad_leading("---", 5, ["abc"]) == "abcabc---"
    assert String.pad_leading("--", 6, ["a", "bc"]) == "abcabc--"
    # Negative counts and empty padding lists are rejected up front.
    assert_raise FunctionClauseError, fn ->
      String.pad_leading("-", -1)
    end
    assert_raise FunctionClauseError, fn ->
      String.pad_leading("-", 1, [])
    end
    # Non-string padding elements raise with a descriptive message.
    message = "expected a string padding element, got: 10"
    assert_raise ArgumentError, message, fn ->
      String.pad_leading("-", 3, ["-", 10])
    end
  end
  test "pad_trailing/2,3" do
    # Mirror of pad_leading/2,3: padding is appended instead of prepended.
    assert String.pad_trailing("", 5) == " "
    assert String.pad_trailing("abc", 5) == "abc "
    assert String.pad_trailing(" abc ", 9) == " abc "
    assert String.pad_trailing("猫", 5) == "猫 "
    assert String.pad_trailing("-", 0) == "-"
    assert String.pad_trailing("-", 1) == "-"
    # String or list padding, cycled; multi-grapheme elements may overshoot
    # the requested count.
    assert String.pad_trailing("---", 5, "abc") == "---ab"
    assert String.pad_trailing("---", 9, "abc") == "---abcabc"
    assert String.pad_trailing("---", 5, ["abc"]) == "---abcabc"
    assert String.pad_trailing("--", 6, ["a", "bc"]) == "--abcabc"
    # Negative counts and empty padding lists are rejected up front.
    assert_raise FunctionClauseError, fn ->
      String.pad_trailing("-", -1)
    end
    assert_raise FunctionClauseError, fn ->
      String.pad_trailing("-", 1, [])
    end
    # Non-string padding elements raise with a descriptive message.
    message = "expected a string padding element, got: 10"
    assert_raise ArgumentError, message, fn ->
      String.pad_trailing("-", 3, ["-", 10])
    end
  end
# String.reverse/1 reverses by grapheme, not by codepoint or byte.
test "reverse/1" do
  assert String.reverse("") == ""
  assert String.reverse("abc") == "cba"
  assert String.reverse("Hello World") == "dlroW olleH"
  # Multibyte codepoints stay intact when reversed.
  assert String.reverse("Hello ∂og") == "go∂ olleH"
  # The combining mark travels with its base character (one grapheme).
  assert String.reverse("Ā̀stute") == "etutsĀ̀"
  # Double reversal is the identity, including for the "\r\n" cluster.
  assert String.reverse(String.reverse("Hello World")) == "Hello World"
  assert String.reverse(String.reverse("Hello \r\n World")) == "Hello \r\n World"
end
describe "replace/3" do
  test "with empty string and string replacement" do
    # Empty pattern + empty replacement is a no-op.
    assert String.replace("elixir", "", "") == "elixir"
    # An empty pattern matches every grapheme boundary, both ends included.
    assert String.replace("ELIXIR", "", ".") == ".E.L.I.X.I.R."
    assert String.replace("ELIXIR", "", ".", global: true) == ".E.L.I.X.I.R."
    # global: false replaces only the first (leading) boundary.
    assert String.replace("ELIXIR", "", ".", global: false) == ".ELIXIR"
  end

  test "with empty pattern list" do
    # With no patterns there is nothing to replace.
    assert String.replace("elixir", [], "anything") == "elixir"
  end

  test "with match pattern and string replacement" do
    assert String.replace("a,b,c", ",", "-") == "a-b-c"
    # A list of patterns replaces occurrences of any of its elements.
    assert String.replace("a,b,c", [",", "b"], "-") == "a---c"
    # global: false stops after the leftmost match.
    assert String.replace("a,b,c", ",", "-", global: false) == "a-b,c"
    assert String.replace("a,b,c", [",", "b"], "-", global: false) == "a-b,c"
    assert String.replace("ãéã", "é", "e", global: false) == "ãeã"
  end

  test "with regex and string replacement" do
    # "\\1" in the replacement references the first capture group.
    assert String.replace("a,b,c", ~r/,(.)/, ",\\1\\1") == "a,bb,cc"
    assert String.replace("a,b,c", ~r/,(.)/, ",\\1\\1", global: false) == "a,bb,c"
  end

  test "with empty string and function replacement" do
    # The replacement function receives the matched substring ("" here) and
    # may return a binary or iodata.
    assert String.replace("elixir", "", fn "" -> "" end) == "elixir"
    assert String.replace("ELIXIR", "", fn "" -> "." end) == ".E.L.I.X.I.R."
    assert String.replace("ELIXIR", "", fn "" -> "." end, global: true) == ".E.L.I.X.I.R."
    assert String.replace("ELIXIR", "", fn "" -> "." end, global: false) == ".ELIXIR"

    assert String.replace("elixir", "", fn "" -> [""] end) == "elixir"
    assert String.replace("ELIXIR", "", fn "" -> ["."] end) == ".E.L.I.X.I.R."
    assert String.replace("ELIXIR", "", fn "" -> ["."] end, global: true) == ".E.L.I.X.I.R."
    assert String.replace("ELIXIR", "", fn "" -> ["."] end, global: false) == ".ELIXIR"
  end

  test "with match pattern and function replacement" do
    assert String.replace("a,b,c", ",", fn "," -> "-" end) == "a-b-c"
    assert String.replace("a,b,c", [",", "b"], fn x -> "[#{x}]" end) == "a[,][b][,]c"
    # Iodata returned from the function is accepted as well.
    assert String.replace("a,b,c", [",", "b"], fn x -> [?[, x, ?]] end) == "a[,][b][,]c"
    assert String.replace("a,b,c", ",", fn "," -> "-" end, global: false) == "a-b,c"
    assert String.replace("a,b,c", [",", "b"], fn x -> "[#{x}]" end, global: false) == "a[,]b,c"
    assert String.replace("ãéã", "é", fn "é" -> "e" end, global: false) == "ãeã"
  end

  test "with regex and function replacement" do
    # The function receives the whole match (e.g. ",b"), not just the capture.
    assert String.replace("a,b,c", ~r/,(.)/, fn x -> "#{x}#{x}" end) == "a,b,b,c,c"
    assert String.replace("a,b,c", ~r/,(.)/, fn x -> [x, x] end) == "a,b,b,c,c"
    assert String.replace("a,b,c", ~r/,(.)/, fn x -> "#{x}#{x}" end, global: false) == "a,b,b,c"
    assert String.replace("a,b,c", ~r/,(.)/, fn x -> [x, x] end, global: false) == "a,b,b,c"
  end
end
describe "replace/4" do
  test "with incorrect params" do
    # A non-function replacement in the 4-argument form has no matching clause.
    message = "no function clause matching in String.replace/4"

    assert_raise FunctionClauseError, message, fn ->
      String.replace("a,b,c", "a,b,c", ",", "")
    end
  end
end
test "duplicate/2" do
  # Non-negative counts repeat the subject; zero yields the empty string.
  for {count, expected} <- [{0, ""}, {1, "abc"}, {2, "abcabc"}] do
    assert String.duplicate("abc", count) == expected
  end

  # Multibyte content duplicates byte-for-byte.
  assert String.duplicate("&ã$", 2) == "&ã$&ã$"

  # Negative counts are rejected.
  assert_raise ArgumentError, fn -> String.duplicate("abc", -1) end
end
# codepoints/1 splits a UTF-8 binary into one-codepoint strings. Unlike
# graphemes/1, combining marks and vowel signs come back as separate entries.
test "codepoints/1" do
  assert String.codepoints("elixir") == ["e", "l", "i", "x", "i", "r"]
  # slovak
  assert String.codepoints("elixír") == ["e", "l", "i", "x", "í", "r"]
  # armenian
  assert String.codepoints("ոգելից ըմպելիք") ==
           ["ո", "գ", "ե", "լ", "ի", "ց", " ", "ը", "մ", "պ", "ե", "լ", "ի", "ք"]
  # belarussian
  assert String.codepoints("эліксір") == ["э", "л", "і", "к", "с", "і", "р"]
  # greek
  assert String.codepoints("ελιξήριο") == ["ε", "λ", "ι", "ξ", "ή", "ρ", "ι", "ο"]
  # hebraic
  assert String.codepoints("סם חיים") == ["ס", "ם", " ", "ח", "י", "י", "ם"]
  # hindi — the dependent vowel sign is its own codepoint
  assert String.codepoints("अमृत") == ["अ", "म", "ृ", "त"]
  # bengali — viramas and vowel signs split out as well
  assert String.codepoints("স্পর্শমণি") == ["স", "্", "প", "র", "্", "শ", "ম", "ণ", "ি"]
  # gujarati
  assert String.codepoints("સર્વશ્રેષ્ઠ ઇલાજ") ==
           ["સ", "ર", "્", "વ", "શ", "્", "ર", "ે", "ષ", "્", "ઠ", " ", "ઇ", "લ", "ા", "જ"]
  # japanese
  assert String.codepoints("世界中の一番") == ["世", "界", "中", "の", "一", "番"]
  assert String.codepoints("がガちゃ") == ["が", "ガ", "ち", "ゃ"]
  assert String.codepoints("") == []
  # A grab bag of scripts and symbols, ending in the "ffi" ligature codepoint.
  assert String.codepoints("ϖͲϥЫݎߟΈټϘለДШव׆ש؇؊صلټܗݎޥޘ߉ऌ૫ሏᶆ℆ℙℱ ⅚Ⅷ↠∈⌘①ffi") ==
           ["ϖ", "Ͳ", "ϥ", "Ы", "ݎ", "ߟ", "Έ"] ++
             ["ټ", "Ϙ", "ለ", "Д", "Ш", "व"] ++
             ["׆", "ש", "؇", "؊", "ص", "ل", "ټ"] ++
             ["ܗ", "ݎ", "ޥ", "ޘ", "߉", "ऌ", "૫"] ++
             ["ሏ", "ᶆ", "℆", "ℙ", "ℱ", " ", "⅚"] ++ ["Ⅷ", "↠", "∈", "⌘", "①", "ffi"]
end
test "equivalent?/2" do
  assert String.equivalent?("", "")
  assert String.equivalent?("elixir", "elixir")
  # NOTE(review): the two literals in each pair below render identically but
  # are presumably composed vs. decomposed encodings of the same grapheme —
  # verify the exact codepoints against the upstream file before editing.
  assert String.equivalent?("뢴", "뢴")
  assert String.equivalent?("ṩ", "ṩ")
  # Equivalence is not case folding, nor does it equate different accents.
  refute String.equivalent?("ELIXIR", "elixir")
  refute String.equivalent?("døge", "dóge")
end
# graphemes/1 splits into Unicode extended grapheme clusters.
test "graphemes/1" do
  # Extended — base character plus combining mark is one cluster.
  assert String.graphemes("Ā̀stute") == ["Ā̀", "s", "t", "u", "t", "e"]
  # CLRF — "\r\n" forms a single cluster.
  assert String.graphemes("\r\n\f") == ["\r\n", "\f"]
  # Regional indicator — indicators pair up two-by-two (flag emoji).
  assert String.graphemes("\u{1F1E6}\u{1F1E7}") == ["\u{1F1E6}\u{1F1E7}"]
  assert String.graphemes("\u{1F1E6}\u{1F1E7}\u{1F1E8}") == ["\u{1F1E6}\u{1F1E7}", "\u{1F1E8}"]
  # Hangul — conjoining jamo join with the following syllable.
  assert String.graphemes("\u1100\u115D\uB4A4") == ["ᄀᅝ뒤"]
  # Special Marking with Extended — spacing marks extend the cluster too.
  assert String.graphemes("a\u0300\u0903") == ["a\u0300\u0903"]
end
test "next_grapheme/1" do
  # Returns the first grapheme cluster and the remainder of the string...
  assert String.next_grapheme("Ā̀stute") == {"Ā̀", "stute"}
  # ...and nil once the string is exhausted.
  assert String.next_grapheme("") == nil
end
# first/1 returns the first grapheme, or nil for the empty string.
test "first/1" do
  assert String.first("elixir") == "e"
  assert String.first("íelixr") == "í"
  assert String.first("եոգլից ըմպելիք") == "ե"
  assert String.first("лэіксір") == "л"
  assert String.first("ελιξήριο") == "ε"
  assert String.first("סם חיים") == "ס"
  assert String.first("がガちゃ") == "が"
  # Base character plus combining mark counts as a single grapheme.
  assert String.first("Ā̀stute") == "Ā̀"
  assert String.first("") == nil
end
# last/1 returns the final grapheme, or nil for the empty string.
test "last/1" do
  assert String.last("elixir") == "r"
  assert String.last("elixrí") == "í"
  assert String.last("եոգլից ըմպելիքե") == "ե"
  assert String.last("ліксірэ") == "э"
  assert String.last("ειξήριολ") == "λ"
  assert String.last("סם ייםח") == "ח"
  assert String.last("がガちゃ") == "ゃ"
  # A lone base+combining-mark pair is one grapheme and is returned whole.
  assert String.last("Ā̀") == "Ā̀"
  assert String.last("") == nil
end
# length/1 counts graphemes, not codepoints or bytes.
test "length/1" do
  assert String.length("elixir") == 6
  assert String.length("elixrí") == 6
  assert String.length("եոգլից") == 6
  assert String.length("ліксрэ") == 6
  assert String.length("ειξήριολ") == 8
  assert String.length("סם ייםח") == 7
  assert String.length("がガちゃ") == 4
  # "Ā̀" is two codepoints but a single grapheme, so six in total.
  assert String.length("Ā̀stute") == 6
  # NOTE(review): presumably a ZWJ-joined emoji family — one grapheme
  # cluster. Verify the literal's codepoints before editing.
  assert String.length("👨👩👧👦") == 1
  assert String.length("") == 0
end
# at/2 indexes by grapheme; negative positions count from the end and
# out-of-range positions return nil.
test "at/2" do
  assert String.at("л", 0) == "л"
  assert String.at("elixir", 1) == "l"
  assert String.at("がガちゃ", 2) == "ち"
  assert String.at("л", 10) == nil
  assert String.at("elixir", -1) == "r"
  assert String.at("がガちゃ", -2) == "ち"
  assert String.at("л", -3) == nil
  # The combining-mark cluster at index 0 occupies a single position.
  assert String.at("Ā̀stute", 1) == "s"
  assert String.at("elixir", 6) == nil

  # Only integer positions are accepted.
  assert_raise FunctionClauseError, fn ->
    String.at("elixir", 0.1)
  end

  assert_raise FunctionClauseError, fn ->
    String.at("elixir", -0.1)
  end
end
# slice/2,3 operates on grapheme offsets. Negative starts count from the
# end, out-of-range slices yield "", and ranges may mix signs.
test "slice/2,3" do
  assert String.slice("elixir", 1, 3) == "lix"
  assert String.slice("あいうえお", 2, 2) == "うえ"
  assert String.slice("ειξήριολ", 2, 3) == "ξήρ"
  # Lengths running past the end are clamped to the string.
  assert String.slice("elixir", 3, 4) == "xir"
  assert String.slice("あいうえお", 3, 5) == "えお"
  assert String.slice("ειξήριολ", 5, 4) == "ιολ"
  assert String.slice("elixir", -3, 2) == "xi"
  assert String.slice("あいうえお", -4, 3) == "いうえ"
  assert String.slice("ειξήριολ", -5, 3) == "ήρι"
  # Starts before the beginning of the string yield "".
  assert String.slice("elixir", -10, 1) == ""
  assert String.slice("あいうえお", -10, 2) == ""
  assert String.slice("ειξήριολ", -10, 3) == ""
  assert String.slice("elixir", 8, 2) == ""
  assert String.slice("あいうえお", 6, 2) == ""
  assert String.slice("ειξήριολ", 8, 1) == ""
  assert String.slice("ειξήριολ", 9, 1) == ""
  # Zero-length slices are always "".
  assert String.slice("elixir", 0, 0) == ""
  assert String.slice("elixir", 5, 0) == ""
  assert String.slice("elixir", -5, 0) == ""
  assert String.slice("", 0, 1) == ""
  assert String.slice("", 1, 1) == ""

  # Range form: both bounds are inclusive and may be negative.
  assert String.slice("elixir", 0..-2) == "elixi"
  assert String.slice("elixir", 1..3) == "lix"
  assert String.slice("elixir", -5..-3) == "lix"
  assert String.slice("elixir", -5..3) == "lix"
  assert String.slice("あいうえお", 2..3) == "うえ"
  assert String.slice("ειξήριολ", 2..4) == "ξήρ"
  assert String.slice("elixir", 3..6) == "xir"
  assert String.slice("あいうえお", 3..7) == "えお"
  assert String.slice("ειξήριολ", 5..8) == "ιολ"
  assert String.slice("elixir", -3..-2) == "xi"
  assert String.slice("あいうえお", -4..-2) == "いうえ"
  assert String.slice("ειξήριολ", -5..-3) == "ήρι"
  assert String.slice("elixir", 8..9) == ""
  assert String.slice("あいうえお", 6..7) == ""
  assert String.slice("ειξήριολ", 8..8) == ""
  assert String.slice("ειξήριολ", 9..9) == ""
  assert String.slice("", 0..0) == ""
  assert String.slice("", 1..1) == ""
  # Backwards ranges produce "".
  assert String.slice("あいうえお", -2..-4) == ""
  assert String.slice("あいうえお", -10..-15) == ""
  assert String.slice("hello あいうえお Unicode", 8..-1) == "うえお Unicode"
  assert String.slice("abc", -1..14) == "c"
  # Grapheme clusters (combining marks) are sliced as units.
  assert String.slice("a·̀ͯ‿.⁀:", 0..-2) == "a·̀ͯ‿.⁀"

  # The subject must be a string.
  assert_raise FunctionClauseError, fn ->
    String.slice(nil, 0..1)
  end
end
test "valid?/1" do
  # Well-formed UTF-8, including a noncharacter codepoint (EF B7 90), is valid.
  for good <- ["afds", "øsdfh", "dskfjあska", <<0xEF, 0xB7, 0x90>>] do
    assert String.valid?(good)
  end

  # Malformed byte sequences are invalid wherever they appear.
  for bad <- [<<0xFFFF::16>>, "asd" <> <<0xFFFF::16>>] do
    refute String.valid?(bad)
  end
end
test "chunk/2 with :valid trait" do
  # Empty input yields no chunks.
  assert String.chunk("", :valid) == []
  # Control characters are still valid UTF-8, so everything is one chunk.
  assert String.chunk("ødskfjあ\x11ska", :valid) == ["ødskfjあ\x11ska"]
end
test "chunk/2 with :printable trait" do
  # Empty input yields no chunks.
  assert String.chunk("", :printable) == []
  # A fully printable string stays a single chunk.
  assert String.chunk("ødskfjあska", :printable) == ["ødskfjあska"]
  # A non-printable codepoint becomes its own chunk.
  assert String.chunk("abc\u{0FFFF}def", :printable) == ["abc", <<0x0FFFF::utf8>>, "def"]

  # Consecutive non-printable bytes are grouped into one chunk.
  assert String.chunk("\x06ab\x05cdef\x03\0", :printable) ==
           [<<6>>, "ab", <<5>>, "cdef", <<3, 0>>]
end
test "starts_with?/2" do
  # Positive cases: a single prefix, the full string, and any-of-a-list.
  assert String.starts_with?("hello", "he")
  assert String.starts_with?("hello", "hello")
  assert String.starts_with?("hello", ["hellö", "hell"])
  assert String.starts_with?("エリクシア", "エリ")

  # Negative cases: the empty list matches nothing, nor do non-prefixes.
  refute String.starts_with?("hello", [])
  refute String.starts_with?("hello", "lo")
  refute String.starts_with?("hello", "hellö")
  refute String.starts_with?("hello", ["hellö", "goodbye"])
  refute String.starts_with?("エリクシア", "仙丹")
end
test "ends_with?/2" do
  # Positive cases: a single suffix, the full string, and any-of-a-list.
  assert String.ends_with?("hello", "lo")
  assert String.ends_with?("hello", "hello")
  assert String.ends_with?("hello", ["hell", "lo", "xx"])
  assert String.ends_with?("hello", ["hellö", "lo"])
  assert String.ends_with?("エリクシア", "シア")

  # Negative cases: the empty list matches nothing, nor do non-suffixes.
  refute String.ends_with?("hello", [])
  refute String.ends_with?("hello", "he")
  refute String.ends_with?("hello", "hellö")
  refute String.ends_with?("hello", ["hel", "goodbye"])
  refute String.ends_with?("エリクシア", "仙丹")
end
test "contains?/2" do
  # A plain pattern, a multibyte pattern, and any-of-a-list all match.
  assert String.contains?("elixir of life", "of")
  assert String.contains?("エリクシア", "シ")
  assert String.contains?("elixir of life", ["mercury", "life"])

  # The empty list matches nothing; absent patterns are refuted.
  refute String.contains?("elixir of life", [])
  refute String.contains?("elixir of life", "death")
  refute String.contains?("エリクシア", "仙")
  refute String.contains?("elixir of life", ["death", "mercury", "eternal life"])
end
test "to_charlist/1" do
  # Each codepoint becomes one integer in the resulting charlist.
  assert String.to_charlist("abc") == [?a, ?b, ?c]
  assert String.to_charlist("æß") == [?æ, ?ß]

  # Invalid and truncated UTF-8 raise, naming the offending bytes.
  assert_raise UnicodeConversionError, "invalid encoding starting at <<223, 255>>", fn ->
    String.to_charlist(<<0xDF, 0xFF>>)
  end

  assert_raise UnicodeConversionError, "incomplete encoding starting at <<195>>", fn ->
    String.to_charlist(<<106, 111, 115, 195>>)
  end
end
test "to_float/1" do
  assert String.to_float("3.0") == 3.0

  # An integer-only string is rejected, even when produced at runtime
  # (the closure prevents any compile-time shortcut).
  make_three = fn -> "3" end
  assert_raise ArgumentError, fn -> String.to_float(make_three.()) end
end
test "jaro_distance/2" do
  # Boundary behavior: identical strings score 1.0; any comparison
  # involving the empty string scores 0.0.
  assert String.jaro_distance("same", "same") == 1.0
  assert String.jaro_distance("any", "") == 0.0
  assert String.jaro_distance("", "any") == 0.0

  # Reference pairs with their exact expected scores.
  reference_pairs = [
    {"martha", "marhta", 0.9444444444444445},
    {"martha", "marhha", 0.888888888888889},
    {"marhha", "martha", 0.888888888888889},
    {"dwayne", "duane", 0.8222222222222223},
    {"dixon", "dicksonx", 0.7666666666666666},
    {"xdicksonx", "dixon", 0.7851851851851852},
    {"shackleford", "shackelford", 0.9696969696969697},
    {"dunningham", "cunnigham", 0.8962962962962964},
    {"nichleson", "nichulson", 0.9259259259259259},
    {"jones", "johnson", 0.7904761904761904},
    {"massey", "massie", 0.888888888888889},
    {"abroms", "abrams", 0.888888888888889},
    {"hardin", "martinez", 0.7222222222222222},
    {"itman", "smith", 0.4666666666666666},
    {"jeraldine", "geraldine", 0.9259259259259259},
    {"michelle", "michael", 0.8690476190476191},
    {"julies", "julius", 0.888888888888889},
    {"tanya", "tonya", 0.8666666666666667},
    {"sean", "susan", 0.7833333333333333},
    {"jon", "john", 0.9166666666666666},
    {"jon", "jan", 0.7777777777777777},
    {"семена", "стремя", 0.6666666666666666}
  ]

  for {left, right, expected} <- reference_pairs do
    assert String.jaro_distance(left, right) == expected
  end
end
test "myers_difference/2" do
  # Degenerate cases: pure insertion, pure deletion, both empty, identical.
  assert String.myers_difference("", "abc") == [ins: "abc"]
  assert String.myers_difference("abc", "") == [del: "abc"]
  assert String.myers_difference("", "") == []
  assert String.myers_difference("abc", "abc") == [eq: "abc"]
  # Multibyte codepoints are diffed as whole characters.
  assert String.myers_difference("abc", "aйbc") == [eq: "a", ins: "й", eq: "bc"]
  assert String.myers_difference("aйbc", "abc") == [eq: "a", del: "й", eq: "bc"]
end
# NOTE(review): the original block repeated several NormalizationTest.txt
# assertions verbatim (the Hebrew, Devanagari, Latin, 0344 and Siddham cases
# each appeared two or three times). The duplicates were removed below; one
# copy of every unique assertion is kept.
test "normalize/2" do
  # The literal on each side is canonically equivalent to the other but
  # (presumably) differently encoded; normalization makes the bytes agree.
  # The pairs render identically — verify codepoints before editing.
  assert String.normalize("ŝ", :nfd) == "ŝ"
  assert String.normalize("ḇravô", :nfd) == "ḇravô"
  assert String.normalize("ṩierra", :nfd) == "ṩierra"
  assert String.normalize("뢴", :nfd) == "뢴"
  assert String.normalize("êchǭ", :nfc) == "êchǭ"
  assert String.normalize("거̄", :nfc) == "거̄"
  assert String.normalize("뢴", :nfc) == "뢴"

  ## Error cases
  # Invalid byte sequences pass through every normalization form untouched.
  assert String.normalize(<<15, 216>>, :nfc) == <<15, 216>>
  assert String.normalize(<<15, 216>>, :nfd) == <<15, 216>>
  assert String.normalize(<<216, 15>>, :nfc) == <<216, 15>>
  assert String.normalize(<<216, 15>>, :nfd) == <<216, 15>>
  assert String.normalize(<<15, 216>>, :nfkc) == <<15, 216>>
  assert String.normalize(<<15, 216>>, :nfkd) == <<15, 216>>
  assert String.normalize(<<216, 15>>, :nfkc) == <<216, 15>>
  assert String.normalize(<<216, 15>>, :nfkd) == <<216, 15>>

  ## Cases from NormalizationTest.txt
  # 05B8 05B9 05B1 0591 05C3 05B0 05AC 059F
  # 05B1 05B8 05B9 0591 05C3 05B0 05AC 059F
  # HEBREW POINT QAMATS, HEBREW POINT HOLAM, HEBREW POINT HATAF SEGOL,
  # HEBREW ACCENT ETNAHTA, HEBREW PUNCTUATION SOF PASUQ, HEBREW POINT SHEVA,
  # HEBREW ACCENT ILUY, HEBREW ACCENT QARNEY PARA
  assert String.normalize("ֱָֹ֑׃ְ֬֟", :nfc) == "ֱָֹ֑׃ְ֬֟"

  # 095D (exclusion list)
  # 0922 093C
  # DEVANAGARI LETTER RHA
  assert String.normalize("ढ़", :nfc) == "ढ़"

  # 0061 0315 0300 05AE 0340 0062
  # 00E0 05AE 0300 0315 0062
  # LATIN SMALL LETTER A, COMBINING COMMA ABOVE RIGHT, COMBINING GRAVE ACCENT,
  # HEBREW ACCENT ZINOR, COMBINING GRAVE TONE MARK, LATIN SMALL LETTER B
  assert String.normalize("à֮̀̕b", :nfc) == "à֮̀̕b"

  # 0344
  # 0308 0301
  # COMBINING GREEK DIALYTIKA TONOS
  assert String.normalize("\u0344", :nfc) == "\u0308\u0301"

  # 115B9 0334 115AF
  # 115B9 0334 115AF
  # SIDDHAM VOWEL SIGN AI, COMBINING TILDE OVERLAY, SIDDHAM VOWEL SIGN AA
  assert String.normalize("𑖹̴𑖯", :nfc) == "𑖹̴𑖯"

  # (ff; ff; ff; ff; ff; ) LATIN SMALL LIGATURE FF
  # FB00;FB00;FB00;0066 0066;0066 0066;
  assert String.normalize("ff", :nfkd) == "\u0066\u0066"

  # (fl; fl; fl; fl; fl; ) LATIN SMALL LIGATURE FL
  # FB02;FB02;FB02;0066 006C;0066 006C;
  assert String.normalize("fl", :nfkd) == "\u0066\u006C"

  # (ſt; ſt; ſt; st; st; ) LATIN SMALL LIGATURE LONG S T
  # FB05;FB05;FB05;0073 0074;0073 0074;
  assert String.normalize("ſt", :nfkd) == "\u0073\u0074"

  # (st; st; st; st; st; ) LATIN SMALL LIGATURE ST
  # FB06;FB06;FB06;0073 0074;0073 0074;
  assert String.normalize("\u0073\u0074", :nfkc) == "\u0073\u0074"

  # (ﬓ; ﬓ; ﬓ; մն; մն; ) ARMENIAN SMALL LIGATURE MEN NOW
  # FB13;FB13;FB13;0574 0576;0574 0576;
  assert String.normalize("\u0574\u0576", :nfkc) == "\u0574\u0576"
end
# Carriage return can be a grapheme cluster if followed by
# newline so we test some corner cases here.
test "carriage return" do
  # A bare "\r" (not followed by "\n") is its own grapheme.
  assert String.at("\r\t\v", 0) == "\r"
  assert String.at("\r\t\v", 1) == "\t"
  assert String.at("\r\t\v", 2) == "\v"
  # Invalid bytes (\xFF) still occupy one position each.
  assert String.at("\xFF\r\t\v", 1) == "\r"
  assert String.at("\r\xFF\t\v", 2) == "\t"
  assert String.at("\r\t\xFF\v", 3) == "\v"
  assert String.last("\r\t\v") == "\v"
  assert String.last("\r\xFF\t\xFF\v") == "\v"
  assert String.next_grapheme("\r\t\v") == {"\r", "\t\v"}
  assert String.next_grapheme("\t\v") == {"\t", "\v"}
  assert String.next_grapheme("\v") == {"\v", ""}
  assert String.length("\r\t\v") == 3
  assert String.length("\r\xFF\t\v") == 4
  assert String.length("\r\t\xFF\v") == 4
  assert String.bag_distance("\r\t\xFF\v", "\xFF\r\n\xFF") == 0.25
  # Splitting on "" yields each grapheme with empty strings at both ends.
  assert String.split("\r\t\v", "") == ["", "\r", "\t", "\v", ""]
end
end
| 40.322826 | 98 | 0.56886 |
9e9af799384342e8c6a23a62719a7fa93fe4e74b | 119 | exs | Elixir | test/test_helper.exs | duzzifelipe/ex-stone-openbank | 5a0faa547c3aa3d7f3842739e50cca6b14337124 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | duzzifelipe/ex-stone-openbank | 5a0faa547c3aa3d7f3842739e50cca6b14337124 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | duzzifelipe/ex-stone-openbank | 5a0faa547c3aa3d7f3842739e50cca6b14337124 | [
"Apache-2.0"
] | null | null | null | # Configure formatters
# Emit JUnit XML (e.g. for CI consumption) alongside the standard console
# formatter.
ExUnit.configure(formatters: [JUnitFormatter, ExUnit.CLIFormatter])
# Run it :)
ExUnit.start()
| 19.833333 | 67 | 0.773109 |
9e9b3de44a5e39e88d2cf8d0ef624cec4e73f49d | 1,971 | ex | Elixir | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/model/ad_unit_mobile_content_ads_settings.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/model/ad_unit_mobile_content_ads_settings.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/model/ad_unit_mobile_content_ads_settings.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdSenseHost.V41.Model.AdUnitMobileContentAdsSettings do
  @moduledoc """
  Settings specific to WAP mobile content ads (AFMC - deprecated).

  ## Attributes

  - markupLanguage (String.t): The markup language to use for this ad unit. Defaults to: `null`.
  - scriptingLanguage (String.t): The scripting language to use for this ad unit. Defaults to: `null`.
  - size (String.t): Size of this ad unit. Defaults to: `null`.
  - type (String.t): Type of this ad unit. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :markupLanguage => any(),
          :scriptingLanguage => any(),
          :size => any(),
          :type => any()
        }

  # One declaration per JSON attribute of the model.
  field :markupLanguage
  field :scriptingLanguage
  field :size
  field :type
end
defimpl Poison.Decoder, for: GoogleApi.AdSenseHost.V41.Model.AdUnitMobileContentAdsSettings do
  alias GoogleApi.AdSenseHost.V41.Model.AdUnitMobileContentAdsSettings, as: Model

  # Hand the raw decoded map back to the generated model, which knows how to
  # finish decoding its own fields.
  def decode(value, options), do: Model.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AdSenseHost.V41.Model.AdUnitMobileContentAdsSettings do
  # Generated models all share the generic Gax model encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.578947 | 102 | 0.729579 |
9e9b4b4f8dfe5fba9a462f021b830e8b827df65a | 1,659 | ex | Elixir | clients/manufacturers/lib/google_api/manufacturers/v1/model/count.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/manufacturers/lib/google_api/manufacturers/v1/model/count.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/manufacturers/lib/google_api/manufacturers/v1/model/count.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Manufacturers.V1.Model.Count do
  @moduledoc """
  The number of products in a single package. For more information, see
  https://support.google.com/manufacturers/answer/6124116#count.

  ## Attributes

  * `unit` (*type:* `String.t`, *default:* `nil`) - The unit in which these products are counted.
  * `value` (*type:* `String.t`, *default:* `nil`) - The numeric value of the number of products in a package.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :unit => String.t(),
          :value => String.t()
        }

  # One declaration per JSON attribute of the model.
  field :unit
  field :value
end
defimpl Poison.Decoder, for: GoogleApi.Manufacturers.V1.Model.Count do
  alias GoogleApi.Manufacturers.V1.Model.Count

  # Hand the raw decoded map back to the generated model, which knows how to
  # finish decoding its own fields.
  def decode(value, options), do: Count.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Manufacturers.V1.Model.Count do
  # Generated models all share the generic Gax model encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 32.529412 | 112 | 0.720313 |
9e9b63a9ebeb80ff5ebd5c6781ab384058cf704f | 3,627 | exs | Elixir | test/options_test.exs | alde103/muontrap | 61ec638c2f2bcf30a4362f70eab2efd5833bf0ca | [
"Apache-2.0"
] | null | null | null | test/options_test.exs | alde103/muontrap | 61ec638c2f2bcf30a4362f70eab2efd5833bf0ca | [
"Apache-2.0"
] | null | null | null | test/options_test.exs | alde103/muontrap | 61ec638c2f2bcf30a4362f70eab2efd5833bf0ca | [
"Apache-2.0"
] | null | null | null | defmodule MuonTrap.OptionsTest do
use MuonTrapTest.Case
alias MuonTrap.Options
test "creates random cgroup path when asked" do
options = Options.validate(:cmd, "echo", [], cgroup_base: "base")
assert Map.has_key?(options, :cgroup_path)
["base", other] = String.split(options.cgroup_path, "/")
assert byte_size(other) > 4
end
test "disallow both cgroup_path and cgroup_base" do
assert_raise ArgumentError, fn ->
Options.validate(:cmd, "echo", [], cgroup_base: "base", cgroup_path: "path")
end
end
test "errors match System.cmd ones" do
for context <- [:cmd, :daemon] do
# :enoent on missing executable
assert catch_error(Options.validate(context, "__this_should_not_exist", [], [])) == :enoent
assert_raise ArgumentError, fn ->
Options.validate(context, "echo", ['not_a_binary'], [])
end
assert_raise ArgumentError, fn ->
Options.validate(context, "why\0would_someone_do_this", [], [])
end
end
end
test "cmd and daemon-specific options" do
# :cmd-only
assert Map.get(Options.validate(:cmd, "echo", [], into: ""), :into) == ""
assert_raise ArgumentError, fn ->
Options.validate(:daemon, "echo", [], into: "")
end
# :daemon-only
assert Map.get(Options.validate(:daemon, "echo", [], name: Something), :name) == Something
assert_raise ArgumentError, fn ->
Options.validate(:cmd, "echo", [], name: Something)
end
for level <- [:error, :warn, :info, :debug] do
assert Map.get(Options.validate(:daemon, "echo", [], log_output: level), :log_output) ==
level
assert_raise ArgumentError, fn ->
Options.validate(:cmd, "echo", [], log_output: level)
end
end
assert_raise ArgumentError, fn ->
Options.validate(:daemon, "echo", [], log_output: :bad_level)
end
assert_raise ArgumentError, fn ->
Options.validate(:daemon, "echo", [], msg_callback: false)
end
raise_msg = "Invalid :msg_callback, only functions with /1 arity are allowed"
assert_raise ArgumentError, raise_msg, fn ->
Options.validate(:daemon, "echo", [], msg_callback: &Kernel.+/2)
end
:daemon
|> Options.validate("echo", [], msg_callback: &inspect/1)
|> Map.get(:msg_callback)
|> Kernel.==(&inspect/1)
|> assert()
assert Map.get(Options.validate(:daemon, "echo", [], msg_callback: nil), :msg_callback) == nil
end
test "common commands basically work" do
input = [
cd: "path",
arg0: "arg0",
stderr_to_stdout: true,
parallelism: true,
uid: 5,
gid: "bill",
delay_to_sigkill: 1,
env: [{"KEY", "VALUE"}, {"KEY2", "VALUE2"}],
cgroup_controllers: ["memory", "cpu"],
cgroup_base: "base",
cgroup_sets: [{"memory", "memory.limit_in_bytes", "268435456"}]
]
for context <- [:daemon, :cmd] do
options = Options.validate(context, "echo", [], input)
assert Map.get(options, :cd) == "path"
assert Map.get(options, :arg0) == "arg0"
assert Map.get(options, :stderr_to_stdout) == true
assert Map.get(options, :parallelism) == true
assert Map.get(options, :uid) == 5
assert Map.get(options, :gid) == "bill"
assert Map.get(options, :delay_to_sigkill) == 1
assert Map.get(options, :env) == [{'KEY', 'VALUE'}, {'KEY2', 'VALUE2'}]
assert Map.get(options, :cgroup_controllers) == ["memory", "cpu"]
assert Map.get(options, :cgroup_base) == "base"
assert Map.get(options, :cgroup_sets) == [{"memory", "memory.limit_in_bytes", "268435456"}]
end
end
end
| 31.815789 | 98 | 0.618693 |
9e9b8930d1b432d2f2d1a19d4c5fdae4bdbf0e65 | 3,627 | ex | Elixir | lib/mmo_web/channels/room_channel.ex | sudokid-software/12-day-mmo | 8b3038e754078a3361de6c9b516cb192f2ef0202 | [
"BSD-2-Clause"
] | 1 | 2021-01-04T21:25:35.000Z | 2021-01-04T21:25:35.000Z | lib/mmo_web/channels/room_channel.ex | visheshc14/Prisonic-Fairytale | 91152fbf3662952146e29ff36a45ad20bebe4558 | [
"MIT"
] | null | null | null | lib/mmo_web/channels/room_channel.ex | visheshc14/Prisonic-Fairytale | 91152fbf3662952146e29ff36a45ad20bebe4558 | [
"MIT"
] | null | null | null | defmodule MmoWeb.RoomChannel do
use Phoenix.Channel
alias Mmo.World
alias Mmo.Player
def join("room:lobby", _message, socket) do
{player,
%World{
players: players,
background: background,
foreground: foreground,
items: items,
leaf: leaf,
enemies: enemies
}} = GenServer.call(GameWorld, {:new_player})
send(self(), {:new_player, player})
socket =
socket
|> assign(:player_id, player.id)
|> assign(:x, player.x)
|> assign(:y, player.y)
|> assign(:moving, false)
{:ok,
%{
background: background,
foreground: foreground,
items: Map.values(items),
leaf: leaf,
player: player,
players: Map.values(players),
enemies: Map.values(enemies)
}, socket}
end
def join("room:" <> _private_room_id, _params, _socket) do
{:error, %{reason: "unauthorized"}}
end
def handle_in("key_up", data, socket) do
broadcast!(socket, "update_world", data)
{:noreply, socket}
end
def handle_in(
"pointer_down",
%{"x" => x, "y" => y},
%{assigns: %{moving: true}} = socket
) do
{:noreply, assign(socket, %{x: x, y: y})}
end
def handle_in("pointer_down", %{"x" => x, "y" => y}, socket) do
schedule_work({:move_player})
{:noreply, assign(socket, %{x: x, y: y})}
end
def handle_in(_call, _data, socket) do
{:noreply, socket}
end
def handle_info({:new_player, player}, socket) do
broadcast!(socket, "new_player", %{player: Map.from_struct(player)})
{:noreply, socket}
end
def handle_info({:move_player}, %{assigns: %{player_id: player_id, x: x, y: y}} = socket) do
case GenServer.call(
GameWorld,
{:move_player, %{player_id: player_id, x: x, y: y}}
) do
{:static_object, {action, %Player{x: player_x, y: player_y} = player}} ->
broadcast!(socket, action, %{
player: player
})
{:noreply, assign(socket, %{moving: false, x: player_x, y: player_y})}
{"player_attack", player, enemy} ->
broadcast!(socket, "player_attack", %{
player: player,
enemy: enemy
})
{:noreply, assign(socket, :moving, false)}
{"item", %Player{x: player_x, y: player_y} = player, item, respawned} ->
is_moving =
if (player_x != x or player_y != y) and not respawned do
schedule_work({:move_player})
true
else
false
end
broadcast!(socket, "item", %{
player: player,
item: item
})
{:noreply, assign(socket, :moving, is_moving)}
{action, %Player{x: player_x, y: player_y} = player, respawned} ->
is_moving =
if (player_x != x or player_y != y) and not respawned do
schedule_work({:move_player})
true
else
false
end
broadcast!(socket, action, %{
player: player
})
{:noreply, assign(socket, :moving, is_moving)}
error ->
IO.inspect(error, label: "Error")
{:noreply, assign(socket, %{moving: false})}
end
end
def terminate(reason, %{assigns: %{player_id: player_id}} = socket) do
IO.inspect(%{reason: reason, player_id: player_id}, label: "Terminate")
broadcast!(socket, "player_left", %{player_id: player_id})
GenServer.cast(GameWorld, {:remove_player, player_id})
{:error, reason}
end
defp schedule_work(event, time \\ 300) do
# Interval in MS
Process.send_after(self(), event, time)
end
end
| 26.093525 | 94 | 0.565205 |
9e9b9daf5e48575766793bc47c0c59391e52b0b5 | 874 | ex | Elixir | clients/elixir/generated/lib/cloud_manager_api/model/environment.ex | shinesolutions/cloudmanager-api-clients | d73a25878f6cc57af954362ba8dccc90d54e6131 | [
"Apache-2.0"
] | 3 | 2020-06-23T05:31:52.000Z | 2020-11-26T05:34:57.000Z | clients/elixir/generated/lib/cloud_manager_api/model/environment.ex | shinesolutions/cloudmanager-api-clients | d73a25878f6cc57af954362ba8dccc90d54e6131 | [
"Apache-2.0"
] | 2 | 2021-01-21T01:19:54.000Z | 2021-12-09T22:30:22.000Z | clients/elixir/generated/lib/cloud_manager_api/model/environment.ex | shinesolutions/cloudmanager-api-clients | d73a25878f6cc57af954362ba8dccc90d54e6131 | [
"Apache-2.0"
] | 1 | 2020-11-18T11:48:13.000Z | 2020-11-18T11:48:13.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule CloudManagerAPI.Model.Environment do
  @moduledoc """
  An Environment resource returned by the Cloud Manager API.
  """

  @derive [Poison.Encoder]
  defstruct [
    :"id",
    :"programId",
    :"name",
    :"description",
    :"type",
    :"_links"
  ]

  @type t :: %__MODULE__{
    :"id" => String.t | nil,
    :"programId" => String.t | nil,
    :"name" => String.t | nil,
    :"description" => String.t | nil,
    :"type" => String.t | nil,
    # Fix: reference the struct type (.t) rather than the bare module alias,
    # which would typecheck as the literal atom, not the links struct the
    # decoder below produces.
    :"_links" => CloudManagerAPI.Model.EnvironmentLinks.t | nil
  }
end
defimpl Poison.Decoder, for: CloudManagerAPI.Model.Environment do
import CloudManagerAPI.Deserializer
def decode(value, options) do
value
|> deserialize(:"_links", :struct, CloudManagerAPI.Model.EnvironmentLinks, options)
end
end
| 23 | 91 | 0.646453 |
9e9bcf220fddf17b08a025daa661dc1a1f90f7ee | 50,845 | exs | Elixir | test/paginator_test.exs | KnotchLabs/paginator | 591adad651b42905338aec96eda82e83d4f4e6cd | [
"MIT"
] | null | null | null | test/paginator_test.exs | KnotchLabs/paginator | 591adad651b42905338aec96eda82e83d4f4e6cd | [
"MIT"
] | null | null | null | test/paginator_test.exs | KnotchLabs/paginator | 591adad651b42905338aec96eda82e83d4f4e6cd | [
"MIT"
] | null | null | null | defmodule PaginatorTest do
use Paginator.DataCase
doctest Paginator
alias Calendar.DateTime, as: DT
alias Paginator.Cursor
setup :create_customers_and_payments
test "paginates forward", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
opts = [cursor_fields: [:charged_at, :id], sort_direction: :asc, limit: 4]
page = payments_by_charged_at() |> Repo.paginate(opts)
assert to_ids(page.entries) == to_ids([p6, p4, p1, p7])
assert page.metadata.after == encode_cursor(%{charged_at: p7.charged_at, id: p7.id})
page = payments_by_charged_at() |> Repo.paginate(opts ++ [after: page.metadata.after])
assert to_ids(page.entries) == to_ids([p8, p3, p11, p2])
assert page.metadata.after == encode_cursor(%{charged_at: p2.charged_at, id: p2.id})
page = payments_by_charged_at() |> Repo.paginate(opts ++ [after: page.metadata.after])
assert to_ids(page.entries) == to_ids([p13, p9, p10, p12])
assert page.metadata.after == encode_cursor(%{charged_at: p12.charged_at, id: p12.id})
page = payments_by_charged_at() |> Repo.paginate(opts ++ [after: page.metadata.after])
assert to_ids(page.entries) == to_ids([p5, p14])
assert page.metadata.after == nil
end
test "paginates forward with legacy cursor", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
opts = [cursor_fields: [:charged_at, :id], sort_direction: :asc, limit: 4]
page = payments_by_charged_at() |> Repo.paginate(opts)
assert to_ids(page.entries) == to_ids([p6, p4, p1, p7])
assert %{charged_at: charged_at, id: id} = Cursor.decode(page.metadata.after)
assert charged_at == p7.charged_at
assert id == p7.id
legacy_cursor = encode_legacy_cursor([charged_at, id])
page = payments_by_charged_at() |> Repo.paginate(opts ++ [after: legacy_cursor])
assert to_ids(page.entries) == to_ids([p8, p3, p11, p2])
assert %{charged_at: charged_at, id: id} = Cursor.decode(page.metadata.after)
assert charged_at == p2.charged_at
assert id == p2.id
legacy_cursor = encode_legacy_cursor([charged_at, id])
page = payments_by_charged_at() |> Repo.paginate(opts ++ [after: legacy_cursor])
assert to_ids(page.entries) == to_ids([p13, p9, p10, p12])
assert %{charged_at: charged_at, id: id} = Cursor.decode(page.metadata.after)
assert charged_at == p12.charged_at
assert id == p12.id
legacy_cursor = encode_legacy_cursor([charged_at, id])
page = payments_by_charged_at() |> Repo.paginate(opts ++ [after: legacy_cursor])
assert to_ids(page.entries) == to_ids([p5, p14])
assert page.metadata.after == nil
end
test "paginates backward", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, _p14}
} do
opts = [cursor_fields: [:charged_at, :id], sort_direction: :asc, limit: 4]
page =
payments_by_charged_at()
|> Repo.paginate(opts ++ [before: encode_cursor(%{charged_at: p5.charged_at, id: p5.id})])
assert to_ids(page.entries) == to_ids([p13, p9, p10, p12])
assert page.metadata.before == encode_cursor(%{charged_at: p13.charged_at, id: p13.id})
page = payments_by_charged_at() |> Repo.paginate(opts ++ [before: page.metadata.before])
assert to_ids(page.entries) == to_ids([p8, p3, p11, p2])
assert page.metadata.before == encode_cursor(%{charged_at: p8.charged_at, id: p8.id})
page = payments_by_charged_at() |> Repo.paginate(opts ++ [before: page.metadata.before])
assert to_ids(page.entries) == to_ids([p6, p4, p1, p7])
assert page.metadata.after == encode_cursor(%{charged_at: p7.charged_at, id: p7.id})
assert page.metadata.before == nil
end
test "returns an empty page when there are no results" do
page =
payments_by_status("failed")
|> Repo.paginate(cursor_fields: [:charged_at, :id], limit: 10)
assert page.entries == []
assert page.metadata.after == nil
assert page.metadata.before == nil
end
describe "paginate a collection of payments, sorting by charged_at" do
test "sorts ascending without cursors", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at()
|> Repo.paginate(cursor_fields: [:charged_at, :id], sort_direction: :asc, limit: 50)
assert to_ids(entries) ==
to_ids([p6, p4, p1, p7, p8, p3, p11, p2, p13, p9, p10, p12, p5, p14])
assert metadata == %Metadata{after: nil, before: nil, limit: 50}
end
test "sorts ascending with before cursor", %{
payments: {_p1, p2, p3, _p4, p5, _p6, _p7, p8, p9, p10, p11, p12, p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at()
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc,
before: encode_cursor(%{charged_at: p5.charged_at, id: p5.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p8, p3, p11, p2, p13, p9, p10, p12])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p12.charged_at, id: p12.id}),
before: encode_cursor(%{charged_at: p8.charged_at, id: p8.id}),
limit: 8
}
end
test "sorts ascending with after cursor", %{
payments: {_p1, p2, p3, _p4, p5, _p6, _p7, _p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at()
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc,
after: encode_cursor(%{charged_at: p3.charged_at, id: p3.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p11, p2, p13, p9, p10, p12, p5, p14])
assert metadata == %Metadata{
after: nil,
before: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
limit: 8
}
end
test "sorts ascending with before and after cursor", %{
payments: {_p1, p2, p3, _p4, _p5, _p6, _p7, _p8, p9, _p10, p11, _p12, p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at()
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc,
after: encode_cursor(%{charged_at: p3.charged_at, id: p3.id}),
before: encode_cursor(%{charged_at: p9.charged_at, id: p9.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p11, p2, p13])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p13.charged_at, id: p13.id}),
before: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
limit: 8
}
end
test "sorts ascending nulls last without cursors", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:asc_nulls_last)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc_nulls_last,
limit: 50
)
assert to_ids(entries) ==
to_ids([p6, p4, p1, p7, p8, p3, p11, p2, p13, p9, p10, p12, p5, p14])
assert metadata == %Metadata{after: nil, before: nil, limit: 50}
end
test "sorts ascending nulls last with before cursor", %{
payments: {_p1, p2, p3, _p4, p5, _p6, _p7, p8, p9, p10, p11, p12, p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:asc_nulls_last)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc_nulls_last,
before: encode_cursor(%{charged_at: p5.charged_at, id: p5.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p8, p3, p11, p2, p13, p9, p10, p12])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p12.charged_at, id: p12.id}),
before: encode_cursor(%{charged_at: p8.charged_at, id: p8.id}),
limit: 8
}
end
test "sorts ascending nulls last with after cursor", %{
payments: {_p1, p2, p3, _p4, p5, _p6, _p7, _p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:asc_nulls_last)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc_nulls_last,
after: encode_cursor(%{charged_at: p3.charged_at, id: p3.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p11, p2, p13, p9, p10, p12, p5, p14])
assert metadata == %Metadata{
after: nil,
before: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
limit: 8
}
end
test "sorts ascending nulls last with before and after cursor", %{
payments: {_p1, p2, p3, _p4, _p5, _p6, _p7, _p8, p9, _p10, p11, _p12, p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:asc_nulls_last)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc_nulls_last,
after: encode_cursor(%{charged_at: p3.charged_at, id: p3.id}),
before: encode_cursor(%{charged_at: p9.charged_at, id: p9.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p11, p2, p13])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p13.charged_at, id: p13.id}),
before: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
limit: 8
}
end
test "sorts ascending nulls first without cursors", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:asc_nulls_first)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc_nulls_first,
limit: 50
)
assert to_ids(entries) ==
to_ids([p5, p14, p6, p4, p1, p7, p8, p3, p11, p2, p13, p9, p10, p12])
assert metadata == %Metadata{after: nil, before: nil, limit: 50}
end
test "sorts ascending nulls first with before cursor", %{
payments: {p1, p2, p3, p4, _p5, p6, p7, p8, _p9, _p10, p11, _p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:asc_nulls_first)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc_nulls_first,
before: encode_cursor(%{charged_at: p2.charged_at, id: p2.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p14, p6, p4, p1, p7, p8, p3, p11])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
before: encode_cursor(%{charged_at: p14.charged_at, id: p14.id}),
limit: 8
}
end
test "sorts ascending nulls first with after cursor", %{
payments: {p1, _p2, p3, p4, p5, p6, p7, p8, _p9, _p10, p11, _p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:asc_nulls_first)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc_nulls_first,
after: encode_cursor(%{charged_at: p5.charged_at, id: p5.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p14, p6, p4, p1, p7, p8, p3, p11])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
before: encode_cursor(%{charged_at: p14.charged_at, id: p14.id}),
limit: 8
}
end
test "sorts ascending nulls first with before and after cursor", %{
payments: {p1, _p2, _p3, p4, p5, p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:asc_nulls_first)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc_nulls_first,
after: encode_cursor(%{charged_at: p5.charged_at, id: p5.id}),
before: encode_cursor(%{charged_at: p1.charged_at, id: p1.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p14, p6, p4])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p4.charged_at, id: p4.id}),
before: encode_cursor(%{charged_at: p14.charged_at, id: p14.id}),
limit: 8
}
end
test "sorts descending without cursors", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc)
|> Repo.paginate(cursor_fields: [:charged_at, :id], sort_direction: :desc, limit: 50)
assert to_ids(entries) ==
to_ids([p14, p5, p12, p10, p9, p13, p2, p11, p3, p8, p7, p1, p4, p6])
assert metadata == %Metadata{after: nil, before: nil, limit: 50}
end
test "sorts descending with before cursor", %{
payments: {_p1, _p2, _p3, _p4, p5, _p6, _p7, _p8, _p9, p10, _p11, p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc,
before: encode_cursor(%{charged_at: p10.charged_at, id: p10.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p14, p5, p12])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p12.charged_at, id: p12.id}),
before: nil,
limit: 8
}
end
test "sorts descending with after cursor", %{
payments: {p1, p2, p3, _p4, _p5, _p6, p7, p8, p9, p10, p11, _p12, p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc,
after: encode_cursor(%{charged_at: p10.charged_at, id: p10.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p9, p13, p2, p11, p3, p8, p7, p1])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p1.charged_at, id: p1.id}),
before: encode_cursor(%{charged_at: p9.charged_at, id: p9.id}),
limit: 8
}
end
test "sorts descending with before and after cursor", %{
payments: {_p1, p2, p3, _p4, _p5, _p6, _p7, _p8, p9, p10, p11, _p12, p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc,
after: encode_cursor(%{charged_at: p10.charged_at, id: p10.id}),
before: encode_cursor(%{charged_at: p3.charged_at, id: p3.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p9, p13, p2, p11])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
before: encode_cursor(%{charged_at: p9.charged_at, id: p9.id}),
limit: 8
}
end
test "sorts descending nulls first without cursors", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc_nulls_first)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc_nulls_first,
limit: 50
)
assert to_ids(entries) ==
to_ids([p14, p5, p12, p10, p9, p13, p2, p11, p3, p8, p7, p1, p4, p6])
assert metadata == %Metadata{after: nil, before: nil, limit: 50}
end
test "sorts descending nulls first with before cursor", %{
payments: {_p1, _p2, _p3, _p4, p5, _p6, _p7, _p8, _p9, p10, _p11, p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc_nulls_first)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc_nulls_first,
before: encode_cursor(%{charged_at: p10.charged_at, id: p10.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p14, p5, p12])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p12.charged_at, id: p12.id}),
before: nil,
limit: 8
}
end
test "sorts descending nulls first with after cursor", %{
payments: {p1, p2, p3, _p4, _p5, _p6, p7, p8, p9, p10, p11, _p12, p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc_nulls_first)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc_nulls_first,
after: encode_cursor(%{charged_at: p10.charged_at, id: p10.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p9, p13, p2, p11, p3, p8, p7, p1])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p1.charged_at, id: p1.id}),
before: encode_cursor(%{charged_at: p9.charged_at, id: p9.id}),
limit: 8
}
end
test "sorts descending nulls first with before and after cursor", %{
payments: {_p1, p2, p3, _p4, _p5, _p6, _p7, _p8, p9, p10, p11, _p12, p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc_nulls_first)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc_nulls_first,
after: encode_cursor(%{charged_at: p10.charged_at, id: p10.id}),
before: encode_cursor(%{charged_at: p3.charged_at, id: p3.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p9, p13, p2, p11])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
before: encode_cursor(%{charged_at: p9.charged_at, id: p9.id}),
limit: 8
}
end
test "sorts descending nulls last without cursors", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc_nulls_last)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc_nulls_last,
limit: 50
)
assert to_ids(entries) ==
to_ids([p12, p10, p9, p13, p2, p11, p3, p8, p7, p1, p4, p6, p14, p5])
assert metadata == %Metadata{after: nil, before: nil, limit: 50}
end
test "sorts descending nulls last with before cursor", %{
payments: {p1, _p2, p3, p4, p5, p6, p7, p8, _p9, _p10, p11, _p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc_nulls_last)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc_nulls_last,
before: encode_cursor(%{charged_at: p5.charged_at, id: p5.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p11, p3, p8, p7, p1, p4, p6, p14])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p14.charged_at, id: p14.id}),
before: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
limit: 8
}
end
test "sorts descending nulls last with after cursor", %{
payments: {p1, _p2, p3, p4, p5, p6, p7, p8, _p9, _p10, p11, _p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc_nulls_last)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc_nulls_last,
after: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p3, p8, p7, p1, p4, p6, p14, p5])
assert metadata == %Metadata{
after: nil,
before: encode_cursor(%{charged_at: p3.charged_at, id: p3.id}),
limit: 8
}
end
test "sorts descending nulls last with before and after cursor", %{
payments: {_p1, p2, p3, _p4, _p5, _p6, _p7, _p8, p9, p10, p11, _p12, p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc_nulls_last)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc_nulls_last,
after: encode_cursor(%{charged_at: p10.charged_at, id: p10.id}),
before: encode_cursor(%{charged_at: p3.charged_at, id: p3.id}),
limit: 8
)
assert to_ids(entries) == to_ids([p9, p13, p2, p11])
assert metadata == %Metadata{
after: encode_cursor(%{charged_at: p11.charged_at, id: p11.id}),
before: encode_cursor(%{charged_at: p9.charged_at, id: p9.id}),
limit: 8
}
end
test "sorts ascending with before cursor at beginning of collection", %{
payments: {_p1, _p2, _p3, _p4, _p5, p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at()
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc,
before: encode_cursor(%{charged_at: p6.charged_at, id: p6.id}),
limit: 8
)
assert to_ids(entries) == to_ids([])
assert metadata == %Metadata{after: nil, before: nil, limit: 8}
end
test "sorts ascending with after cursor at end of collection", %{
payments: {_p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at()
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :asc,
after: encode_cursor(%{charged_at: p14.charged_at, id: p14.id}),
limit: 8
)
assert to_ids(entries) == to_ids([])
assert metadata == %Metadata{after: nil, before: nil, limit: 8}
end
test "sorts descending with before cursor at beginning of collection", %{
payments: {_p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc,
before: encode_cursor(%{charged_at: p14.charged_at, id: p14.id}),
limit: 8
)
assert to_ids(entries) == to_ids([])
assert metadata == %Metadata{after: nil, before: nil, limit: 8}
end
test "sorts descending with after cursor at end of collection", %{
payments: {_p1, _p2, _p3, _p4, _p5, p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_charged_at(:desc)
|> Repo.paginate(
cursor_fields: [:charged_at, :id],
sort_direction: :desc,
after: encode_cursor(%{charged_at: p6.charged_at, id: p6.id}),
limit: 8
)
assert to_ids(entries) == to_ids([])
assert metadata == %Metadata{after: nil, before: nil, limit: 8}
end
end
  describe "paginate a collection of payments with customer filter, sorting by amount, charged_at" do
    # The base query already has a where-clause (restricted to one customer);
    # the three-field cursor conditions must compose with it, not replace it.
    test "multiple cursor_fields with pre-existing where filter in query", %{
      customers: {c1, _c2, _c3},
      payments: {_p1, _p2, _p3, _p4, _p5, p6, p7, p8, p9, _p10, _p11, _p12, _p13, _p14}
    } do
      %Page{entries: entries, metadata: metadata} =
        customer_payments_by_charged_at_and_amount(c1)
        |> Repo.paginate(cursor_fields: [:charged_at, :amount, :id], limit: 2)
      assert to_ids(entries) == to_ids([p6, p7])
      # Second page resumes from the first page's `after` cursor.
      %Page{entries: entries, metadata: _metadata} =
        customer_payments_by_charged_at_and_amount(c1)
        |> Repo.paginate(
          cursor_fields: [:charged_at, :amount, :id],
          limit: 2,
          after: metadata.after
        )
      assert to_ids(entries) == to_ids([p8, p9])
    end
    # A `before` cursor built from p7's fields must return only p6.
    test "before cursor with multiple cursor_fields and pre-existing where filter in query", %{
      customers: {c1, _c2, _c3},
      payments: {_p1, _p2, _p3, _p4, _p5, p6, p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
    } do
      assert %Page{entries: [payment], metadata: _metadata} =
               customer_payments_by_charged_at_and_amount(c1)
               |> Repo.paginate(
                 cursor_fields: [:charged_at, :amount, :id],
                 before:
                   encode_cursor(%{amount: p7.amount, charged_at: p7.charged_at, id: p7.id}),
                 limit: 1
               )
      assert payment.id == p6.id
    end
  end
describe "paginate a collection of payments, sorting by customer name" do
test "raises error when binding not found", %{
payments: {_p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, p12, _p13, _p14}
} do
assert_raise ArgumentError,
"Could not find binding `bogus_binding` in query aliases: %{customer: 1, payments: 0}",
fn ->
%Page{} =
payments_by_customer_name()
|> Repo.paginate(
cursor_fields: [
{{:bogus_binding, :id}, :asc},
{{:bogus_binding, :name}, :asc}
],
limit: 50,
before:
encode_cursor(%{
{:bogus_binding, :id} => p12.id,
{:bogus_binding, :name} => p12.customer.name
})
)
end
end
test "sorts with mixed bindingless, bound columns", %{
payments: {_p1, _p2, _p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name()
|> Repo.paginate(
cursor_fields: [{:id, :asc}, {{:customer, :name}, :asc}],
before: encode_cursor(%{:id => p12.id, {:customer, :name} => p12.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([p4, p5, p6, p7, p8, p9, p10, p11])
assert metadata == %Metadata{
after: encode_cursor(%{:id => p11.id, {:customer, :name} => p11.customer.name}),
before: encode_cursor(%{:id => p4.id, {:customer, :name} => p4.customer.name}),
limit: 8
}
end
test "sorts with mixed columns without direction and bound columns", %{
payments: {_p1, _p2, _p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name()
|> Repo.paginate(
cursor_fields: [:id, {{:customer, :name}, :asc}],
before: encode_cursor(%{:id => p12.id, {:customer, :name} => p12.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([p4, p5, p6, p7, p8, p9, p10, p11])
assert metadata == %Metadata{
after: encode_cursor(%{:id => p11.id, {:customer, :name} => p11.customer.name}),
before: encode_cursor(%{:id => p4.id, {:customer, :name} => p4.customer.name}),
limit: 8
}
end
test "sorts ascending without cursors", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name()
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :asc}, {{:payments, :id}, :asc}],
limit: 50
)
assert to_ids(entries) ==
to_ids([p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14])
assert metadata == %Metadata{after: nil, before: nil, limit: 50}
end
test "sorts ascending with before cursor", %{
payments: {_p1, _p2, _p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name()
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :asc}, {{:payments, :id}, :asc}],
before:
encode_cursor(%{{:payments, :id} => p12.id, {:customer, :name} => p12.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([p4, p5, p6, p7, p8, p9, p10, p11])
assert metadata == %Metadata{
after:
encode_cursor(%{
{:payments, :id} => p11.id,
{:customer, :name} => p11.customer.name
}),
before:
encode_cursor(%{
{:payments, :id} => p4.id,
{:customer, :name} => p4.customer.name
}),
limit: 8
}
end
test "sorts ascending with after cursor", %{
payments: {_p1, _p2, _p3, _p4, _p5, _p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name()
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :asc}, {{:payments, :id}, :asc}],
after:
encode_cursor(%{{:payments, :id} => p7.id, {:customer, :name} => p7.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([p8, p9, p10, p11, p12, p13, p14])
assert metadata == %Metadata{
after: nil,
before:
encode_cursor(%{
{:payments, :id} => p8.id,
{:customer, :name} => p8.customer.name
}),
limit: 8
}
end
test "sorts ascending with before and after cursor", %{
payments: {_p1, _p2, _p3, _p4, _p5, _p6, p7, p8, p9, p10, p11, _p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name()
|> Repo.paginate(
cursor_fields: [{{:payments, :id}, :asc}, {{:customer, :name}, :asc}],
after:
encode_cursor(%{{:payments, :id} => p7.id, {:customer, :name} => p7.customer.name}),
before:
encode_cursor(%{{:payments, :id} => p11.id, {:customer, :name} => p11.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([p8, p9, p10])
assert metadata == %Metadata{
after:
encode_cursor(%{
{:payments, :id} => p10.id,
{:customer, :name} => p10.customer.name
}),
before:
encode_cursor(%{
{:payments, :id} => p8.id,
{:customer, :name} => p8.customer.name
}),
limit: 8
}
end
test "sorts descending without cursors", %{
payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name(:desc, :desc)
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :desc}, {{:payments, :id}, :desc}],
limit: 50
)
assert to_ids(entries) ==
to_ids([p14, p13, p12, p11, p10, p9, p8, p7, p6, p5, p4, p3, p2, p1])
assert metadata == %Metadata{after: nil, before: nil, limit: 50}
end
test "sorts descending with before cursor", %{
payments: {_p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, p12, p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name(:desc)
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :desc}, {{:payments, :id}, :desc}],
before:
encode_cursor(%{{:payments, :id} => p12.id, {:customer, :name} => p12.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([p14, p13])
assert metadata == %Metadata{
after:
encode_cursor(%{
{:payments, :id} => p13.id,
{:customer, :name} => p13.customer.name
}),
before: nil,
limit: 8
}
end
test "sorts descending with after cursor", %{
payments: {_p1, _p2, _p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name(:desc, :desc)
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :desc}, {{:payments, :id}, :desc}],
sort_direction: :desc,
after:
encode_cursor(%{{:payments, :id} => p12.id, {:customer, :name} => p12.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([p11, p10, p9, p8, p7, p6, p5, p4])
assert metadata == %Metadata{
after:
encode_cursor(%{
{:payments, :id} => p4.id,
{:customer, :name} => p4.customer.name
}),
before:
encode_cursor(%{
{:payments, :id} => p11.id,
{:customer, :name} => p11.customer.name
}),
limit: 8
}
end
test "sorts descending with before and after cursor", %{
payments: {_p1, _p2, _p3, _p4, _p5, _p6, p7, p8, p9, p10, p11, p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name(:desc, :desc)
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :desc}, {{:payments, :id}, :desc}],
after:
encode_cursor(%{{:payments, :id} => p12.id, {:customer, :name} => p12.customer.name}),
before:
encode_cursor(%{{:payments, :id} => p7.id, {:customer, :name} => p7.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([p11, p10, p9, p8])
assert metadata == %Metadata{
after:
encode_cursor(%{
{:payments, :id} => p8.id,
{:customer, :name} => p8.customer.name
}),
before:
encode_cursor(%{
{:payments, :id} => p11.id,
{:customer, :name} => p11.customer.name
}),
limit: 8
}
end
test "sorts ascending with before cursor at beginning of collection", %{
payments: {p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name()
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :asc}, {{:payments, :id}, :asc}],
before:
encode_cursor(%{{:payments, :id} => p1.id, {:customer, :name} => p1.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([])
assert metadata == %Metadata{after: nil, before: nil, limit: 8}
end
test "sorts ascending with after cursor at end of collection", %{
payments: {_p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name()
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :asc}, {{:payments, :id}, :asc}],
after:
encode_cursor(%{{:payments, :id} => p14.id, {:customer, :name} => p14.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([])
assert metadata == %Metadata{after: nil, before: nil, limit: 8}
end
test "sorts descending with before cursor at beginning of collection", %{
payments: {_p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name(:desc, :desc)
|> Repo.paginate(
cursor_fields: [{{:customer, :name}, :desc}, {{:payments, :id}, :desc}],
before:
encode_cursor(%{{:payments, :id} => p14.id, {:customer, :name} => p14.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([])
assert metadata == %Metadata{after: nil, before: nil, limit: 8}
end
test "sorts descending with after cursor at end of collection", %{
payments: {p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
} do
%Page{entries: entries, metadata: metadata} =
payments_by_customer_name(:desc, :desc)
|> Repo.paginate(
cursor_fields: [{{:payments, :id}, :desc}, {{:customer, :name}, :desc}],
after:
encode_cursor(%{{:payments, :id} => p1.id, {:customer, :name} => p1.customer.name}),
limit: 8
)
assert to_ids(entries) == to_ids([])
assert metadata == %Metadata{after: nil, before: nil, limit: 8}
end
# The join column {:address, :city} is not on the Payment schema, so a custom
# fetch_cursor_value_fun digs the value out of the preloaded association;
# every other field falls back to Paginator's default lookup.
test "sorts on 2nd level join column with a custom cursor value function", %{
  payments: {_p1, _p2, _p3, _p4, _p5, p6, p7, p8, _p9, _p10, _p11, _p12, _p13, _p14}
} do
  %Page{entries: entries, metadata: metadata} =
    payments_by_address_city()
    |> Repo.paginate(
      cursor_fields: [{{:address, :city}, :asc}, id: :asc],
      before: nil,
      limit: 3,
      fetch_cursor_value_fun: fn
        schema, {:address, :city} ->
          schema.customer.address.city

        schema, field ->
          Paginator.default_fetch_cursor_value(schema, field)
      end
    )

  assert to_ids(entries) == to_ids([p6, p7, p8])

  # Preload the association so the expected after-cursor can be built the
  # same way the custom function builds it.
  p8 = Repo.preload(p8, customer: :address)

  assert metadata == %Metadata{
           after:
             encode_cursor(%{{:address, :city} => p8.customer.address.city, :id => p8.id}),
           before: nil,
           limit: 3
         }
end
# p5 and p14 have a nil charged_at; an after-cursor of all-nil values placed
# in a descending sort must start the page right behind those nil records
# and still return a full page of 8.
test "sorts with respect to nil values", %{
  payments: {
    _p1,
    _p2,
    _p3,
    _p4,
    _p5,
    _p6,
    _p7,
    _p8,
    _p9,
    _p10,
    p11,
    _p12,
    _p13,
    p14
  }
} do
  %Page{entries: entries, metadata: metadata} =
    payments_by_charged_at(:desc)
    |> Repo.paginate(
      cursor_fields: [:charged_at, :id],
      sort_direction: :desc,
      after: encode_cursor(%{charged_at: nil, id: nil}),
      limit: 8
    )

  assert Enum.count(entries) == 8

  assert metadata == %Metadata{
           before: encode_cursor(%{charged_at: p14.charged_at, id: p14.id}),
           limit: 8,
           after: encode_cursor(%{charged_at: p11.charged_at, id: p11.id})
         }
end
end
# Without an explicit :limit the paginator falls back to a page size of 50,
# which here returns the whole 14-record fixture in one page.
test "applies a default limit if none is provided", %{
  payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}
} do
  %Page{entries: entries, metadata: metadata} =
    payments_by_customer_name()
    |> Repo.paginate(cursor_fields: [:id], sort_direction: :asc)

  assert to_ids(entries) ==
           to_ids([p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14])

  assert metadata == %Metadata{after: nil, before: nil, limit: 50}
end
# A non-positive :limit is clamped up to the minimum page size of 1.
test "enforces the minimum limit", %{
  payments: {p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
} do
  %Page{entries: entries, metadata: metadata} =
    payments_by_customer_name()
    |> Repo.paginate(cursor_fields: [{:customer, :name}, :id], sort_direction: :asc, limit: 0)

  assert to_ids(entries) == to_ids([p1])

  assert metadata == %Metadata{
           after: encode_cursor(%{{:customer, :name} => p1.customer.name, :id => p1.id}),
           before: nil,
           limit: 1
         }
end
describe "with include_total_count" do
  # total_count_limit: :infinity counts every matching row with no cap.
  test "when set to :infinity", %{
    payments: {_p1, _p2, _p3, _p4, p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
  } do
    %Page{metadata: metadata} =
      payments_by_customer_name()
      |> Repo.paginate(
        cursor_fields: [:id],
        sort_direction: :asc,
        limit: 5,
        total_count_limit: :infinity,
        include_total_count: true
      )

    assert metadata == %Metadata{
             after: encode_cursor(%{id: p5.id}),
             before: nil,
             limit: 5,
             total_count: 14,
             total_count_cap_exceeded: false
           }
  end

  # Without an explicit cap, all 14 fixture rows are counted and the
  # cap-exceeded flag stays false.
  test "when cap not exceeded", %{
    payments: {_p1, _p2, _p3, _p4, p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
  } do
    %Page{metadata: metadata} =
      payments_by_customer_name()
      |> Repo.paginate(
        cursor_fields: [:id],
        sort_direction: :asc,
        limit: 5,
        include_total_count: true
      )

    assert metadata == %Metadata{
             after: encode_cursor(%{id: p5.id}),
             before: nil,
             limit: 5,
             total_count: 14,
             total_count_cap_exceeded: false
           }
  end

  # A cap below the real count (10 < 14) reports the capped value and sets
  # the cap-exceeded flag.
  test "when cap exceeded", %{
    payments: {_p1, _p2, _p3, _p4, p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
  } do
    %Page{metadata: metadata} =
      payments_by_customer_name()
      |> Repo.paginate(
        cursor_fields: [:id],
        sort_direction: :asc,
        limit: 5,
        include_total_count: true,
        total_count_limit: 10
      )

    assert metadata == %Metadata{
             after: encode_cursor(%{id: p5.id}),
             before: nil,
             limit: 5,
             total_count: 10,
             total_count_cap_exceeded: true
           }
  end

  # Counting can key off a non-:id column via total_count_primary_key_field.
  test "when custom total_count_primary_key_field", %{
    addresses: {_a1, a2, _a3}
  } do
    %Page{metadata: metadata} =
      from(a in Address, select: a)
      |> Repo.paginate(
        cursor_fields: [:city],
        sort_direction: :asc,
        limit: 2,
        include_total_count: true,
        total_count_primary_key_field: :city
      )

    assert metadata == %Metadata{
             after: encode_cursor(%{city: a2.city}),
             before: nil,
             limit: 2,
             total_count: 3,
             total_count_cap_exceeded: false
           }
  end
end
# `before`/`after` cursors are user input: a cursor containing a serialized
# anonymous function must be rejected while decoding and never evaluated.
test "when before parameter is erlang term, we do not execute the code", %{} do
  # before and after, are user inputs, we need to make sure that they are
  # handled safely.
  test_pid = self()

  # If this payload were ever invoked it would message the test process,
  # which the refute_receive below would detect.
  exploit = fn _, _ ->
    send(test_pid, :rce)
    {:cont, []}
  end

  payload =
    exploit
    |> :erlang.term_to_binary()
    |> Base.url_encode64()

  assert_raise(ArgumentError, ~r/^cannot deserialize.+/, fn ->
    payments_by_amount_and_charged_at(:asc, :desc)
    |> Repo.paginate(
      cursor_fields: [amount: :asc, charged_at: :desc, id: :asc],
      before: payload,
      limit: 3
    )
  end)

  refute_receive :rce, 1000, "Remote Code Execution Detected"
end
# cursor_for_record/2 builds a cursor for a single record from only the
# listed fields.
test "per-record cursor generation", %{
  payments: {p1, _p2, _p3, _p4, _p5, _p6, _p7, p8, _p9, _p10, _p11, _p12, _p13, _p14}
} do
  assert Paginator.cursor_for_record(p1, charged_at: :asc, id: :asc) ==
           encode_cursor(%{charged_at: p1.charged_at, id: p1.id})

  assert Paginator.cursor_for_record(p8, amount: :asc) == encode_cursor(%{amount: p8.amount})
end
# The optional third argument overrides how each field value is fetched;
# here every field except :id is replaced with the constant "10".
test "per-record cursor generation with custom cursor value function", %{
  payments: {p1, _p2, _p3, _p4, _p5, _p6, _p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
} do
  assert Paginator.cursor_for_record(p1, [charged_at: :asc, id: :asc], fn schema, field ->
           case field do
             :id -> Map.get(schema, :id)
             _ -> "10"
           end
         end) == encode_cursor(%{charged_at: "10", id: p1.id})
end
# Mixed sort directions (amount asc, charged_at desc) paginating backwards
# from p6.
test "sorts on two different directions with before cursor", %{
  payments: {_p1, _p2, _p3, p4, p5, p6, p7, _p8, _p9, _p10, _p11, _p12, _p13, _p14}
} do
  %Page{entries: entries, metadata: metadata} =
    payments_by_amount_and_charged_at(:asc, :desc)
    |> Repo.paginate(
      cursor_fields: [amount: :asc, charged_at: :desc, id: :asc],
      before: encode_cursor(%{amount: p6.amount, charged_at: p6.charged_at, id: p6.id}),
      limit: 3
    )

  assert to_ids(entries) == to_ids([p5, p7, p4])

  assert metadata == %Metadata{
           after: encode_cursor(%{amount: p4.amount, charged_at: p4.charged_at, id: p4.id}),
           before: nil,
           limit: 3
         }
end
# Mixed sort directions (amount asc, charged_at desc) paginating forwards
# from p4.
test "sorts on two different directions with after cursor", %{
  payments: {_p1, _p2, _p3, p4, _p5, p6, _p7, p8, p9, _p10, _p11, _p12, _p13, _p14}
} do
  %Page{entries: entries, metadata: metadata} =
    payments_by_amount_and_charged_at(:asc, :desc)
    |> Repo.paginate(
      cursor_fields: [amount: :asc, charged_at: :desc, id: :asc],
      after: encode_cursor(%{amount: p4.amount, charged_at: p4.charged_at, id: p4.id}),
      limit: 3
    )

  assert to_ids(entries) == to_ids([p6, p8, p9])

  assert metadata == %Metadata{
           after: encode_cursor(%{amount: p9.amount, charged_at: p9.charged_at, id: p9.id}),
           before: encode_cursor(%{amount: p6.amount, charged_at: p6.charged_at, id: p6.id}),
           limit: 3
         }
end
# Mixed sort directions (amount desc, charged_at asc) with the page bounded
# by both an after- and a before-cursor.
test "sorts on two different directions with before and after cursor", %{
  payments: {_p1, _p2, _p3, p4, _p5, p6, p7, p8, p9, _p10, _p11, _p12, _p13, _p14}
} do
  %Page{entries: entries, metadata: metadata} =
    payments_by_amount_and_charged_at(:desc, :asc)
    |> Repo.paginate(
      cursor_fields: [amount: :desc, charged_at: :asc, id: :asc],
      after: encode_cursor(%{amount: p9.amount, charged_at: p9.charged_at, id: p9.id}),
      before: encode_cursor(%{amount: p7.amount, charged_at: p7.charged_at, id: p7.id}),
      limit: 8
    )

  assert to_ids(entries) == to_ids([p8, p6, p4])

  assert metadata == %Metadata{
           after: encode_cursor(%{amount: p4.amount, charged_at: p4.charged_at, id: p4.id}),
           before: encode_cursor(%{amount: p8.amount, charged_at: p8.charged_at, id: p8.id}),
           limit: 8
         }
end
# Project a list of records down to their ids for order-sensitive comparison.
defp to_ids(entries) do
  Enum.map(entries, fn entry -> entry.id end)
end
# Shared fixture: three customers (one address each) and fourteen payments
# spread across them. p5 and p14 deliberately have a nil charged_at so
# nil-ordering behaviour can be exercised; p4-p9 carry explicit amounts for
# the multi-direction sort tests.
defp create_customers_and_payments(_context) do
  c1 = insert(:customer, %{name: "Bob"})
  c2 = insert(:customer, %{name: "Alice"})
  c3 = insert(:customer, %{name: "Charlie"})

  a1 = insert(:address, city: "London", customer: c1)
  a2 = insert(:address, city: "New York", customer: c2)
  a3 = insert(:address, city: "Tokyo", customer: c3)

  p1 = insert(:payment, customer: c2, charged_at: days_ago(11))
  p2 = insert(:payment, customer: c2, charged_at: days_ago(6))
  p3 = insert(:payment, customer: c2, charged_at: days_ago(8))
  p4 = insert(:payment, customer: c2, amount: 2, charged_at: days_ago(12))
  p5 = insert(:payment, customer: c2, amount: 2, charged_at: nil)
  p6 = insert(:payment, customer: c1, amount: 3, charged_at: days_ago(13))
  p7 = insert(:payment, customer: c1, amount: 2, charged_at: days_ago(10))
  p8 = insert(:payment, customer: c1, amount: 4, charged_at: days_ago(9))
  p9 = insert(:payment, customer: c1, amount: 5, charged_at: days_ago(4))
  p10 = insert(:payment, customer: c3, charged_at: days_ago(3))
  p11 = insert(:payment, customer: c3, charged_at: days_ago(7))
  p12 = insert(:payment, customer: c3, charged_at: days_ago(2))
  p13 = insert(:payment, customer: c3, charged_at: days_ago(5))
  p14 = insert(:payment, customer: c3, charged_at: nil)

  {:ok,
   customers: {c1, c2, c3},
   addresses: {a1, a2, a3},
   payments: {p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14}}
end
# Payments with the given status, ordered by charged_at then id in the
# caller-chosen direction.
defp payments_by_status(status, direction \\ :asc) do
  from(
    p in Payment,
    where: p.status == ^status,
    order_by: [{^direction, p.charged_at}, {^direction, p.id}],
    select: p
  )
end
# All payments ordered by amount and charged_at (directions chosen per
# column), with ascending id as the deterministic tie-breaker.
defp payments_by_amount_and_charged_at(amount_direction, charged_at_direction) do
  from(
    p in Payment,
    order_by: [
      {^amount_direction, p.amount},
      {^charged_at_direction, p.charged_at},
      {:asc, p.id}
    ],
    select: p
  )
end
# All payments ordered by charged_at then id in the given direction.
defp payments_by_charged_at(direction \\ :asc) do
  from(
    p in Payment,
    order_by: [{^direction, p.charged_at}, {^direction, p.id}],
    select: p
  )
end
# Payments joined to their customer, ordered by customer name then payment
# id. The named bindings (:payments / :customer) let tests address join
# columns in cursor_fields as {{:customer, :name}, dir} etc.
defp payments_by_customer_name(payment_id_direction \\ :asc, customer_name_direction \\ :asc) do
  from(
    p in Payment,
    as: :payments,
    join: c in assoc(p, :customer),
    as: :customer,
    preload: [customer: c],
    select: p,
    order_by: [
      {^customer_name_direction, c.name},
      {^payment_id_direction, p.id}
    ]
  )
end
# Payments joined two levels deep (payment -> customer -> address), ordered
# by the address city then payment id. The :address named binding backs the
# 2nd-level-join cursor tests.
defp payments_by_address_city(payment_id_direction \\ :asc, address_city_direction \\ :asc) do
  from(
    p in Payment,
    as: :payments,
    join: c in assoc(p, :customer),
    as: :customer,
    join: a in assoc(c, :address),
    as: :address,
    preload: [customer: {c, address: a}],
    select: p,
    order_by: [
      {^address_city_direction, a.city},
      {^payment_id_direction, p.id}
    ]
  )
end
# A single customer's payments ordered by charged_at, amount and id in the
# given direction.
defp customer_payments_by_charged_at_and_amount(customer, direction \\ :asc) do
  from(
    p in Payment,
    where: p.customer_id == ^customer.id,
    order_by: [{^direction, p.charged_at}, {^direction, p.amount}, {^direction, p.id}]
  )
end
# Encode a cursor map exactly the way the library itself does.
defp encode_cursor(value), do: Cursor.encode(value)
# Encode a cursor in the legacy (list-based) format: plain
# term_to_binary wrapped in URL-safe Base64.
defp encode_legacy_cursor(value) when is_list(value) do
  Base.url_encode64(:erlang.term_to_binary(value))
end
# A UTC timestamp the given number of days in the past (days * 86400 s).
# NOTE(review): DT is aliased outside this chunk — presumably a DateTime
# helper module exposing add!/2; confirm at the alias site.
defp days_ago(days) do
  DT.add!(DateTime.utc_now(), -(days * 86400))
end
end
| 35.705758 | 106 | 0.562297 |
9e9bd4c2b3acf85eb970713631c873f4e8188b09 | 758 | ex | Elixir | lib/binance/ticker.ex | dwarvesf/ex_binance | ed55e4b363c1cca54401b3b7d0e76c34e8797877 | [
"MIT"
] | 52 | 2018-01-16T23:38:06.000Z | 2022-02-14T11:05:47.000Z | lib/binance/ticker.ex | Cinderella-Man/binance.ex | 8735ed9582f8aebb947f766e53d2418060f69197 | [
"MIT"
] | 56 | 2018-02-24T15:10:29.000Z | 2022-03-28T19:05:41.000Z | lib/binance/ticker.ex | Cinderella-Man/binance.ex | 8735ed9582f8aebb947f766e53d2418060f69197 | [
"MIT"
] | 37 | 2018-01-20T14:56:56.000Z | 2022-03-03T20:18:30.000Z | defmodule Binance.Ticker do
@moduledoc """
Struct for representing a result row as returned by /api/v1/ticker/24hr

```
defstruct [
  :price_change,
  :price_change_percent,
  :weighted_avg_price,
  :prev_close_price,
  :last_price,
  :bid_price,
  :ask_price,
  :open_price,
  :high_price,
  :low_price,
  :volume,
  :open_time,
  :close_time,
  :first_id,
  :last_id,
  :count
]
```
"""

defstruct [
  :price_change,
  :price_change_percent,
  :weighted_avg_price,
  :prev_close_price,
  :last_price,
  :bid_price,
  :ask_price,
  :open_price,
  :high_price,
  :low_price,
  :volume,
  :open_time,
  :close_time,
  :first_id,
  :last_id,
  :count
]

# ExConstructor generates a `new/1` constructor that builds this struct
# from maps with string/atom/camelCase keys (see the ExConstructor docs).
use ExConstructor
end
| 15.791667 | 73 | 0.612137 |
9e9be61a5a6824cddedd71fb23cbf7ae9412b6c6 | 17,839 | ex | Elixir | lib/tesla/adapter/gun.ex | DubberSoftware/tesla | 35db1a1687db2b7ac9d68613aa257d5a9a322a88 | [
"MIT"
] | null | null | null | lib/tesla/adapter/gun.ex | DubberSoftware/tesla | 35db1a1687db2b7ac9d68613aa257d5a9a322a88 | [
"MIT"
] | null | null | null | lib/tesla/adapter/gun.ex | DubberSoftware/tesla | 35db1a1687db2b7ac9d68613aa257d5a9a322a88 | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(:gun) do
defmodule Tesla.Adapter.Gun do
@moduledoc """
Adapter for [gun](https://github.com/ninenines/gun).
Remember to add `{:gun, "~> 1.3"}`, `{:idna, "~> 6.0"}` and `{:castore, "~> 0.1"}` to dependencies.
In version 1.3 gun sends `host` header with port. Fixed in master branch.
Also, you need to recompile tesla after adding `:gun` dependency:
```
mix deps.clean tesla
mix deps.compile tesla
```
## Examples
```
# set globally in config/config.exs
config :tesla, :adapter, Tesla.Adapter.Gun
# set per module
defmodule MyClient do
use Tesla
adapter Tesla.Adapter.Gun
end
```
## Adapter specific options
- `:timeout` - Time, while process, will wait for gun messages.
- `:body_as` - What will be returned in `%Tesla.Env{}` body key. Possible values:
- `:plain` - as binary (default).
- `:stream` - as stream.
If you don't want to close connection (because you want to reuse it later)
pass `close_conn: false` in adapter opts.
- `:chunks` - as chunks.
You can get response body in chunks using `Tesla.Adapter.Gun.read_chunk/3` function.
Processing of the chunks and checking body size must be done by yourself.
Example of processing function is in `test/tesla/adapter/gun_test.exs` - `Tesla.Adapter.GunTest.read_body/4`.
If you don't need connection later don't forget to close it with `Tesla.Adapter.Gun.close/1`.
- `:max_body` - Max response body size in bytes.
Works only with `body_as: :plain`, with other settings you need to check response body size by yourself.
- `:conn` - Opened connection pid with gun. Is used for reusing gun connections.
- `:close_conn` - Close connection or not after receiving full response body.
Is used for reusing gun connections. Defaults to `true`.
- `:certificates_verification` - Add SSL certificates verification.
[erlang-certifi](https://github.com/certifi/erlang-certifi)
[ssl_verify_fun.erl](https://github.com/deadtrickster/ssl_verify_fun.erl)
- `:proxy` - Proxy for requests.
**Socks proxy are supported only for gun master branch**.
Examples: `{'localhost', 1234}`, `{{127, 0, 0, 1}, 1234}`, `{:socks5, 'localhost', 1234}`.
**NOTE:** By default GUN uses TLS as transport if the specified port is 443,
if TLS is required for proxy connection on another port please specify transport
using the Gun options below otherwise tcp will be used.
- `:proxy_auth` - Auth to be passed along with the proxy opt.
Supports Basic auth for regular and Socks proxy.
Format: `{proxy_username, proxy_password}`.
## [Gun options](https://ninenines.eu/docs/en/gun/1.3/manual/gun/)
- `:connect_timeout` - Connection timeout.
- `:http_opts` - Options specific to the HTTP protocol.
- `:http2_opts` - Options specific to the HTTP/2 protocol.
- `:protocols` - Ordered list of preferred protocols.
Defaults: `[:http2, :http]`- for :tls, `[:http]` - for :tcp.
- `:trace` - Whether to enable dbg tracing of the connection process.
Should only be used during debugging. Default: false.
- `:transport` - Whether to use TLS or plain TCP.
The default varies depending on the port used.
Port 443 defaults to tls. All other ports default to tcp.
- `:transport_opts` - Transport options.
They are TCP options or TLS options depending on the selected transport.
Default: `[]`. Gun version: 1.3.
- `:tls_opts` - TLS transport options.
Default: `[]`. Gun from master branch.
- `:tcp_opts` - TCP transport options.
Default: `[]`. Gun from master branch.
- `:socks_opts` - Options for socks.
Default: `[]`. Gun from master branch.
- `:ws_opts` - Options specific to the Websocket protocol. Default: `%{}`.
- `:compress` - Whether to enable permessage-deflate compression.
This does not guarantee that compression will be used as it is the server
that ultimately decides. Defaults to false.
- `:protocols` - A non-empty list enables Websocket protocol negotiation.
The list of protocols will be sent in the sec-websocket-protocol request header.
The handler module interface is currently undocumented and must be set to `gun_ws_h`.
"""
@behaviour Tesla.Adapter
alias Tesla.Multipart

# Option keys that are picked out of the adapter opts and forwarded
# verbatim to :gun.open/3.
# TODO: update list after update to gun 2.0
@gun_keys [
  :connect_timeout,
  :http_opts,
  :http2_opts,
  :protocols,
  :retry,
  :retry_timeout,
  :trace,
  :transport,
  :socks_opts,
  :ws_opts
]

# Default receive timeout (ms) used whenever the caller passes no :timeout.
@default_timeout 1_000
@impl Tesla.Adapter
def call(env, opts) do
  # Perform the request and fold a successful 4-tuple back into the Tesla
  # env; any other result (error tuple) is passed through unchanged.
  case request(env, opts) do
    {:ok, status, headers, body} ->
      {:ok, %{env | status: status, headers: format_headers(headers), body: body}}

    error ->
      error
  end
end
@doc """
Reads chunk of the response body.

Returns `{:fin, binary()}` if all body received, otherwise returns `{:nofin, binary()}`.
"""
@spec read_chunk(pid(), reference(), keyword() | map()) ::
        {:fin, binary()} | {:nofin, binary()} | {:error, atom()}
def read_chunk(pid, stream, opts) do
  # Only a final chunk or an error terminates the stream; then the
  # connection is optionally closed. A {:nofin, _} chunk does not match the
  # `with` clause and is returned as-is, leaving the connection open.
  with {status, _} = chunk when status in [:fin, :error] <- do_read_chunk(pid, stream, opts) do
    if opts[:close_conn], do: close(pid)
    chunk
  end
end
# Block for a single gun data message belonging to `stream`, translating it
# into {:fin | :nofin, binary} — or {:error, reason} on a monitored crash
# (:DOWN) or when no message arrives within the configured timeout.
defp do_read_chunk(pid, stream, opts) do
  receive do
    {:gun_data, ^pid, ^stream, :fin, body} ->
      {:fin, body}

    {:gun_data, ^pid, ^stream, :nofin, part} ->
      {:nofin, part}

    {:DOWN, _, _, _, reason} ->
      {:error, reason}
  after
    opts[:timeout] || @default_timeout ->
      {:error, :recv_chunk_timeout}
  end
end
@doc """
Brutally close the `gun` connection.
"""
# Delegates straight to :gun.close/1, which terminates the connection
# without waiting for in-flight streams to finish.
@spec close(pid()) :: :ok
defdelegate close(pid), to: :gun
# Normalize header pairs for/from gun: stringify keys and values and
# downcase the keys.
defp format_headers(headers) do
  Enum.map(headers, fn {key, value} ->
    {key |> to_string() |> String.downcase(), to_string(value)}
  end)
end
# Translate the Tesla env into positional request arguments, merging the
# adapter defaults with env- and call-level options into a map.
defp request(env, opts) do
  request(
    Tesla.Adapter.Shared.format_method(env.method),
    Tesla.build_url(env.url, env.query),
    format_headers(env.headers),
    env.body || "",
    Tesla.Adapter.opts(
      [close_conn: true, body_as: :plain, send_body: :at_once, receive: true],
      env,
      opts
    )
    |> Enum.into(%{})
  )
end
# Streams, functions and multiparts are sent chunk by chunk (:stream);
# everything else goes out in a single frame (the :at_once default).
defp request(method, url, headers, %Stream{} = body, opts),
  do: do_request(method, url, headers, body, Map.put(opts, :send_body, :stream))

defp request(method, url, headers, body, opts) when is_function(body),
  do: do_request(method, url, headers, body, Map.put(opts, :send_body, :stream))

# Multipart bodies also contribute their own headers (content-type/boundary).
defp request(method, url, headers, %Multipart{} = mp, opts) do
  headers = headers ++ Multipart.headers(mp)
  body = Multipart.body(mp)

  do_request(method, url, headers, body, Map.put(opts, :send_body, :stream))
end

defp request(method, url, headers, body, opts),
  do: do_request(method, url, headers, body, opts)
# Open (or reuse) a connection, issue the request and read the response.
# The connection is closed here unless the caller asked to keep it open, or
# the body is consumed lazily (:stream / :chunks close it after reading).
defp do_request(method, url, headers, body, opts) do
  uri = URI.parse(url)
  path = Tesla.Adapter.Shared.prepare_path(uri.path, uri.query)

  with {:ok, pid, opts} <- open_conn(uri, opts) do
    stream = open_stream(pid, method, path, headers, body, opts)
    response = read_response(pid, stream, opts)

    if opts[:close_conn] and opts[:body_as] not in [:stream, :chunks] do
      close(pid)
    end

    response
  end
end
@dialyzer [{:nowarn_function, open_conn: 2}, :no_match]
# Reuse a caller-supplied connection when its scheme/host/port match the
# request URI. :receive is disabled because an already-open connection will
# not emit gun_up/gun_down messages for this request.
defp open_conn(%{scheme: scheme, host: host, port: port}, %{conn: conn} = opts)
     when is_pid(conn) do
  info = :gun.info(conn)

  conn_scheme =
    case info do
      # gun master branch support, which has `origin_scheme` in connection info
      # NOTE: `scheme` here is a fresh binding local to this clause, not a
      # pin against the function-head `scheme`; the comparison below does
      # the actual matching.
      %{origin_scheme: scheme} ->
        scheme

      %{transport: :tls} ->
        "https"

      _ ->
        "http"
    end

  # origin_host may be an IP tuple (convert back to a string) or a hostname.
  conn_host =
    case :inet.ntoa(info.origin_host) do
      {:error, :einval} -> info.origin_host
      ip -> ip
    end

  if conn_scheme == scheme and to_string(conn_host) == host and info.origin_port == port do
    {:ok, conn, Map.put(opts, :receive, false)}
  else
    {:error, :invalid_conn}
  end
end

# No reusable connection: compute transport and TLS options (with optional
# certificate verification) and open a fresh gun connection.
defp open_conn(uri, opts) do
  opts = maybe_add_transport(uri, opts)

  tls_opts =
    if uri.scheme == "https" do
      opts
      |> fetch_tls_opts()
      |> maybe_add_verify_options(opts, uri)
    else
      []
    end

  gun_opts = Map.take(opts, @gun_keys)

  with {:ok, conn} <- do_open_conn(uri, opts, gun_opts, tls_opts) do
    {:ok, conn, opts}
  end
end
# When a proxy is in use, the transport for the initial gun open must match
# the proxy host/port rather than the destination, so TLS is not forced
# here; without a proxy, HTTPS URIs force the :tls transport.
defp maybe_add_transport(_, %{proxy: proxy_opts} = opts) when not is_nil(proxy_opts), do: opts
defp maybe_add_transport(%URI{scheme: "https"}, opts), do: Map.put(opts, :transport, :tls)
defp maybe_add_transport(_, opts), do: opts
# Support for gun master branch where transport_opts was split into
# tls_opts and tcp_opts:
# https://github.com/ninenines/gun/blob/491ddf58c0e14824a741852fdc522b390b306ae2/doc/src/manual/gun.asciidoc#changelog
# TODO: remove after update to gun 2.0
defp fetch_tls_opts(%{tls_opts: tls_opts}) when is_list(tls_opts), do: tls_opts
defp fetch_tls_opts(%{transport_opts: tls_opts}) when is_list(tls_opts), do: tls_opts
defp fetch_tls_opts(_), do: []
# With certificates_verification enabled, add peer verification against the
# CAStore bundle plus hostname checking (host IDNA-encoded for unicode
# domains). User-supplied tls opts override these defaults via merge.
defp maybe_add_verify_options(tls_opts, %{certificates_verification: true}, %{host: host}) do
  charlist =
    host
    |> to_charlist()
    |> :idna.encode()

  security_opts = [
    verify: :verify_peer,
    cacertfile: CAStore.file_path(),
    depth: 20,
    reuse_sessions: false,
    verify_fun: {&:ssl_verify_hostname.verify_fun/3, [check_hostname: charlist]}
  ]

  Keyword.merge(security_opts, tls_opts)
end

defp maybe_add_verify_options(tls_opts, _, _), do: tls_opts
@dialyzer [{:nowarn_function, do_open_conn: 4}, :no_match]
# HTTP tunnelling proxy ({host, port}): open to the proxy, wait for it to
# come up, then CONNECT to the destination and require a 200 before
# proceeding. 403/407 responses are translated into explicit auth errors.
defp do_open_conn(uri, %{proxy: {proxy_host, proxy_port}} = opts, gun_opts, tls_opts) do
  connect_opts =
    uri
    |> tunnel_opts()
    |> tunnel_tls_opts(uri.scheme, tls_opts)
    |> add_proxy_auth_credentials(opts)

  with {:ok, pid} <- :gun.open(proxy_host, proxy_port, gun_opts),
       {:ok, _} <- :gun.await_up(pid),
       stream <- :gun.connect(pid, connect_opts),
       {:response, :fin, 200, _} <- :gun.await(pid, stream) do
    {:ok, pid}
  else
    {:response, :nofin, 403, _} -> {:error, :unauthorized}
    {:response, :nofin, 407, _} -> {:error, :proxy_auth_failed}
    error -> error
  end
end

# SOCKS proxy ({type, host, port}): the SOCKS version is derived from the
# last character of the proxy type atom ("4" -> 4, anything else -> 5) and
# negotiated via gun's :socks protocol (gun master branch only; gun 1.3
# rejects the option, which is surfaced as a descriptive error).
defp do_open_conn(
       uri,
       %{proxy: {proxy_type, proxy_host, proxy_port}} = opts,
       gun_opts,
       tls_opts
     ) do
  version =
    proxy_type
    |> to_string()
    |> String.last()
    |> case do
      "4" -> 4
      _ -> 5
    end

  socks_opts =
    uri
    |> tunnel_opts()
    |> tunnel_tls_opts(uri.scheme, tls_opts)
    |> Map.put(:version, version)
    |> add_socks_proxy_auth_credentials(opts)

  gun_opts =
    gun_opts
    |> Map.put(:protocols, [:socks])
    |> Map.update(:socks_opts, socks_opts, &Map.merge(socks_opts, &1))

  with {:ok, pid} <- :gun.open(proxy_host, proxy_port, gun_opts),
       {:ok, _} <- :gun.await_up(pid) do
    {:ok, pid}
  else
    {:error, {:options, {:protocols, [:socks]}}} ->
      {:error, "socks protocol is not supported"}

    error ->
      error
  end
end

# Direct connection: try the gun master option names (tls_opts/tcp_opts)
# first and fall back to gun 1.3's transport_opts when they are rejected.
defp do_open_conn(uri, opts, gun_opts, tls_opts) do
  tcp_opts = Map.get(opts, :tcp_opts, [])

  # if gun used from master
  opts_with_master_keys =
    gun_opts
    |> Map.put(:tls_opts, tls_opts)
    |> Map.put(:tcp_opts, tcp_opts)

  host = domain_or_ip(uri.host)

  with {:ok, pid} <- gun_open(host, uri.port, opts_with_master_keys, opts) do
    {:ok, pid}
  else
    {:error, {:options, {key, _}}} when key in [:tcp_opts, :tls_opts] ->
      gun_open(host, uri.port, Map.put(gun_opts, :transport_opts, tls_opts), opts)

    error ->
      error
  end
end
@dialyzer [{:nowarn_function, gun_open: 4}, :no_match]
# Open the connection and, unless :receive is disabled (reused connection),
# block until gun reports it is up. If await_up fails, the half-open
# connection is closed before the error is returned.
defp gun_open(host, port, gun_opts, opts) do
  with {:ok, pid} <- :gun.open(host, port, gun_opts),
       {_, true, _} <- {:receive, opts[:receive], pid},
       {_, {:ok, _}, _} <- {:up, :gun.await_up(pid), pid} do
    {:ok, pid}
  else
    {:receive, false, pid} ->
      {:ok, pid}

    {:up, error, pid} ->
      close(pid)
      error

    error ->
      error
  end
end
# Build the CONNECT/SOCKS destination (host/port) from the request URI.
defp tunnel_opts(uri) do
  %{host: domain_or_ip(uri.host), port: uri.port}
end
# TLS destinations are upgraded through the tunnel with HTTP/2-capable TLS
# options; plain destinations pass through unchanged.
defp tunnel_tls_opts(opts, "https", tls_opts) do
  Map.merge(opts, %{protocols: [:http2], transport: :tls, tls_opts: tls_opts})
end

defp tunnel_tls_opts(opts, _scheme, _tls_opts), do: opts
# Attach Basic-auth credentials for an HTTP tunnelling proxy, when given.
defp add_proxy_auth_credentials(opts, %{proxy_auth: {username, password}})
     when is_binary(username) and is_binary(password) do
  opts
  |> Map.put(:username, username)
  |> Map.put(:password, password)
end

defp add_proxy_auth_credentials(opts, _), do: opts
# Attach username/password auth for a SOCKS proxy, when given.
defp add_socks_proxy_auth_credentials(opts, %{proxy_auth: {username, password}})
     when is_binary(username) and is_binary(password) do
  Map.merge(opts, %{auth: {:username_password, username, password}})
end

defp add_socks_proxy_auth_credentials(opts, _), do: opts
# Issue the request on the open connection. With :stream the headers go out
# first and the enumerable body is pushed chunk by chunk, terminated by an
# empty :fin frame; with :at_once the entire body is sent in one call.
defp open_stream(pid, method, path, headers, body, opts) do
  req_opts = %{reply_to: opts[:reply_to] || self()}
  open_stream(pid, method, path, headers, body, req_opts, opts[:send_body])
end

defp open_stream(pid, method, path, headers, body, req_opts, :stream) do
  stream = :gun.headers(pid, method, path, headers, req_opts)
  for data <- body, do: :ok = :gun.data(pid, stream, :nofin, data)
  :gun.data(pid, stream, :fin, "")
  stream
end

defp open_stream(pid, method, path, headers, body, req_opts, :at_once),
  do: :gun.request(pid, method, path, headers, body, req_opts)
# Wait for the response head. gun_up/gun_down messages are only expected
# while this process owns a freshly opened connection (receive?: true) and
# simply mean "keep waiting"; a monitored crash or a timeout becomes an
# error tuple. A :fin head means there is no body at all.
defp read_response(pid, stream, opts) do
  receive? = opts[:receive]

  receive do
    {:gun_response, ^pid, ^stream, :fin, status, headers} ->
      {:ok, status, headers, ""}

    {:gun_response, ^pid, ^stream, :nofin, status, headers} ->
      format_response(pid, stream, opts, status, headers, opts[:body_as])

    {:gun_up, ^pid, _protocol} when receive? ->
      read_response(pid, stream, opts)

    {:gun_error, ^pid, reason} ->
      {:error, reason}

    {:gun_down, ^pid, _, _, _, _} when receive? ->
      read_response(pid, stream, opts)

    {:DOWN, _, _, _, reason} ->
      {:error, reason}
  after
    opts[:timeout] || @default_timeout ->
      {:error, :recv_response_timeout}
  end
end
# :plain — read the whole body eagerly; on error, flush pending gun
# messages for this stream so they do not leak into the owner's mailbox.
defp format_response(pid, stream, opts, status, headers, :plain) do
  case read_body(pid, stream, opts) do
    {:ok, body} ->
      {:ok, status, headers, body}

    {:error, error} ->
      # prevent gun sending messages to owner process, if body is too large and connection is not closed
      :ok = :gun.flush(stream)

      {:error, error}
  end
end

# :stream — wrap the body in a lazy Stream; the connection is closed (if
# requested) by the after-fun once the stream is fully consumed or halted.
defp format_response(pid, stream, opts, status, headers, :stream) do
  stream_body =
    Stream.resource(
      fn -> %{pid: pid, stream: stream} end,
      fn
        %{pid: pid, stream: stream} ->
          case read_chunk(pid, stream, opts) do
            {:nofin, part} -> {[part], %{pid: pid, stream: stream}}
            {:fin, body} -> {[body], %{pid: pid, final: :fin}}
          end

        %{pid: pid, final: :fin} ->
          {:halt, %{pid: pid}}
      end,
      fn %{pid: pid} ->
        if opts[:close_conn], do: close(pid)
      end
    )

  {:ok, status, headers, stream_body}
end

# :chunks — hand the caller everything needed to drive read_chunk/3 itself.
defp format_response(pid, stream, opts, status, headers, :chunks) do
  {:ok, status, headers, %{pid: pid, stream: stream, opts: Enum.into(opts, [])}}
end
# Accumulate body frames until :fin, enforcing the optional :max_body limit
# on every partial so an oversized response is aborted as early as possible.
defp read_body(pid, stream, opts, acc \\ "") do
  limit = opts[:max_body]

  receive do
    {:gun_data, ^pid, ^stream, :fin, body} ->
      check_body_size(acc, body, limit)

    {:gun_data, ^pid, ^stream, :nofin, part} ->
      with {:ok, acc} <- check_body_size(acc, part, limit) do
        read_body(pid, stream, opts, acc)
      end

    {:DOWN, _, _, _, reason} ->
      {:error, reason}
  after
    opts[:timeout] || @default_timeout ->
      {:error, :recv_body_timeout}
  end
end
# Append a body chunk to the accumulator, enforcing the optional byte
# limit. Returns {:ok, body} while within the limit and
# {:error, :body_too_large} once the accumulated size exceeds it; a nil
# limit disables the check entirely.
defp check_body_size(acc, part, nil), do: {:ok, acc <> part}

defp check_body_size(acc, part, limit) do
  body = acc <> part

  # Direct size comparison instead of the original `limit - byte_size(body) >= 0`.
  if byte_size(body) <= limit do
    {:ok, body}
  else
    {:error, :body_too_large}
  end
end
# gun expects either an IP tuple or a hostname charlist; hostnames are
# IDNA (punycode) encoded so unicode domains resolve correctly.
defp domain_or_ip(host) do
  charlist = to_charlist(host)

  case :inet.parse_address(charlist) do
    {:error, :einval} ->
      :idna.encode(charlist)

    {:ok, ip} ->
      ip
  end
end
end
end
| 31.969534 | 125 | 0.591513 |
9e9c16aba8400942bae9395b57e575f31cc27e14 | 314 | exs | Elixir | test/phoenix/project_test.exs | fishcakez/phoenix | 97fbd73a475ae918ef29a87ad580ab2ab6d967d2 | [
"MIT"
] | null | null | null | test/phoenix/project_test.exs | fishcakez/phoenix | 97fbd73a475ae918ef29a87ad580ab2ab6d967d2 | [
"MIT"
] | null | null | null | test/phoenix/project_test.exs | fishcakez/phoenix | 97fbd73a475ae918ef29a87ad580ab2ab6d967d2 | [
"MIT"
] | null | null | null | defmodule Phoenix.ProjectTest do
use ExUnit.Case
alias Phoenix.Project
# NOTE(review): the test name says "root_module/0" but the assertion calls
# Project.module_root/0 — confirm which name the API actually exposes.
test "root_module/0 returns the root module from Mix" do
  assert Project.module_root == :Phoenix
end
# Every entry produced by Project.modules/0 is expected to be a module atom.
test "modules/0 returns a Stream of all modules in project" do
  assert Project.modules |> Enum.all?(&is_atom(&1))
end
end
| 24.153846 | 64 | 0.732484 |
9e9c3d8c2e04a3400d193460f53e6ef052509bb0 | 108 | ex | Elixir | apps/speedrun/lib/speedrun/repo.ex | brunobamaral/speedrun | b439e03a0a06c53c03824cb8a37fd55e4aef8e35 | [
"Apache-2.0"
] | null | null | null | apps/speedrun/lib/speedrun/repo.ex | brunobamaral/speedrun | b439e03a0a06c53c03824cb8a37fd55e4aef8e35 | [
"Apache-2.0"
] | null | null | null | apps/speedrun/lib/speedrun/repo.ex | brunobamaral/speedrun | b439e03a0a06c53c03824cb8a37fd55e4aef8e35 | [
"Apache-2.0"
] | null | null | null | defmodule Speedrun.Repo do
# Ecto repository for the :speedrun OTP app, backed by PostgreSQL.
use Ecto.Repo,
  otp_app: :speedrun,
  adapter: Ecto.Adapters.Postgres
end
| 18 | 35 | 0.731481 |
9e9c8edf8bce16c0f9e6b5801b0ff5012ec8b7ba | 6,614 | ex | Elixir | lib/oli_web/views/page_delivery_view.ex | jrissler/oli-torus | 747f9e4360163d76a6ca5daee3aab1feab0c99b1 | [
"MIT"
] | 1 | 2022-03-17T20:35:47.000Z | 2022-03-17T20:35:47.000Z | lib/oli_web/views/page_delivery_view.ex | jrissler/oli-torus | 747f9e4360163d76a6ca5daee3aab1feab0c99b1 | [
"MIT"
] | 9 | 2021-11-02T16:52:09.000Z | 2022-03-25T15:14:01.000Z | lib/oli_web/views/page_delivery_view.ex | marc-hughes/oli-torus-1 | aa3c9bb2d91b678a365be839761eaf86c60ee35c | [
"MIT"
] | null | null | null | defmodule OliWeb.PageDeliveryView do
use OliWeb, :view
use Phoenix.Component
alias Oli.Resources.ResourceType
alias Oli.Resources.Numbering
alias Oli.Delivery.Hierarchy.HierarchyNode
alias OliWeb.Router.Helpers, as: Routes
alias Oli.Delivery.Attempts.Core
alias Oli.Resources.Revision
import Oli.Utils, only: [value_or: 2]
# Render a grade as a whole-number percentage string. An unattempted
# resource (nil score and out_of) renders as "", and a non-positive
# denominator renders as "0" to avoid division by zero.
def show_score(nil, nil), do: ""

def show_score(score, out_of) do
  if out_of <= 0.0 do
    "0"
  else
    percentage = round(score / out_of * 100)
    Integer.to_string(percentage)
  end
end
# Resolve a prev/next hierarchy descriptor to a URL via the link builders
# the controller stashed in conn.assigns.
defp url_from_desc(conn, %{"type" => "container", "slug" => slug}),
  do: conn.assigns.container_link_url.(slug)

defp url_from_desc(conn, %{"type" => "page", "slug" => slug}),
  do: conn.assigns.page_link_url.(slug)
# Previous/next navigation helpers. The arity-1 versions read the
# prev/next page descriptors from conn.assigns; the arity-4 versions build
# a delivery route for an explicit page, honouring preview mode.
def previous_url(conn) do
  url_from_desc(conn, conn.assigns.previous_page)
end

def previous_url(conn, %{"slug" => slug} = previous_page, preview_mode, section_slug) do
  Routes.page_delivery_path(conn, action(preview_mode, previous_page), section_slug, slug)
end

def previous_title(%{"title" => title}) do
  title
end

def next_url(conn) do
  url_from_desc(conn, conn.assigns.next_page)
end

def next_url(conn, %{"slug" => slug} = next_page, preview_mode, section_slug) do
  Routes.page_delivery_path(conn, action(preview_mode, next_page), section_slug, slug)
end

def next_title(%{"title" => title}) do
  title
end
# Function component rendering the "previous page" navigation button.
# Optional assigns: :label (defaults to "Previous") and :onclick.
def prev_link(%{to: path, title: title} = assigns) do
  ~H"""
  <%= link to: path, class: "page-nav-link btn", onclick: assigns[:onclick] do %>
    <div class="d-flex flex-row">
      <div>
        <i class="fas fa-arrow-left nav-icon"></i>
      </div>
      <div class="d-flex flex-column flex-fill flex-ellipsis-fix text-right">
        <div class="nav-label"><%= value_or(assigns[:label], "Previous") %></div>
        <div class="nav-title"><%= title %></div>
      </div>
    </div>
  <% end %>
  """
end
# Function component rendering the "next page" navigation button.
# Optional assigns: :label (defaults to "Next") and :onclick.
def next_link(%{to: path, title: title} = assigns) do
  ~H"""
  <%= link to: path, class: "page-nav-link btn", onclick: assigns[:onclick] do %>
    <div class="d-flex flex-row">
      <div class="d-flex flex-column flex-fill flex-ellipsis-fix text-left">
        <div class="nav-label"><%= value_or(assigns[:label], "Next") %></div>
        <div class="nav-title"><%= title %></div>
      </div>
      <div>
        <i class="fas fa-arrow-right nav-icon"></i>
      </div>
    </div>
  <% end %>
  """
end
# Resolve the delivery controller action for a resource based on preview
# mode and whether the resource is a container. Revisions and raw
# descriptor maps are first reduced to an is-container boolean.
def action(preview_mode, %Revision{} = revision), do: action(preview_mode, container?(revision))
def action(preview_mode, %{"type" => type}), do: action(preview_mode, type == "container")

def action(preview_mode, is_container) when is_boolean(is_container) do
  cond do
    preview_mode and is_container -> :container_preview
    preview_mode -> :page_preview
    is_container -> :container
    true -> :page
  end
end
# True when the revision's resource type resolves to "container".
def container?(rev) do
  ResourceType.get_type_by_id(rev.resource_type_id) == "container"
end
# Title for a hierarchy node, e.g. "Unit 2: My Title"; the numeric index
# can be suppressed via display_curriculum_item_numbering.
def container_title(
      %HierarchyNode{
        numbering: %Numbering{
          level: level,
          index: index
        },
        revision: revision
      },
      display_curriculum_item_numbering \\ true
    ) do
  if display_curriculum_item_numbering,
    do: "#{Numbering.container_type(level)} #{index}: #{revision.title}",
    else: "#{Numbering.container_type(level)}: #{revision.title}"
end
# A submitted attempt leaves score/out_of populated; mere access keeps both
# fields nil, which is the only case that counts as "not submitted".
def has_submitted_attempt?(resource_access) do
  not (is_nil(resource_access.score) and is_nil(resource_access.out_of))
end
# Flatten the section hierarchy into a Base64-encoded JSON list of page
# descriptors (slug, delivery URL, graded flag) for client-side use.
def encode_pages(conn, section_slug, hierarchy) do
  Oli.Delivery.Hierarchy.flatten_pages(hierarchy)
  |> Enum.map(fn %{revision: revision} ->
    %{
      slug: revision.slug,
      url: Routes.page_delivery_path(conn, :page, section_slug, revision.slug),
      graded: revision.graded
    }
  end)
  |> Jason.encode!()
  |> Base.encode64()
end
# Wrap a URL in a Base64-encoded JSON object of the form {"url": ...}.
def encode_url(url) do
  %{"url" => url}
  |> Jason.encode!()
  |> Base.encode64()
end
# Encodes the latest activity attempts as a base64 JSON list; attempts
# that cannot be encoded (adaptive pages) are dropped.
def encode_activity_attempts(registered_activity_slug_map, latest_attempts) do
  latest_attempts
  |> Map.values()
  |> Enum.map(&encode_attempt(registered_activity_slug_map, &1))
  |> Enum.reject(&is_nil/1)
  |> Jason.encode!()
  |> Base.encode64()
end
# We only encode activity attempts for basic pages, when a full attempt hierarchy is present here as
# the second argument. These entries will be in the shape
# of two element tuples.
defp encode_attempt(registered_activity_slug_map, {activity_attempt, part_attempts_map}) do
  # Select the model once and reuse it below; the original called
  # Core.select_model/1 twice for the same attempt.
  # NOTE(review): assumes select_model/1 is a pure selection — confirm.
  raw_model = Core.select_model(activity_attempt)
  {:ok, model} = Oli.Activities.Model.parse(raw_model)

  state =
    Oli.Activities.State.ActivityState.from_attempt(
      activity_attempt,
      Map.values(part_attempts_map),
      model
    )

  activity_type_slug =
    Map.get(registered_activity_slug_map, activity_attempt.revision.activity_type_id)

  # Augment the serialized state with synthetic answers for load testing.
  state
  |> Map.from_struct()
  |> Map.put(
    :answers,
    Oli.Utils.LoadTesting.provide_answers(
      activity_type_slug,
      raw_model
    )
  )
end
# The thin attempt hierarchy will be present when the rendered page is an
# adaptive page (a plain map, not the tuple shape matched above). Adaptive
# pages are not exercised by the load-testing framework, so nil is
# returned here; callers filter nils out, leaving the
# __ACTIVITY_ATTEMPT__ load-testing variable an empty list.
defp encode_attempt(_slug_map, _thin_hierarchy), do: nil
# Formats a resource access score as a whole percentage string; empty
# string when no attempt was ever submitted, "0%" when out_of is zero.
def calculate_score_percentage(resource_access) do
  score = resource_access.score
  out_of = resource_access.out_of

  cond do
    is_nil(score) and is_nil(out_of) ->
      # Resource was accessed but no attempt was submitted.
      ""

    out_of != 0 ->
      "#{round(score / out_of * 100)}%"

    true ->
      "0%"
  end
end
# CSS class helpers for curriculum resource links; active links get the
# " active" suffix appended to the (currently empty) base class.
def base_resource_link_class(), do: ""

def resource_link_class(active?) when is_boolean(active?) do
  if active?,
    do: base_resource_link_class() <> " active",
    else: base_resource_link_class()
end
end
| 29.136564 | 111 | 0.632749 |
9e9ca37d0038a5138439930c313f329706115758 | 9,425 | ex | Elixir | lib/xattr.ex | SoftwareMansion/elixir-xattr | 3deb77e42bb599c40448703c4e40e85566351b85 | [
"MIT"
] | null | null | null | lib/xattr.ex | SoftwareMansion/elixir-xattr | 3deb77e42bb599c40448703c4e40e85566351b85 | [
"MIT"
] | 2 | 2019-03-16T18:42:23.000Z | 2019-03-17T10:17:13.000Z | lib/xattr.ex | SoftwareMansion/elixir-xattr | 3deb77e42bb599c40448703c4e40e85566351b85 | [
"MIT"
] | 1 | 2019-03-16T18:39:44.000Z | 2019-03-16T18:39:44.000Z | defmodule Xattr do
import Xattr.Nif
@moduledoc ~S"""
API module for accessing custom extended filesystem attributes.
Attributes managed by this module are stored in isolation, in custom namespace.
Because implementation concepts of extended attributes differ in supported
platforms, it would not be possible to provide unified API which could cover
specific use cases.
Some kernels and filesystems may place various limits on extended attributes
functionality, and so it is to use them only to store few, short metadata which
is not crucial to application functionality.
## Implementation
Elixir Xattr is implemented as NIF library with two platform-dependent
backends:
* *Xattr* - Unix extended attributes supported by Linux and macOS
* *Windows* - alternate data streams available in Windows/NTFS
### Xattr
This backed works as an Erlang wrapper for [`xattr(7)`](http://man7.org/linux/man-pages/man7/xattr.7.html)
functionality available in Unix world. Attributes are always prefixed with
`user.ElixirXattr` namespace.
### Windows
On Windows, NTFS has a feature called [*Alternate Data Streams*](https://blogs.technet.microsoft.com/askcore/2013/03/24/alternate-data-streams-in-ntfs/).
Briefly: a file can have many contents.
Attributes are stored in `ElixirXattr` data stream, which is automatically
created when setting an attribute and the stream does not exist. They are
saved in simple binary format, as a contiguous list of *size:data* cells:
```txt
v - name C-string size v - value binary size
+---+------------+---+-----------+---+----------+---+-------+
| 5 | n a m e \0 | 5 | v a l u e | 4 | f o o \0 | 3 | b a r | ...
+---+------------+---+-----------+---+----------+---+-------+
^ - name C-string, note \0 suffix ^ - value binary data
```
### Unicode
Unicode filenames are supported (and as such proper encoding conversions
are performed when needed).
Both names nor values are not processed and stored as-is.
### Attribute name types
Because attribute names can be represented by various Erlang types, they
are prefixed with *type tags* during serialization:
* `a$` - atoms
* `s$` - name
For example, given Xattr backend, call `Xattr.set("foo.txt", "example", "value")`
will create `user.ElixirXattr.s$example` extended attribute on file `foo.txt`.
### Extended attributes & file system links
On both Unix and Windows implementations, attribute storage is attached to
file system data, not file/link entries. Therefore attributes are shared
between all hard links / file and its symlinks.
## Errors
Because of the nature of error handling on both Unix and Windows, only specific
error codes are translated to atoms. Other codes are stringified to some human
readable name, on Unix using [`strerror`](https://linux.die.net/man/3/strerror)
and on Windows to form `'Windows Error {hexadecimal error code}'` (Windows
version of strerror returns localized messages on non-English installations).
Following errors are represented as atoms and as such can be pattern matched:
* `:enoattr` - attribute was not found
* `:enotsup` - extended attributes are not supported for this file
* `:enoent` - file does not exist
* `:invalfmt` - attribute storage is corrupted and should be regenerated
"""
@tag_atom "a$"
@tag_str "s$"
@type name_t :: String.t() | atom
@doc """
Lists names of all extended attributes of `path`.

The order of the returned names is unspecified. A file without any
attributes yields `{:ok, []}`.

## Example

    Xattr.set("foo.txt", "hello", "world")
    Xattr.set("foo.txt", :foo, "bar")
    {:ok, list} = Xattr.ls("foo.txt")
    # list should be permutation of ["hello", :foo]
"""
@spec ls(Path.t()) :: {:ok, [name_t]} | {:error, term}
def ls(path) do
  # The NIF expects a NUL-terminated path.
  cpath = IO.chardata_to_string(path) <> <<0>>

  case listxattr_nif(cpath) do
    {:ok, encoded_names} -> decode_list(encoded_names)
    error -> error
  end
end

@doc """
The same as `ls/1`, but raises `Xattr.Error` if it fails.
"""
@spec ls!(Path.t()) :: [name_t] | no_return
def ls!(path) do
  case ls(path) do
    {:ok, names} ->
      names

    {:error, reason} ->
      raise Xattr.Error,
        reason: reason,
        action: "list all extended attributes of",
        path: IO.chardata_to_string(path)
  end
end
@doc """
Checks whether `path` has extended attribute `name`.

## Example

    Xattr.set("foo.txt", "hello", "world")
    Xattr.has("foo.txt", "hello") == {:ok, true}
    Xattr.has("foo.txt", :foo) == {:ok, false}
"""
@spec has(Path.t(), name :: name_t) :: {:ok, boolean} | {:error, term}
def has(path, name) when is_binary(name) or is_atom(name) do
  # Both path and tagged attribute name are passed NUL-terminated.
  cpath = IO.chardata_to_string(path) <> <<0>>
  cname = encode_name(name) <> <<0>>
  hasxattr_nif(cpath, cname)
end

@doc """
The same as `has/2`, but raises `Xattr.Error` if it fails.
"""
@spec has!(Path.t(), name :: name_t) :: boolean | no_return
def has!(path, name) do
  case has(path, name) do
    {:ok, present?} ->
      present?

    {:error, reason} ->
      raise Xattr.Error,
        reason: reason,
        action: "check attribute existence of",
        path: IO.chardata_to_string(path)
  end
end
@doc """
Gets extended attribute value.

If attribute `name` does not exist, `{:error, :enoattr}` is returned.

## Example

    Xattr.set("foo.txt", "hello", "world")
    Xattr.get("foo.txt", "hello") == {:ok, "world"}
    Xattr.get("foo.txt", :foo) == {:error, :enoattr}
"""
@spec get(Path.t(), name :: name_t) :: {:ok, binary} | {:error, term}
def get(path, name) when is_binary(name) or is_atom(name) do
  cpath = IO.chardata_to_string(path) <> <<0>>
  cname = encode_name(name) <> <<0>>
  getxattr_nif(cpath, cname)
end

@doc """
The same as `get/2`, but raises `Xattr.Error` if it fails.
"""
@spec get!(Path.t(), name :: name_t) :: binary | no_return
def get!(path, name) do
  case get(path, name) do
    {:ok, value} ->
      value

    {:error, reason} ->
      raise Xattr.Error,
        reason: reason,
        action: "get attribute of",
        path: IO.chardata_to_string(path)
  end
end
@doc """
Sets extended attribute value.

If attribute `name` does not exist, it is created.

## Example

    Xattr.set("foo.txt", "hello", "world")
    Xattr.get("foo.txt", "hello") == {:ok, "world"}
"""
@spec set(Path.t(), name :: name_t, value :: binary) :: :ok | {:error, term}
def set(path, name, value)
    when (is_binary(name) or is_atom(name)) and is_binary(value) do
  path = IO.chardata_to_string(path) <> <<0>>
  name = encode_name(name) <> <<0>>

  setxattr_nif(path, name, value)
end

@doc """
The same as `set/3`, but raises an exception if it fails.
"""
@spec set!(Path.t(), name :: name_t, value :: binary) :: :ok | no_return
def set!(path, name, value) do
  case set(path, name, value) do
    :ok ->
      :ok

    {:error, reason} ->
      raise Xattr.Error,
        reason: reason,
        # Fixed: previously said "remove attribute of" (copy-paste from
        # rm!/2), which produced a misleading error message for failed
        # attribute writes.
        action: "set attribute of",
        path: IO.chardata_to_string(path)
  end
end
@doc """
Removes extended attribute.

If attribute `name` does not exist, `{:error, :enoattr}` is returned.

## Example

    Xattr.set("foo.txt", "hello", "world")
    Xattr.set("foo.txt", :foo, "bar")
    Xattr.rm("foo.txt", "foo")
    {:ok, ["hello"]} = Xattr.ls("foo.txt")
"""
@spec rm(Path.t(), name :: name_t) :: :ok | {:error, term}
def rm(path, name) when is_binary(name) or is_atom(name) do
  cpath = IO.chardata_to_string(path) <> <<0>>
  cname = encode_name(name) <> <<0>>
  removexattr_nif(cpath, cname)
end

@doc """
The same as `rm/2`, but raises `Xattr.Error` if it fails.
"""
@spec rm!(Path.t(), name :: name_t) :: :ok | no_return
def rm!(path, name) do
  case rm(path, name) do
    :ok ->
      :ok

    {:error, reason} ->
      raise Xattr.Error,
        reason: reason,
        action: "remove attribute of",
        path: IO.chardata_to_string(path)
  end
end
# Prefixes an attribute name with its type tag so the original Elixir
# type can be recovered later by decode_name/1.
defp encode_name(name) when is_atom(name), do: @tag_atom <> Atom.to_string(name)
defp encode_name(name) when is_binary(name), do: @tag_str <> name
# Strips the type tag from a serialized attribute name, restoring the
# original Elixir term ("a$" -> atom, "s$" -> string).
#
# NOTE(review): String.to_atom/1 creates atoms from data read back off
# the filesystem; atoms are never garbage-collected, so corrupted or
# hostile attribute data could exhaust the atom table. Consider
# String.to_existing_atom/1 if the atom names are known in advance.
defp decode_name(@tag_atom <> bin) do
  {:ok, String.to_atom(bin)}
end

defp decode_name(@tag_str <> bin) do
  {:ok, bin}
end

# Unknown tag prefix: the attribute storage is corrupted.
defp decode_name(_) do
  {:error, :invalfmt}
end
# Decodes every serialized attribute name, halting at the first
# undecodable entry and propagating its error tuple. Names are
# accumulated in reverse; the result order is documented as unspecified.
defp decode_list(encoded_names) do
  Enum.reduce_while(encoded_names, {:ok, []}, fn encoded, {:ok, names} ->
    case decode_name(encoded) do
      {:ok, name} -> {:cont, {:ok, [name | names]}}
      error -> {:halt, error}
    end
  end)
end
end
defmodule Xattr.Error do
  @moduledoc false

  defexception [:reason, :path, action: ""]

  def message(%{action: action, reason: reason, path: path}) do
    "could not #{action} #{inspect(path)}: #{describe(reason)}"
  end

  # Human-readable text for library-specific reasons; everything else is
  # delegated to :file.format_error/1, falling back to inspect/1 for
  # unrecognized POSIX codes.
  defp describe(:enoattr), do: "no such attribute"
  defp describe(:invalfmt), do: "corrupted attribute data"

  defp describe(reason) do
    case IO.iodata_to_binary(:file.format_error(reason)) do
      "unknown POSIX error" <> _ -> inspect(reason)
      formatted_reason -> formatted_reason
    end
  end
end
| 28.82263 | 155 | 0.62313 |
9e9cc5aa04cbce87700647e973fcbda78ebb27eb | 1,832 | ex | Elixir | lib/ig/rest_client.ex | frathon/ig | 21d809ddfe389dd7a7b8ea75979510114e31dece | [
"MIT"
] | 1 | 2020-01-21T12:01:29.000Z | 2020-01-21T12:01:29.000Z | lib/ig/rest_client.ex | frathon/ig | 21d809ddfe389dd7a7b8ea75979510114e31dece | [
"MIT"
] | null | null | null | lib/ig/rest_client.ex | frathon/ig | 21d809ddfe389dd7a7b8ea75979510114e31dece | [
"MIT"
] | 1 | 2020-01-27T23:13:11.000Z | 2020-01-27T23:13:11.000Z | defmodule Ig.RestClient do
@live_endpoint "https://api.ig.com/gateway/deal"
@demo_endpoint "https://demo-api.ig.com/gateway/deal"

# Authenticates against the IG REST API (session endpoint, VERSION 2) and
# returns the decoded body merged with the CST / X-SECURITY-TOKEN response
# headers needed for subsequent authenticated calls.
def login(is_demo, identifier, password, api_key) do
  {:ok, %HTTPoison.Response{} = response} =
    post(is_demo, '/session', %{identifier: identifier, password: password}, [
      {"X-IG-API-KEY", api_key},
      {"VERSION", 2}
    ])

  case response do
    %{status_code: 200} ->
      session = Jason.decode!(response.body)

      {:ok,
       Map.merge(session, %{
         cst: header_value(response.headers, "CST"),
         security_token: header_value(response.headers, "X-SECURITY-TOKEN"),
         api_key: api_key
       })}

    _ ->
      handle_error(response)
  end
end

# Value of the first response header named `name`, or nil when absent.
defp header_value(headers, name) do
  headers
  |> Enum.find({nil, nil}, fn header -> elem(header, 0) == name end)
  |> elem(1)
end
# Thin wrappers over make_request/5; GET requests carry an empty body.
def post(is_demo, uri, body, headers \\ []), do: make_request(:post, is_demo, uri, body, headers)

def get(is_demo, uri, headers \\ []), do: make_request(:get, is_demo, uri, "", headers)
# Issues the HTTP request against the demo or live gateway; non-GET
# bodies are JSON-encoded. `uri` may be a charlist, hence interpolation.
defp make_request(method, is_demo, uri, body, headers) do
  url = "#{base_url(is_demo)}#{uri}"
  all_headers = default_headers() ++ headers

  payload =
    case method do
      :get -> ""
      _ -> Jason.encode!(body)
    end

  HTTPoison.request(method, url, payload, all_headers)
end
# Demo gateway only when `is_demo` is exactly `true`; anything else hits
# the live gateway (matching the original catch-all clause).
defp base_url(is_demo) do
  if is_demo == true, do: @demo_endpoint, else: @live_endpoint
end
# JSON content-negotiation headers sent with every request.
defp default_headers() do
  json = "application/json; charset=UTF-8"
  [{"Content-Type", json}, {"Accept", json}]
end
# Decodes a non-200 response body and wraps it in an :error tuple.
defp handle_error(%{body: body}) do
  {:error, Jason.decode!(body)}
end
end
| 26.550725 | 98 | 0.599891 |
9e9ce3fbddc387757f9b0cfe6247199c443ede30 | 2,204 | exs | Elixir | config/dev.exs | Hiyori-API/checker-mal | c52f6e8a248ba160ffebc2c9369a933fc8fc4499 | [
"MIT"
] | null | null | null | config/dev.exs | Hiyori-API/checker-mal | c52f6e8a248ba160ffebc2c9369a933fc8fc4499 | [
"MIT"
] | null | null | null | config/dev.exs | Hiyori-API/checker-mal | c52f6e8a248ba160ffebc2c9369a933fc8fc4499 | [
"MIT"
] | null | null | null | import Config
# Configure your database
# (dev-only credentials; sensitive connection errors are shown verbatim).
config :checker_mal, CheckerMal.Repo,
  username: "postgres",
  password: "postgres",
  database: "checker_mal_dev",
  hostname: "localhost",
  show_sensitive_data_on_connection_error: true,
  pool_size: 10

# NOTE(review): presumably the delay between MyAnimeList requests — the
# unit (seconds?) is not stated here; confirm against the consuming code.
config :checker_mal,
  mal_wait_time: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :checker_mal, CheckerMalWeb.Endpoint,
  # Dev server listens on plain HTTP port 4001 with origin checks off.
  http: [port: 4001],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [
    node: [
      "node_modules/webpack/bin/webpack.js",
      "--mode",
      "development",
      "--watch",
      cd: Path.expand("../assets", __DIR__)
    ]
  ]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
# Any change to a file matching these patterns triggers a reload.
config :checker_mal, CheckerMalWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
      ~r"priv/gettext/.*(po)$",
      ~r"lib/checker_mal_web/(live|views)/.*(ex)$",
      ~r"lib/checker_mal_web/templates/.*(eex)$"
    ]
  ]
# Do not include metadata nor timestamps in development logs
# ($level and $message are Logger console-format placeholders).
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 27.55 | 68 | 0.696461 |
9e9d044ed9c7f7ac64b654ff768fe59420f42715 | 6,140 | ex | Elixir | lib/harald/hci/commands/le_controller/create_connection.ex | smartrent/harald | 158a69bc2b70b3f51d67bd935d223a42a3633d68 | [
"MIT"
] | 3 | 2020-08-07T02:09:09.000Z | 2020-08-28T12:25:48.000Z | lib/harald/hci/commands/le_controller/create_connection.ex | smartrent/harald | 158a69bc2b70b3f51d67bd935d223a42a3633d68 | [
"MIT"
] | null | null | null | lib/harald/hci/commands/le_controller/create_connection.ex | smartrent/harald | 158a69bc2b70b3f51d67bd935d223a42a3633d68 | [
"MIT"
] | null | null | null | defmodule Harald.HCI.Command.LEController.CreateConnection do
use Harald.HCI.Command.LEController, ocf: 0x000D
@moduledoc """
The HCI_LE_Create_Connection command is used to create an ACL connection to a
connectable advertiser
Bluetooth Core Version 5.2 | Vol 4, Part E, section 7.8.12
* OGF: `#{inspect(@ogf, base: :hex)}`
* OCF: `#{inspect(@ocf, base: :hex)}`
* Opcode: `#{inspect(@opcode)}`
The LE_Scan_Interval and LE_Scan_Window parameters are recommendations from
the Host on how long (LE_Scan_Window) and how frequently (LE_Scan_Interval)
the Controller should scan. The LE_Scan_Window parameter shall be set to a
value smaller or equal to the value set for the LE_Scan_Interval parameter. If
both are set to the same value, scanning should run continuously.
The Initiator_Filter_Policy is used to determine whether the White List is
used. If the White List is not used, the Peer_Address_Type and the
Peer_Address parameters specify the address type and address of the
advertising device to connect to.
Peer_Address_Type parameter indicates the type of address used in the
connectable advertisement sent by the peer. The Host shall not set
Peer_Address_Type to either 0x02 or 0x03 if both the Host and the Controller
support the HCI_LE_Set_Privacy_Mode command. If a Controller that supports the
HCI_LE_Set_Privacy_Mode command receives the HCI_LE_Create_Connection command
with Peer_Address_Type set to either 0x02 or 0x03, it may use either device
privacy mode or network privacy mode for that peer device.
Peer_Address parameter indicates the Peer’s Public Device Address, Random
(static) Device Address, Non-Resolvable Private Address or Resolvable Private
Address depending on the Peer_Address_Type parameter.
Own_Address_Type parameter indicates the type of address being used in the
connection request packets.
The Connection_Interval_Min and Connection_Interval_Max parameters define the
minimum and maximum allowed connection interval. The Connection_Interval_Min
parameter shall not be greater than the Connection_Interval_Max parameter.
The Connection_Latency parameter defines the maximum allowed connection latency
(see [Vol 6] Part B, Section 4.5.1).
The Supervision_Timeout parameter defines the link supervision timeout for the
connection. The Supervision_Timeout in milliseconds shall be larger than (1 +
Connection_Latency) * Connection_Interval_Max * 2, where Connection_Interval_Max
is given in milliseconds. (See [Vol 6] Part B, Section 4.5.2).
The Min_CE_Length and Max_CE_Length parameters are informative parameters
providing the Controller with the expected minimum and maximum length of the
connection events. The Min_CE_Length parameter shall be less than or equal to
the Max_CE_Length parameter.
If the Host issues this command when another HCI_LE_Create_Connection command is
pending in the Controller, the Controller shall return the error code Command
Disallowed (0x0C).
If the Own_Address_Type parameter is set to 0x01 and the random address for the
device has not been initialized, the Controller shall return the error code
Invalid HCI Command Parameters (0x12).
If the Own_Address_Type parameter is set to 0x03, the Initiator_Filter_Policy
parameter is set to 0x00, the controller's resolving list did not contain a
matching entry, and the random address for the device has not been initialized,
the Controller shall return the error code Invalid HCI Command Parameters
(0x12).
If the Own_Address_Type parameter is set to 0x03, the Initiator_Filter_Policy
parameter is set to 0x01, and the random address for the device has not been
initialized, the Controller shall return the error code Invalid HCI Command
Parameters (0x12)
"""
# Default parameter values for HCI_LE_Create_Connection; each field may be
# overridden at construction time. peer_address has no sensible default
# and must be supplied by the caller.
# NOTE(review): values are raw controller units (the moduledoc only
# constrains relationships, e.g. Supervision_Timeout > (1 +
# Connection_Latency) * Connection_Interval_Max * 2) — confirm unit
# conversions against the Bluetooth Core spec when changing them.
defparameters le_scan_interval: 0x0C80,
              le_scan_window: 0x0640,
              initiator_filter_policy: 0,
              peer_address_type: 0,
              peer_address: nil,
              own_address_type: 0,
              connection_interval_min: 0x0024,
              connection_interval_max: 0x0C80,
              connection_latency: 0x0012,
              supervision_timeout: 0x0640,
              min_ce_length: 0x0006,
              max_ce_length: 0x0054
defimpl HCI.Serializable do
  # Wire format: opcode, one parameter-length byte, then the fixed
  # parameter block in command order (multi-byte fields little-endian).
  def serialize(command) do
    parameters = <<
      command.le_scan_interval::16-little,
      command.le_scan_window::16-little,
      command.initiator_filter_policy::8,
      command.peer_address_type::8,
      command.peer_address::48,
      command.own_address_type::8,
      command.connection_interval_min::16-little,
      command.connection_interval_max::16-little,
      command.connection_latency::16-little,
      command.supervision_timeout::16-little,
      command.min_ce_length::16-little,
      command.max_ce_length::16-little
    >>

    <<command.opcode::binary, byte_size(parameters), parameters::binary>>
  end
end
@impl Harald.HCI.Command
def deserialize(<<@opcode::binary, _fields_size, fields::binary>>) do
  # Field order and widths mirror serialize/1 above.
  <<
    le_scan_interval::16-little,
    le_scan_window::16-little,
    initiator_filter_policy::8,
    peer_address_type::8,
    peer_address::48,
    own_address_type::8,
    connection_interval_min::16-little,
    connection_interval_max::16-little,
    connection_latency::16-little,
    supervision_timeout::16-little,
    min_ce_length::16-little,
    max_ce_length::16-little
  >> = fields

  {:ok,
   %__MODULE__{
     le_scan_interval: le_scan_interval,
     le_scan_window: le_scan_window,
     initiator_filter_policy: initiator_filter_policy,
     peer_address_type: peer_address_type,
     peer_address: peer_address,
     own_address_type: own_address_type,
     connection_interval_min: connection_interval_min,
     connection_interval_max: connection_interval_max,
     connection_latency: connection_latency,
     supervision_timeout: supervision_timeout,
     min_ce_length: min_ce_length,
     max_ce_length: max_ce_length
   }}
end
@impl Harald.HCI.Command
# No Command Complete return parameters for this command — presumably the
# result arrives asynchronously (LE Connection Complete event); confirm
# against Harald's event handling.
def return_parameters(_), do: %{}
end
| 40.662252 | 82 | 0.742345 |
9e9d2f79ba504a30dbcbd1709294f48c565ee10f | 1,565 | ex | Elixir | lib/nostrum/struct/voice_state.ex | jos-b/nostrum | baf5c9de9f17c3bd99c5c06a454e03cc448aad1c | [
"MIT"
] | 1 | 2020-12-08T23:47:17.000Z | 2020-12-08T23:47:17.000Z | lib/nostrum/struct/voice_state.ex | jos-b/nostrum | baf5c9de9f17c3bd99c5c06a454e03cc448aad1c | [
"MIT"
] | null | null | null | lib/nostrum/struct/voice_state.ex | jos-b/nostrum | baf5c9de9f17c3bd99c5c06a454e03cc448aad1c | [
"MIT"
] | null | null | null | defmodule Nostrum.Struct.VoiceState do
@moduledoc false

alias Nostrum.Voice.Session
alias Porcelain.Process, as: Proc

# Per-connection voice state.
defstruct [
  # Discord identifiers for where the bot is connected.
  :guild_id,
  :channel_id,
  # Voice gateway endpoint plus the session id / token Discord hands out.
  :gateway,
  :session,
  :token,
  # Presumably the encryption key for RTP payloads — confirm in Session.
  :secret_key,
  # Pid of the voice websocket session process.
  :session_pid,
  # RTP synchronization source id.
  :ssrc,
  :speaking,
  # UDP endpoint and socket used for voice data.
  :ip,
  :port,
  :udp_socket,
  # Rolling RTP counters.
  :rtp_sequence,
  :rtp_timestamp,
  # External ffmpeg process (Porcelain) and the audio player pid.
  :ffmpeg_proc,
  :player_pid
]
# Builds an empty voice state, or one pre-populated from `params`.
def new do
  %__MODULE__{}
end

def new(params) do
  struct(__MODULE__, params)
end
# Ready to open the voice websocket: no session process running yet, and
# session, gateway and token have all been received.
def ready_for_ws?(%__MODULE__{} = state) do
  not is_pid(state.session_pid) and
    not is_nil(state.session) and
    not is_nil(state.gateway) and
    not is_nil(state.token)
end

def ready_for_ws?(_other), do: false
# Ready to send RTP audio: every networking prerequisite has been set.
def ready_for_rtp?(%__MODULE__{} = state) do
  [state.ip, state.port, state.ssrc, state.secret_key, state.udp_socket]
  |> Enum.all?(&(not is_nil(&1)))
end

def ready_for_rtp?(_other), do: false
# True while the audio player process exists and is alive.
def playing?(%__MODULE__{player_pid: pid}) do
  is_pid(pid) and Process.alive?(pid)
end

def playing?(_other), do: false
# Tears down every runtime resource attached to this voice state: the
# audio player process, the external ffmpeg process, the voice UDP socket
# and the websocket session. Each step is skipped when the corresponding
# field was never set; always returns :ok.
def cleanup(%__MODULE__{} = v) do
  unless is_nil(v.player_pid) do
    if Process.alive?(v.player_pid) do
      # :cleanup exit reason distinguishes deliberate teardown from crashes.
      Process.exit(v.player_pid, :cleanup)
    end
  end

  unless is_nil(v.ffmpeg_proc) do
    if Proc.alive?(v.ffmpeg_proc) do
      Proc.stop(v.ffmpeg_proc)
    end
  end

  unless is_nil(v.udp_socket) do
    :gen_udp.close(v.udp_socket)
  end

  unless is_nil(v.session_pid) do
    # Presumably closes the voice gateway websocket — confirm in
    # Nostrum.Voice.Session.
    Session.close_connection(v.session_pid)
  end

  :ok
end

def cleanup(_), do: :ok
end
| 19.5625 | 57 | 0.61278 |
9e9d3ebf5c3911f9ec1d092211949b4580b0eafc | 162 | exs | Elixir | priv/repo/migrations/20210225080145_update_tx.exs | WeLightProject/WeLight-Portal | 6e701469423e3a62affdc415c4e8c186d603d324 | [
"MIT"
] | 2 | 2021-02-12T09:21:56.000Z | 2021-02-22T08:52:20.000Z | priv/repo/migrations/20210225080145_update_tx.exs | WeLightProject/WeLight-Portal | 6e701469423e3a62affdc415c4e8c186d603d324 | [
"MIT"
] | 4 | 2021-02-22T08:53:43.000Z | 2021-06-09T09:24:46.000Z | priv/repo/migrations/20210225080145_update_tx.exs | WeLightProject/WeLight-Portal | 6e701469423e3a62affdc415c4e8c186d603d324 | [
"MIT"
] | null | null | null | defmodule SuperIssuer.Repo.Migrations.UpdateTx do
use Ecto.Migration

# Adds a nullable integer `contract_id` column to the `tx` table.
def change do
  alter table(:tx) do
    add(:contract_id, :integer)
  end
end
end
| 16.2 | 49 | 0.709877 |
9e9d574c920592dff19c8831e7c8a95bf270a60d | 2,249 | exs | Elixir | mix.exs | esl/quantum-core | 9f14e2d3cff83047f55691264c0a4040ffd3ad97 | [
"Apache-2.0"
] | null | null | null | mix.exs | esl/quantum-core | 9f14e2d3cff83047f55691264c0a4040ffd3ad97 | [
"Apache-2.0"
] | null | null | null | mix.exs | esl/quantum-core | 9f14e2d3cff83047f55691264c0a4040ffd3ad97 | [
"Apache-2.0"
] | 1 | 2022-03-06T10:24:39.000Z | 2022-03-06T10:24:39.000Z | defmodule Quantum.Mixfile do
@moduledoc false
use Mix.Project
@version "2.3.4"
# Mix project definition for the quantum package.
def project do
  prod? = Mix.env() == :prod

  [
    app: :quantum,
    build_embedded: prod?,
    deps: deps(),
    description: "Cron-like job scheduler for Elixir.",
    docs: docs(),
    elixir: "~> 1.8",
    name: "Quantum",
    elixirc_paths: elixirc_paths(Mix.env()),
    package: package(),
    start_permanent: prod?,
    test_coverage: [tool: ExCoveralls],
    version: @version,
    dialyzer: [ignore_warnings: "dialyzer.ignore-warnings"]
  ]
end
# OTP application config: only :logger beyond the defaults.
def application, do: [extra_applications: [:logger]]
# Test builds additionally compile test/support helpers.
defp elixirc_paths(env) do
  case env do
    :test -> ["lib", "test/support"]
    _other -> ["lib"]
  end
end
# Hex package metadata: maintainers, license and repository links.
defp package do
  %{
    maintainers: [
      "Constantin Rack",
      "Dan Swain",
      "Lenz Gschwendtner",
      "Lucas Charles",
      "Rodion Vshevtsov",
      "Stanislav Krasnoyarov",
      "Kai Faber",
      "Jonatan Männchen"
    ],
    licenses: ["Apache License 2.0"],
    links: %{
      "Changelog" => "https://github.com/quantum-elixir/quantum-core/blob/master/CHANGELOG.md",
      "GitHub" => "https://github.com/quantum-elixir/quantum-core"
    }
  }
end
# ExDoc configuration: entry page, tagged source ref and the extra guides
# bundled into the generated documentation.
defp docs do
  [
    main: "readme",
    source_ref: "v#{@version}",
    source_url: "https://github.com/quantum-elixir/quantum-core",
    extras: [
      "README.md",
      "CHANGELOG.md",
      "MIGRATE-V3.md",
      "pages/supervision-tree.md",
      "pages/configuration.md",
      "pages/runtime.md",
      "pages/crontab-format.md",
      "pages/run-strategies.md",
      "pages/date-library.md"
    ]
  ]
end
defp deps do
  [
    # Runtime dependencies. tzdata is overridden so downstream apps can
    # pin their own version without a conflict.
    {:tzdata, "~> 1.0", override: true},
    {:crontab, "~> 1.1"},
    {:gen_stage, "~> 0.14"},
    # Dev/test-only tooling (docs, coverage, static analysis, linting).
    {:earmark, "~> 1.0", only: [:dev, :docs], runtime: false},
    {:ex_doc, "~> 0.19", only: [:dev, :docs], runtime: false},
    {:excoveralls, "~> 0.5", only: [:dev, :test], runtime: false},
    {:inch_ex, "~> 1.0", only: [:dev, :docs], runtime: false},
    {:dialyxir, "~> 1.0-rc", only: [:dev, :test], runtime: false},
    {:credo, "~> 0.7", only: [:dev, :test], runtime: false}
  ]
end
end
| 25.556818 | 97 | 0.54024 |
9e9db05bb3a826e8f6add1f3a3cdc4811619307c | 144 | ex | Elixir | lib/epi_locator_web/controllers/health_check_controller.ex | RatioPBC/epi-locator | 58c90500c4e0071ce365d76ec9812f9051d6a9f9 | [
"Apache-2.0"
] | null | null | null | lib/epi_locator_web/controllers/health_check_controller.ex | RatioPBC/epi-locator | 58c90500c4e0071ce365d76ec9812f9051d6a9f9 | [
"Apache-2.0"
] | 6 | 2021-10-19T01:55:57.000Z | 2022-02-15T01:04:19.000Z | lib/epi_locator_web/controllers/health_check_controller.ex | RatioPBC/epi-locator | 58c90500c4e0071ce365d76ec9812f9051d6a9f9 | [
"Apache-2.0"
] | 2 | 2022-01-21T08:38:50.000Z | 2022-01-21T08:42:04.000Z | defmodule EpiLocatorWeb.HealthCheckController do
use EpiLocatorWeb, :controller

# Health-check endpoint: always replies with plain-text "OK".
def index(conn, _params), do: text(conn, "OK")
end
| 18 | 48 | 0.75 |
9e9db2d2262572ffbcce3f0790fa0f95b05ca042 | 504 | ex | Elixir | lib/mix/tasks/check_and_stop_server.ex | TenTakano/Minecraft_Controller | a118a6e9694da3c0fdfa9ec93872790d38f093e3 | [
"MIT"
] | null | null | null | lib/mix/tasks/check_and_stop_server.ex | TenTakano/Minecraft_Controller | a118a6e9694da3c0fdfa9ec93872790d38f093e3 | [
"MIT"
] | 29 | 2021-02-13T06:54:56.000Z | 2021-06-06T09:55:36.000Z | lib/mix/tasks/check_and_stop_server.ex | TenTakano/Minecraft_Controller | a118a6e9694da3c0fdfa9ec93872790d38f093e3 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.CheckAndStopServer do
use Mix.Task

alias MinecraftController.RCON
alias MinecraftController.EC2

# Stops the Minecraft server and its EC2 instance, but only when no
# players are online: fetch the player list over RCON; if shutdown is
# allowed, issue the RCON stop command, then stop the instance.
#
# NOTE(review): the `else` only matches `false` (players online → :ok).
# Any `{:error, _}` from RCON.get_player_list/0 or RCON.stop_server/0
# raises WithClauseError — confirm whether crashing the task is intended.
def run(_) do
  with(
    {:ok, player_list} <- RCON.get_player_list(),
    true <- is_able_to_shutdown(player_list),
    {:ok, _} <- RCON.stop_server()
  ) do
    EC2.stop_instance()
  else
    false -> :ok
  end
end
# Shutdown is allowed only when the reported player count is zero; any
# other shape (non-zero count, missing key) forbids it.
@spec is_able_to_shutdown(map) :: boolean
defp is_able_to_shutdown(player_list), do: match?(%{count: 0}, player_list)
end
| 21.913043 | 51 | 0.666667 |
9e9deadd0fc1f4dcf514fc5f745c47f3656b23c9 | 1,090 | ex | Elixir | apps/legion/lib/identity/auth/access_control/permission_set.ex | i386-64/legion | 41ae99af9be962d7fb38726ddf4bb0456edb5ca4 | [
"Apache-2.0"
] | 1 | 2021-01-04T11:06:12.000Z | 2021-01-04T11:06:12.000Z | apps/legion/lib/identity/auth/access_control/permission_set.ex | i386-64/legion | 41ae99af9be962d7fb38726ddf4bb0456edb5ca4 | [
"Apache-2.0"
] | 3 | 2021-01-30T06:40:37.000Z | 2021-01-30T06:41:08.000Z | apps/legion/lib/identity/auth/access_control/permission_set.ex | i386-64/legion | 41ae99af9be962d7fb38726ddf4bb0456edb5ca4 | [
"Apache-2.0"
] | null | null | null | defmodule Legion.Identity.Auth.AccessControl.PermissionSet do
@moduledoc """
Contains a group of permissions to enhance ease of usage.
"""
use Legion.Stereotype, :model
alias Legion.Identity.Information.Registration
# Length limits come from application config, read at COMPILE time.
# NOTE(review): Application.get_env/2 in the module body freezes these
# values into the compiled beam; consider Application.compile_env/3 so
# config changes are tracked across recompiles.
@env Application.get_env(:legion, Legion.Identity.Auth.AccessControl)
@name_length Keyword.fetch!(@env, :permission_set_name_length)
@description_length Keyword.fetch!(@env, :permission_set_description_length)

# A named, user-owned group of permissions.
schema "permission_sets" do
  field :name
  field :description
  belongs_to :user, Registration
end
# Builds a changeset enforcing presence of all fields, the configured
# name/description length limits and the owning user's foreign key.
# FIXME: Proposal made by Chatatata, https://groups.google.com/forum/#!topic/elixir-ecto/GDTOHOiJ6qc.
def changeset(struct, params \\ %{}) do
  required = [:name, :description, :user_id]

  struct
  |> cast(params, required)
  |> validate_required(required)
  |> validate_length(:name, min: Enum.min(@name_length), max: Enum.max(@name_length))
  |> validate_length(:description,
    min: Enum.min(@description_length),
    max: Enum.max(@description_length)
  )
  |> foreign_key_constraint(:user_id)
end
end
| 34.0625 | 105 | 0.724771 |
9e9dfda1a8ac27aba8aec2d4c96709eda91f2b56 | 4,002 | ex | Elixir | lib/github/merge/api.ex | Simspace/bors-n | a8e8358d3a5e1e5056745e1811eef35d770b13d1 | [
"Apache-2.0"
] | null | null | null | lib/github/merge/api.ex | Simspace/bors-n | a8e8358d3a5e1e5056745e1811eef35d770b13d1 | [
"Apache-2.0"
] | 56 | 2021-06-16T19:23:06.000Z | 2022-03-28T15:11:50.000Z | lib/github/merge/api.ex | Simspace/bors-n | a8e8358d3a5e1e5056745e1811eef35d770b13d1 | [
"Apache-2.0"
] | 5 | 2020-11-18T23:38:29.000Z | 2021-09-30T17:45:56.000Z | defmodule BorsNG.GitHub.Merge.API do
@moduledoc """
Merge together batch patches using GitHub's API. This is
the default.
"""
alias BorsNG.Worker.Batcher
alias BorsNG.Database.Repo
alias BorsNG.Database.Project
alias BorsNG.GitHub
require Logger
def merge_batch!(batch, patch_links, base) do
  project = batch.project
  repo_conn = Project.installation_connection(project.repo_xref, Repo)
  stmp = "#{project.staging_branch}.tmp"

  # Seed the reduction with the base tree plus a synthetic commit placed
  # on the temporary staging branch.
  initial = %{
    tree: base.tree,
    commit:
      GitHub.synthesize_commit!(
        repo_conn,
        %{
          branch: stmp,
          tree: base.tree,
          parents: [base.commit],
          commit_message: "[ci skip][skip ci][skip netlify]",
          committer: nil
        }
      )
  }

  # Merge each patch onto the staging branch in turn; once a merge yields
  # :conflict or :canceled, that result is carried through unchanged.
  Enum.reduce(patch_links, initial, fn
    %{patch: _patch}, :conflict ->
      :conflict

    %{patch: _patch}, :canceled ->
      :canceled

    %{patch: patch}, _previous ->
      GitHub.merge_branch!(
        repo_conn,
        %{
          from: patch.commit,
          to: stmp,
          commit_message:
            "[ci skip][skip ci][skip netlify] -bors-staging-tmp-#{patch.pr_xref}"
        }
      )
  end)
end
  @doc """
  Squash-merges every patch in `patch_links` on top of `base`, returning the
  SHA of the final synthesized head commit.

  Each PR is first merged onto a temporary branch (to surface merge
  conflicts), then its cumulative diff is collapsed into a single commit
  appended to the previous head. The temporary branch is deleted afterwards.
  """
  def squash_merge_batch!(batch, patch_links, base, toml) do
    repo_conn = Project.installation_connection(batch.project.repo_xref, Repo)
    stmp = "#{batch.project.staging_branch}-squash-merge.tmp"
    # Start the temporary branch at the base commit.
    GitHub.force_push!(repo_conn, base.commit, stmp)
    new_head =
      Enum.reduce(patch_links, base.commit, fn patch_link, prev_head ->
        Logger.debug("Patch Link #{inspect(patch_link)}")
        Logger.debug("Patch #{inspect(patch_link.patch)}")
        {:ok, commits} = GitHub.get_pr_commits(repo_conn, patch_link.patch.pr_xref)
        {:ok, pr} = GitHub.get_pr(repo_conn, patch_link.patch.pr_xref)
        # The connection tuple carries the token in its first element.
        {token, _} = repo_conn
        user = GitHub.get_user_by_login!(token, pr.user.login)
        Logger.debug("PR #{inspect(pr)}")
        Logger.debug("User #{inspect(user)}")
        # If a user doesn't have a public email address in their GH profile
        # then get the email from the first commit to the PR
        user_email =
          if user.email != nil do
            user.email
          else
            Enum.at(commits, 0).author_email
          end
        # The head SHA is the final commit in the PR.
        source_sha = pr.head_sha
        Logger.info("Staging branch #{stmp}")
        Logger.info("Commit sha #{source_sha}")
        # Create a merge commit for each PR.
        # Because each PR is merged on top of each other in stmp, we can verify against any merge conflicts
        merge_commit =
          GitHub.merge_branch!(
            repo_conn,
            %{
              from: source_sha,
              to: stmp,
              commit_message: "[ci skip][skip ci][skip netlify] -bors-staging-tmp-#{source_sha}"
            }
          )
        Logger.info("Merge Commit #{inspect(merge_commit)}")
        Logger.info("Previous Head #{inspect(prev_head)}")
        # Then compress the merge commit's changes into a single commit,
        # append it to the previous commit
        # Because the merges are iterative they contain *only* the changes from the PR vs the previous PR(or head)
        commit_message =
          Batcher.Message.generate_squash_commit_message(
            pr,
            commits,
            user_email,
            toml.cut_body_after
          )
        cpt =
          GitHub.create_commit!(
            repo_conn,
            %{
              tree: merge_commit.tree,
              parents: [prev_head],
              commit_message: commit_message,
              committer: %{name: user.name || user.login, email: user_email}
            }
          )
        Logger.info("Commit Sha #{inspect(cpt)}")
        # The new commit becomes the parent of the next squashed patch.
        cpt
      end)
    # Clean up the temporary working branch.
    GitHub.delete_branch!(repo_conn, stmp)
    new_head
  end
end
| 28.791367 | 114 | 0.577711 |
9e9e06bddf2f94199cf5c05f67c89018ccfadbe8 | 3,758 | ex | Elixir | lib/exfile_b2/local_cache.ex | keichan34/exfile-b2 | 4251f5c8fba6b11980d048c5e8bdcf1d4407984d | [
"MIT"
] | 8 | 2016-01-08T09:16:50.000Z | 2019-05-10T19:44:29.000Z | lib/exfile_b2/local_cache.ex | keichan34/exfile-b2 | 4251f5c8fba6b11980d048c5e8bdcf1d4407984d | [
"MIT"
] | 3 | 2016-03-28T12:10:10.000Z | 2016-08-16T06:03:18.000Z | lib/exfile_b2/local_cache.ex | keichan34/exfile-b2 | 4251f5c8fba6b11980d048c5e8bdcf1d4407984d | [
"MIT"
] | 2 | 2016-07-02T23:35:50.000Z | 2019-09-15T14:59:43.000Z | defmodule ExfileB2.LocalCache do
@moduledoc """
The manager for ExfileB2's local cache of files.
"""
use GenServer
# In ms, 30 seconds.
@vacuum_interval 30_000
  @doc "Starts the cache manager as a locally-named singleton GenServer."
  def start_link do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end
  @doc """
  Looks up `key` in the cache.

  Returns `{:ok, byte_size, path}` and refreshes the entry's last-used
  timestamp, or `:error` when the key is not cached.
  """
  def fetch(key),
    do: GenServer.call(__MODULE__, {:fetch, key})
def store(key, iodata) do
delete(key)
byte_size = :erlang.iolist_size(iodata)
case GenServer.call(__MODULE__, {:store, key, byte_size}) do
{:ok, path} ->
copy_iodata_to_path(iodata, path)
error ->
{:error, error}
end
end
defp copy_iodata_to_path(iodata, path) do
case File.open(path, [:write], &IO.binwrite(&1, iodata)) do
{:ok, _} ->
{:ok, path}
error -> error
end
end
  @doc "Removes `key` from the cache and deletes its backing file, if any."
  def delete(key),
    do: GenServer.call(__MODULE__, {:delete, key})
  @doc """
  Asks the server for size information about `key`.

  NOTE(review): confirm the server implements a matching
  `handle_call({:size, key}, ...)` clause before relying on this.
  """
  def size(key),
    do: GenServer.call(__MODULE__, {:size, key})
  @doc "Deletes every cached file and resets the cache to empty."
  def flush(),
    do: GenServer.call(__MODULE__, :flush)
  @doc "Synchronously evicts LRU entries until usage is under the size limit."
  def vacuum(),
    do: GenServer.call(__MODULE__, :vacuum)
## GenServer Callbacks
def init(:ok) do
Process.send_after(self, :vacuum, @vacuum_interval)
{:ok, initial_state}
end
  # Looks up `key` and refreshes its last-used timestamp; delegates the
  # bookkeeping to perform_fetch/2.
  def handle_call({:fetch, key}, _from, state) do
    {reply, state} = perform_fetch(state, key)
    {:reply, reply, state}
  end
  # Allocates a temp file and registers a cache entry {last_used_ts,
  # byte_size, path} for it. The caller writes the actual bytes to `path`
  # afterwards (see store/2), so the entry briefly precedes the data.
  def handle_call({:store, key, byte_size}, _from, state) do
    {reply, state} = case Exfile.Tempfile.random_file("b2-local-cache") do
      {:ok, path} ->
        # Record the entry and bump the byte accounting used by the vacuum.
        state = state
        |> update_in([:cache], &Map.put(&1, key, {ts, byte_size, path}))
        |> update_in([:bytes_used], &(&1 + byte_size))
        {{:ok, path}, state}
      error ->
        # Tempfile allocation failed; reply with the error tuple unchanged.
        {error, state}
    end
    {:reply, reply, state}
  end
def handle_call({:delete, key}, _from, state) do
{:reply, :ok, perform_delete(state, key)}
end
def handle_call(:flush, _from, state) do
:ok = perform_flush(state)
{:reply, :ok, initial_state}
end
def handle_call(:vacuum, _from, state) do
{:reply, :ok, perform_vacuum(state, cache_size)}
end
def handle_info(:vacuum, state) do
state = perform_vacuum(state, cache_size)
_ = Process.send_after(self, :vacuum, @vacuum_interval)
{:noreply, state}
end
def terminate(_reason, state) do
perform_flush(state)
end
defp perform_fetch(%{cache: cache} = state, key) do
case Map.fetch(cache, key) do
{:ok, {_last_used, byte_size, path}} ->
state = state
|> update_in([:cache], &Map.put(&1, key, {ts, byte_size, path}))
{{:ok, byte_size, path}, state}
_ ->
{:error, state}
end
end
defp perform_delete(%{cache: cache} = state, key) do
case Map.fetch(cache, key) do
{:ok, {_, byte_size, path}} ->
_ = File.rm(path)
state
|> update_in([:cache], &Map.delete(&1, key))
|> update_in([:bytes_used], &(&1 - byte_size))
_ ->
state
end
end
defp perform_flush(%{cache: cache}) do
for {_, _, path} <- Map.values(cache) do
_ = File.rm(path)
end
:ok
end
  # Evicts least-recently-used entries until tracked usage drops below
  # `cache_size`. Recursion terminates because each pass deletes the LRU
  # entry and shrinks :bytes_used.
  # NOTE(review): assumes :bytes_used stays accurate; if an eviction ever
  # failed to shrink it this would loop forever - confirm.
  defp perform_vacuum(%{bytes_used: bytes} = state, cache_size) when bytes < cache_size,
    do: state
  defp perform_vacuum(%{cache: cache} = state, cache_size) do
    state
    |> perform_delete(lru_key cache)
    |> perform_vacuum(cache_size)
  end
defp lru_key(cache) do
Enum.reduce(cache, {ts + 1_000_000, nil}, fn
({key, {time, _, _}}, {least_access_time, _key}) when time < least_access_time ->
{time, key}
(_, {time, key}) ->
{time, key}
end) |> elem(1)
end
defp ts, do: :erlang.system_time(:micro_seconds)
defp cache_size,
do: Application.get_env(:exfile_b2, :local_cache_size, 100_000_000)
defp initial_state, do: %{cache: %{}, bytes_used: 0}
end
| 25.053333 | 88 | 0.611495 |
9e9e2bb965267b4d745d48baefe88a6eceec3c74 | 1,134 | exs | Elixir | test/level_web/graphql/queries/list_bookmarks_test.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 928 | 2018-04-03T16:18:11.000Z | 2019-09-09T17:59:55.000Z | test/level_web/graphql/queries/list_bookmarks_test.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 74 | 2018-04-03T00:46:50.000Z | 2019-03-10T18:57:27.000Z | test/level_web/graphql/queries/list_bookmarks_test.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 89 | 2018-04-03T17:33:20.000Z | 2019-08-19T03:40:20.000Z | defmodule LevelWeb.GraphQL.ListBookmarksTest do
use LevelWeb.ConnCase, async: true
import LevelWeb.GraphQL.TestHelpers
alias Level.Groups
  # GraphQL document under test: fetches the names of the groups the
  # authenticated space user has bookmarked.
  @query """
  query GetBookmarkedGroups(
    $space_id: ID!
  ) {
    spaceUser(spaceId: $space_id) {
      bookmarks {
        name
      }
    }
  }
  """
  # Creates a user and authenticates the test connection with a JWT so each
  # test starts from a logged-in state.
  setup %{conn: conn} do
    {:ok, user} = create_user()
    conn = authenticate_with_jwt(conn, user)
    {:ok, %{conn: conn, user: user}}
  end
  test "space memberships can list bookmarked groups", %{conn: conn, user: user} do
    # Arrange: a space, a group inside it, and a bookmark on that group.
    {:ok, %{space: space, space_user: space_user}} = create_space(user, %{name: "Level"})
    {:ok, %{group: group}} = create_group(space_user, %{name: "Engineering"})
    Groups.bookmark_group(group, space_user)
    # Act: run the GraphQL query for the created space.
    conn =
      conn
      |> put_graphql_headers()
      |> post("/graphql", %{query: @query, variables: %{space_id: space.id}})
    # Assert: the bookmarked group is present in the response payload.
    %{
      "data" => %{
        "spaceUser" => %{
          "bookmarks" => bookmarks
        }
      }
    } = json_response(conn, 200)
    assert Enum.any?(bookmarks, fn bookmark -> bookmark["name"] == "Engineering" end)
  end
end
| 24.652174 | 89 | 0.590829 |
9e9e3b6952a6d63634d348c4af1e8199ee00d72b | 557 | ex | Elixir | lib/supreme_tsugu_chan_web/router.ex | c18t/supreme-tsugu-chan | 9d1d4cffcd917f2454a8a2918389ea239f2a6cdc | [
"MIT"
] | null | null | null | lib/supreme_tsugu_chan_web/router.ex | c18t/supreme-tsugu-chan | 9d1d4cffcd917f2454a8a2918389ea239f2a6cdc | [
"MIT"
] | null | null | null | lib/supreme_tsugu_chan_web/router.ex | c18t/supreme-tsugu-chan | 9d1d4cffcd917f2454a8a2918389ea239f2a6cdc | [
"MIT"
] | null | null | null | defmodule SupremeTsuguChanWeb.Router do
use SupremeTsuguChanWeb, :router
  # Plug pipeline for browser-facing requests: session, flash, CSRF
  # protection and secure response headers.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end
  # Minimal pipeline for JSON API requests (no routes use it yet).
  pipeline :api do
    plug :accepts, ["json"]
  end
  # Root scope: serves the landing page through the browser pipeline.
  scope "/", SupremeTsuguChanWeb do
    pipe_through :browser # Use the default browser stack
    get "/", PageController, :index
  end
  # Other scopes may use custom stacks.
  # scope "/api", SupremeTsuguChanWeb do
  #   pipe_through :api
  # end
end
| 20.62963 | 57 | 0.70018 |
9e9e504d7b26ea191712b0e9342cc96e94baa65d | 2,323 | exs | Elixir | test/remitano/remitano_test.exs | HPJM/crypto_apis | fe8b191a7b95ec3d066bf1881277c3f920df1457 | [
"MIT"
] | null | null | null | test/remitano/remitano_test.exs | HPJM/crypto_apis | fe8b191a7b95ec3d066bf1881277c3f920df1457 | [
"MIT"
] | null | null | null | test/remitano/remitano_test.exs | HPJM/crypto_apis | fe8b191a7b95ec3d066bf1881277c3f920df1457 | [
"MIT"
] | null | null | null | defmodule CryptoApis.RemitanoTest do
use ExUnit.Case
import Mock
alias CryptoApis.Remitano
import CryptoApis.Fixtures
describe "order_book" do
test "order_book/1 responds ok" do
with_mock HTTPoison,
get: fn url, _headers, options ->
{:ok, successful_response(url: url, options: options)}
end do
assert {:ok, response} = Remitano.order_book(:BTCVND)
assert response.status_code == 200
assert response.request_url == "https://api.remitano.com/api/v1/markets/BTCVND/order_book"
end
end
end
describe "trades" do
test "trades/1 responds ok" do
with_mock HTTPoison,
get: fn url, _headers, options ->
{:ok, successful_response(url: url, options: options)}
end do
assert {:ok, response} = Remitano.trades("BTCVND")
assert response.status_code == 200
assert response.request_url == "https://api.remitano.com/api/v1/markets/BTCVND/trades"
end
end
end
describe "volume" do
test "volume/0 responds ok" do
with_mock HTTPoison,
get: fn url, _headers, options ->
{:ok, successful_response(url: url, options: options)}
end do
assert {:ok, response} = Remitano.volume()
assert response.status_code == 200
assert response.request_url ==
"https://api.remitano.com/api/v1/volumes/market_summaries"
end
end
end
describe "currencies" do
test "currencies/0 responds ok" do
with_mock HTTPoison,
get: fn url, _headers, options ->
{:ok, successful_response(url: url, options: options)}
end do
assert {:ok, response} = Remitano.currencies()
assert response.status_code == 200
assert response.request_url ==
"https://api.remitano.com/api/v1/currencies/info"
end
end
end
describe "markets" do
test "markets/0 responds ok" do
with_mock HTTPoison,
get: fn url, _headers, options ->
{:ok, successful_response(url: url, options: options)}
end do
assert {:ok, response} = Remitano.markets()
assert response.status_code == 200
assert response.request_url ==
"https://api.remitano.com/api/v1/markets/info"
end
end
end
end
| 29.782051 | 98 | 0.62204 |
9e9e52dee05f263304a1e2bb5c1950295c95120d | 1,892 | ex | Elixir | lib/sftp/stream.ex | wstucco/sftp_ex | 33e60c691a1aece024aca9d163b8aa24100d9d7d | [
"MIT"
] | null | null | null | lib/sftp/stream.ex | wstucco/sftp_ex | 33e60c691a1aece024aca9d163b8aa24100d9d7d | [
"MIT"
] | null | null | null | lib/sftp/stream.ex | wstucco/sftp_ex | 33e60c691a1aece024aca9d163b8aa24100d9d7d | [
"MIT"
] | null | null | null | defmodule SFTP.Stream do
@moduledoc "
A stream to download/upload a file from a server through SFTP
"
alias SFTP.AccessService, as: AccessSvc
alias SFTP.TransferService, as: TransferSvc
defstruct connection: nil, path: nil, byte_length: 32768
@type t :: %__MODULE__{}
@doc false
def __build__(connection, path, byte_length) do
%SFTP.Stream{connection: connection, path: path, byte_length: byte_length}
end
defimpl Collectable do
def into(%{connection: connection, path: path, byte_length: byte_length} = stream) do
case AccessSvc.open(connection, path, [:write, :binary, :creat]) do
{:error, reason} -> {:error, reason}
{:ok, handle} -> {:ok, into(connection, handle, stream)}
end
end
defp into(connection, handle, stream) do
fn
:ok, {:cont, x} -> TransferSvc.write(connection, handle, x)
:ok, :done ->
:ok = AccessSvc.close(connection, handle)
stream
:ok, :halt ->
:ok = AccessSvc.close(connection, handle)
end
end
end
defimpl Enumerable do
def reduce(%{connection: connection, path: path, byte_length: byte_length}, acc, fun) do
start_function =
fn ->
case AccessSvc.open(connection, path, [:read, :binary]) do
{:error, reason} -> raise File.Error, reason: reason, action: "stream", path: path
{:ok, handle} -> handle
end
end
next_function = &TransferSvc.each_binstream(connection, &1, byte_length)
close_function = &AccessSvc.close(connection, &1)
Stream.resource(start_function, next_function, close_function).(acc, fun)
end
def count(_stream) do
{:error, __MODULE__}
end
def member?(_stream, _term) do
{:error, __MODULE__}
end
end
end | 30.516129 | 100 | 0.607822 |
9e9e8192e41116b5b4222ac741bf265a96a15eac | 475 | ex | Elixir | lib/rasa_sdk/model/template_description.ex | r26D/rasa-sdk-elixir | 65da9444add5da2e2c0989c36bcbdf902733c709 | [
"Apache-2.0"
] | 2 | 2020-09-24T09:58:52.000Z | 2021-02-08T12:35:55.000Z | lib/rasa_sdk/model/template_description.ex | r26D/rasa-sdk-elixir | 65da9444add5da2e2c0989c36bcbdf902733c709 | [
"Apache-2.0"
] | null | null | null | lib/rasa_sdk/model/template_description.ex | r26D/rasa-sdk-elixir | 65da9444add5da2e2c0989c36bcbdf902733c709 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule RasaSDK.Model.TemplateDescription do
  @moduledoc """
  Response template description: a struct holding the `text` of a template.

  Auto-generated by OpenAPI Generator; do not edit manually.
  """
  @derive [Poison.Encoder]
  defstruct [
    :text
  ]

  @type t :: %__MODULE__{
          text: String.t()
        }
end
defimpl Poison.Decoder, for: RasaSDK.Model.TemplateDescription do
  # No nested structures to post-process, so decoding is the identity.
  def decode(value, _options), do: value
end
| 19 | 91 | 0.682105 |
9e9e83fde854743cced956778f34ef0b6f44a0d2 | 705 | exs | Elixir | test/inmana_web/views/restaurants_view_test.exs | alexfariac/inmana | 8ff51040c7c4902ef7a62b129373e5cad054c275 | [
"MIT"
] | null | null | null | test/inmana_web/views/restaurants_view_test.exs | alexfariac/inmana | 8ff51040c7c4902ef7a62b129373e5cad054c275 | [
"MIT"
] | null | null | null | test/inmana_web/views/restaurants_view_test.exs | alexfariac/inmana | 8ff51040c7c4902ef7a62b129373e5cad054c275 | [
"MIT"
] | null | null | null | defmodule InmanaWeb.RestaurantsViewTest do
use InmanaWeb.ConnCase, async: true
import Phoenix.View
alias InmanaWeb.RestaurantsView
alias Inmana.Restaurant
describe "render/2" do
test "Renders create.json" do
params = %{name: "Siri Cascudo", email: "[email protected]"}
{:ok, restaurant} = Inmana.create_restaurant(params)
response = render(RestaurantsView, "create.json", restaurant: restaurant)
assert %{
message: "Restaurant Created!",
restaurant: %Restaurant{
email: "[email protected]",
id: _id,
name: "Siri Cascudo"
}
} = response
end
end
end
| 25.178571 | 79 | 0.597163 |
9e9ec15811ec732900e7cf1bf34ae390f5d477c3 | 379 | ex | Elixir | test/factories/assessments/submission_factory.ex | Hou-Rui/cadet | f9036d76005bf3b267b632dce176067ae1a19f71 | [
"Apache-2.0"
] | null | null | null | test/factories/assessments/submission_factory.ex | Hou-Rui/cadet | f9036d76005bf3b267b632dce176067ae1a19f71 | [
"Apache-2.0"
] | 10 | 2022-02-24T17:57:38.000Z | 2022-03-31T07:43:05.000Z | test/factories/assessments/submission_factory.ex | Hou-Rui/cadet | f9036d76005bf3b267b632dce176067ae1a19f71 | [
"Apache-2.0"
] | 1 | 2019-07-17T15:51:58.000Z | 2019-07-17T15:51:58.000Z | defmodule Cadet.Assessments.SubmissionFactory do
@moduledoc """
Factory for the Submission entity
"""
  # Injects a `submission_factory/0` definition into the module that calls
  # `use Cadet.Assessments.SubmissionFactory`: a Submission built from a
  # student user and an assessment.
  # NOTE(review): `build/2` is presumably provided by the factory library
  # (e.g. ExMachina) in the using module - confirm.
  defmacro __using__(_opts) do
    quote do
      alias Cadet.Assessments.Submission
      def submission_factory do
        %Submission{
          student: build(:user, %{role: :student}),
          assessment: build(:assessment)
        }
      end
    end
  end
end
| 19.947368 | 51 | 0.635884 |
9e9ed5274dc0b61df738021f189b6d3512cab7df | 1,359 | ex | Elixir | lib/codes/codes_w67.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_w67.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_w67.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_W67 do
alias IcdCode.ICDCode
def _W67XXXA do
%ICDCode{full_code: "W67XXXA",
category_code: "W67",
short_code: "XXXA",
full_name: "Accidental drowning and submersion while in swimming-pool, initial encounter",
short_name: "Accidental drowning and submersion while in swimming-pool, initial encounter",
category_name: "Accidental drowning and submersion while in swimming-pool, initial encounter"
}
end
def _W67XXXD do
%ICDCode{full_code: "W67XXXD",
category_code: "W67",
short_code: "XXXD",
full_name: "Accidental drowning and submersion while in swimming-pool, subsequent encounter",
short_name: "Accidental drowning and submersion while in swimming-pool, subsequent encounter",
category_name: "Accidental drowning and submersion while in swimming-pool, subsequent encounter"
}
end
def _W67XXXS do
%ICDCode{full_code: "W67XXXS",
category_code: "W67",
short_code: "XXXS",
full_name: "Accidental drowning and submersion while in swimming-pool, sequela",
short_name: "Accidental drowning and submersion while in swimming-pool, sequela",
category_name: "Accidental drowning and submersion while in swimming-pool, sequela"
}
end
end
| 39.970588 | 106 | 0.688006 |
9e9ed695f43304a3176eb45f8c78e49a7c81ff70 | 1,867 | ex | Elixir | lib/rayray/world.ex | ckampfe/rayray | 933f5fc095c8e959adf2f685ead4e780f766e451 | [
"MIT"
] | null | null | null | lib/rayray/world.ex | ckampfe/rayray | 933f5fc095c8e959adf2f685ead4e780f766e451 | [
"MIT"
] | null | null | null | lib/rayray/world.ex | ckampfe/rayray | 933f5fc095c8e959adf2f685ead4e780f766e451 | [
"MIT"
] | null | null | null | defmodule Rayray.World do
alias Rayray.Intersect
alias Rayray.Intersection
alias Rayray.Lights
alias Rayray.Material
alias Rayray.Matrix
alias Rayray.Ray
alias Rayray.Sphere
alias Rayray.Tuple
defstruct objects: [], light: nil
def new() do
%__MODULE__{}
end
  # Builds the standard two-sphere test world: a point light, an outer
  # sphere with a custom-colored material, and an inner sphere scaled to
  # half size.
  def default() do
    light = Lights.point_light(Tuple.point(-10, 10, -10), Tuple.color(1, 1, 1))
    material = Material.new()
    # Override the default material's color and lighting coefficients.
    material = %{material | color: Tuple.color(0.8, 1.0, 0.6), diffuse: 0.7, specular: 0.2}
    s1 = Sphere.new()
    s1 = %{s1 | material: material}
    s2 = Sphere.new()
    s2 = %{s2 | transform: Matrix.scaling(0.5, 0.5, 0.5)}
    %__MODULE__{
      objects: [s1, s2],
      light: light
    }
  end
def contains?(%__MODULE__{objects: objects}, object) do
:lists.member(object, objects)
end
def intersect_world(world, ray) do
world.objects
|> Enum.flat_map(fn object ->
Intersect.intersect(object, ray)
end)
|> Enum.sort_by(fn %{t: t} ->
t
end)
end
  # Computes the shaded color at a precomputed intersection (`comps`),
  # delegating to the lighting model with the shadow test result.
  # `over_point` (the hit nudged along the normal) avoids shadow acne.
  def shade_hit(world, comps) do
    Lights.lighting(
      comps.object.material,
      world.light,
      comps.over_point,
      comps.eyev,
      comps.normalv,
      is_shadowed(world, comps.over_point)
    )
  end
def color_at(world, ray) do
intersections = intersect_world(world, ray)
intersection = Intersection.hit(intersections)
if is_nil(intersection) do
Tuple.color(0, 0, 0)
else
comps = Intersection.prepare_computations(intersection, ray)
shade_hit(world, comps)
end
end
def is_shadowed(world, point) do
v = Tuple.subtract(world.light.position, point)
distance = Tuple.magnitude(v)
direction = Tuple.normalize(v)
ray = Ray.new(point, direction)
intersections = intersect_world(world, ray)
hit = Intersection.hit(intersections)
hit && hit.t < distance
end
end
| 23.3375 | 91 | 0.647563 |
9e9edab659c0b9e7b4736c2297ef6d923d6e2cfa | 1,806 | ex | Elixir | lib/codes/codes_a23.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_a23.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_a23.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_A23 do
alias IcdCode.ICDCode
def _A230 do
%ICDCode{full_code: "A230",
category_code: "A23",
short_code: "0",
full_name: "Brucellosis due to Brucella melitensis",
short_name: "Brucellosis due to Brucella melitensis",
category_name: "Brucellosis due to Brucella melitensis"
}
end
def _A231 do
%ICDCode{full_code: "A231",
category_code: "A23",
short_code: "1",
full_name: "Brucellosis due to Brucella abortus",
short_name: "Brucellosis due to Brucella abortus",
category_name: "Brucellosis due to Brucella abortus"
}
end
def _A232 do
%ICDCode{full_code: "A232",
category_code: "A23",
short_code: "2",
full_name: "Brucellosis due to Brucella suis",
short_name: "Brucellosis due to Brucella suis",
category_name: "Brucellosis due to Brucella suis"
}
end
def _A233 do
%ICDCode{full_code: "A233",
category_code: "A23",
short_code: "3",
full_name: "Brucellosis due to Brucella canis",
short_name: "Brucellosis due to Brucella canis",
category_name: "Brucellosis due to Brucella canis"
}
end
def _A238 do
%ICDCode{full_code: "A238",
category_code: "A23",
short_code: "8",
full_name: "Other brucellosis",
short_name: "Other brucellosis",
category_name: "Other brucellosis"
}
end
def _A239 do
%ICDCode{full_code: "A239",
category_code: "A23",
short_code: "9",
full_name: "Brucellosis, unspecified",
short_name: "Brucellosis, unspecified",
category_name: "Brucellosis, unspecified"
}
end
end
| 29.606557 | 65 | 0.602436 |
9e9eedccc8112dac7d6a7d5e72a0380fcdc99874 | 13,237 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/table.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/table.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/table.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Model.Table do
@moduledoc """
## Attributes
* `etag` (*type:* `String.t`, *default:* `nil`) - [Output-only] A hash of the table metadata. Used to ensure there were no concurrent modifications to the resource when attempting an update. Not guaranteed to change when the table contents or the fields numRows, numBytes, numLongTermBytes or lastModifiedTime change.
* `numLongTermBytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] The number of bytes in the table that are considered "long-term storage".
* `requirePartitionFilter` (*type:* `boolean()`, *default:* `false`) - [Optional] If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
* `num_long_term_physical_bytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] Number of physical bytes more than 90 days old. This data is not kept in real time, and might be delayed by a few seconds to a few minutes.
* `num_partitions` (*type:* `String.t`, *default:* `nil`) - [Output-only] The number of partitions present in the table or materialized view. This data is not kept in real time, and might be delayed by a few seconds to a few minutes.
* `description` (*type:* `String.t`, *default:* `nil`) - [Optional] A user-friendly description of this table.
* `snapshotDefinition` (*type:* `GoogleApi.BigQuery.V2.Model.SnapshotDefinition.t`, *default:* `nil`) - [Output-only] Snapshot definition.
* `kind` (*type:* `String.t`, *default:* `bigquery#table`) - [Output-only] The type of the resource.
* `cloneDefinition` (*type:* `GoogleApi.BigQuery.V2.Model.CloneDefinition.t`, *default:* `nil`) - [Output-only] Clone definition.
* `num_active_physical_bytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] Number of physical bytes less than 90 days old. This data is not kept in real time, and might be delayed by a few seconds to a few minutes.
* `numPhysicalBytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] [TrustedTester] The physical size of this table in bytes, excluding any data in the streaming buffer. This includes compression and storage used for time travel.
* `numBytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] The size of this table in bytes, excluding any data in the streaming buffer.
* `num_time_travel_physical_bytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] Number of physical bytes used by time travel storage (deleted or changed data). This data is not kept in real time, and might be delayed by a few seconds to a few minutes.
* `lastModifiedTime` (*type:* `String.t`, *default:* `nil`) - [Output-only] The time when this table was last modified, in milliseconds since the epoch.
* `clustering` (*type:* `GoogleApi.BigQuery.V2.Model.Clustering.t`, *default:* `nil`) - [Beta] Clustering specification for the table. Must be specified with partitioning, data in the table will be first partitioned and subsequently clustered.
* `location` (*type:* `String.t`, *default:* `nil`) - [Output-only] The geographic location where the table resides. This value is inherited from the dataset.
* `friendlyName` (*type:* `String.t`, *default:* `nil`) - [Optional] A descriptive name for this table.
* `expirationTime` (*type:* `String.t`, *default:* `nil`) - [Optional] The time when this table expires, in milliseconds since the epoch. If not present, the table will persist indefinitely. Expired tables will be deleted and their storage reclaimed. The defaultTableExpirationMs property of the encapsulating dataset can be used to set a default expirationTime on newly created tables.
* `timePartitioning` (*type:* `GoogleApi.BigQuery.V2.Model.TimePartitioning.t`, *default:* `nil`) - Time-based partitioning specification for this table. Only one of timePartitioning and rangePartitioning should be specified.
* `model` (*type:* `GoogleApi.BigQuery.V2.Model.ModelDefinition.t`, *default:* `nil`) - [Output-only, Beta] Present iff this table represents a ML model. Describes the training information for the model, and it is required to run 'PREDICT' queries.
* `creationTime` (*type:* `String.t`, *default:* `nil`) - [Output-only] The time when this table was created, in milliseconds since the epoch.
* `view` (*type:* `GoogleApi.BigQuery.V2.Model.ViewDefinition.t`, *default:* `nil`) - [Optional] The view definition.
* `num_total_logical_bytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] Total number of logical bytes in the table or materialized view.
* `defaultCollation` (*type:* `String.t`, *default:* `nil`) - [Output-only] The default collation of the table.
* `num_active_logical_bytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] Number of logical bytes that are less than 90 days old.
* `tableReference` (*type:* `GoogleApi.BigQuery.V2.Model.TableReference.t`, *default:* `nil`) - [Required] Reference describing the ID of this table.
* `externalDataConfiguration` (*type:* `GoogleApi.BigQuery.V2.Model.ExternalDataConfiguration.t`, *default:* `nil`) - [Optional] Describes the data format, location, and other properties of a table stored outside of BigQuery. By defining these properties, the data source can then be queried as if it were a standard BigQuery table.
* `type` (*type:* `String.t`, *default:* `nil`) - [Output-only] Describes the table type. The following values are supported: TABLE: A normal BigQuery table. VIEW: A virtual table defined by a SQL query. SNAPSHOT: An immutable, read-only table that is a copy of another table. [TrustedTester] MATERIALIZED_VIEW: SQL query whose result is persisted. EXTERNAL: A table that references data stored in an external storage system, such as Google Cloud Storage. The default value is TABLE.
* `numRows` (*type:* `String.t`, *default:* `nil`) - [Output-only] The number of rows of data in this table, excluding any data in the streaming buffer.
* `id` (*type:* `String.t`, *default:* `nil`) - [Output-only] An opaque ID uniquely identifying the table.
* `num_total_physical_bytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] The physical size of this table in bytes. This also includes storage used for time travel. This data is not kept in real time, and might be delayed by a few seconds to a few minutes.
* `rangePartitioning` (*type:* `GoogleApi.BigQuery.V2.Model.RangePartitioning.t`, *default:* `nil`) - [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning and rangePartitioning should be specified.
* `materializedView` (*type:* `GoogleApi.BigQuery.V2.Model.MaterializedViewDefinition.t`, *default:* `nil`) - [Optional] Materialized view definition.
* `labels` (*type:* `map()`, *default:* `nil`) - The labels associated with this table. You can use these to organize and group your tables. Label keys and values can be no longer than 63 characters, can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. Label values are optional. Label keys must start with a letter and each label in the list must have a different key.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - [Output-only] A URL that can be used to access this resource again.
* `encryptionConfiguration` (*type:* `GoogleApi.BigQuery.V2.Model.EncryptionConfiguration.t`, *default:* `nil`) - Custom encryption configuration (e.g., Cloud KMS keys).
* `schema` (*type:* `GoogleApi.BigQuery.V2.Model.TableSchema.t`, *default:* `nil`) - [Optional] Describes the schema of this table.
* `num_long_term_logical_bytes` (*type:* `String.t`, *default:* `nil`) - [Output-only] Number of logical bytes that are more than 90 days old.
* `streamingBuffer` (*type:* `GoogleApi.BigQuery.V2.Model.Streamingbuffer.t`, *default:* `nil`) - [Output-only] Contains information regarding this table's streaming buffer, if one is present. This field will be absent if the table is not being streamed to or if there is no data in the streaming buffer.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:etag => String.t() | nil,
:numLongTermBytes => String.t() | nil,
:requirePartitionFilter => boolean() | nil,
:num_long_term_physical_bytes => String.t() | nil,
:num_partitions => String.t() | nil,
:description => String.t() | nil,
:snapshotDefinition => GoogleApi.BigQuery.V2.Model.SnapshotDefinition.t() | nil,
:kind => String.t() | nil,
:cloneDefinition => GoogleApi.BigQuery.V2.Model.CloneDefinition.t() | nil,
:num_active_physical_bytes => String.t() | nil,
:numPhysicalBytes => String.t() | nil,
:numBytes => String.t() | nil,
:num_time_travel_physical_bytes => String.t() | nil,
:lastModifiedTime => String.t() | nil,
:clustering => GoogleApi.BigQuery.V2.Model.Clustering.t() | nil,
:location => String.t() | nil,
:friendlyName => String.t() | nil,
:expirationTime => String.t() | nil,
:timePartitioning => GoogleApi.BigQuery.V2.Model.TimePartitioning.t() | nil,
:model => GoogleApi.BigQuery.V2.Model.ModelDefinition.t() | nil,
:creationTime => String.t() | nil,
:view => GoogleApi.BigQuery.V2.Model.ViewDefinition.t() | nil,
:num_total_logical_bytes => String.t() | nil,
:defaultCollation => String.t() | nil,
:num_active_logical_bytes => String.t() | nil,
:tableReference => GoogleApi.BigQuery.V2.Model.TableReference.t() | nil,
:externalDataConfiguration =>
GoogleApi.BigQuery.V2.Model.ExternalDataConfiguration.t() | nil,
:type => String.t() | nil,
:numRows => String.t() | nil,
:id => String.t() | nil,
:num_total_physical_bytes => String.t() | nil,
:rangePartitioning => GoogleApi.BigQuery.V2.Model.RangePartitioning.t() | nil,
:materializedView => GoogleApi.BigQuery.V2.Model.MaterializedViewDefinition.t() | nil,
:labels => map() | nil,
:selfLink => String.t() | nil,
:encryptionConfiguration =>
GoogleApi.BigQuery.V2.Model.EncryptionConfiguration.t() | nil,
:schema => GoogleApi.BigQuery.V2.Model.TableSchema.t() | nil,
:num_long_term_logical_bytes => String.t() | nil,
:streamingBuffer => GoogleApi.BigQuery.V2.Model.Streamingbuffer.t() | nil
}
field(:etag)
field(:numLongTermBytes)
field(:requirePartitionFilter)
field(:num_long_term_physical_bytes)
field(:num_partitions)
field(:description)
field(:snapshotDefinition, as: GoogleApi.BigQuery.V2.Model.SnapshotDefinition)
field(:kind)
field(:cloneDefinition, as: GoogleApi.BigQuery.V2.Model.CloneDefinition)
field(:num_active_physical_bytes)
field(:numPhysicalBytes)
field(:numBytes)
field(:num_time_travel_physical_bytes)
field(:lastModifiedTime)
field(:clustering, as: GoogleApi.BigQuery.V2.Model.Clustering)
field(:location)
field(:friendlyName)
field(:expirationTime)
field(:timePartitioning, as: GoogleApi.BigQuery.V2.Model.TimePartitioning)
field(:model, as: GoogleApi.BigQuery.V2.Model.ModelDefinition)
field(:creationTime)
field(:view, as: GoogleApi.BigQuery.V2.Model.ViewDefinition)
field(:num_total_logical_bytes)
field(:defaultCollation)
field(:num_active_logical_bytes)
field(:tableReference, as: GoogleApi.BigQuery.V2.Model.TableReference)
field(:externalDataConfiguration, as: GoogleApi.BigQuery.V2.Model.ExternalDataConfiguration)
field(:type)
field(:numRows)
field(:id)
field(:num_total_physical_bytes)
field(:rangePartitioning, as: GoogleApi.BigQuery.V2.Model.RangePartitioning)
field(:materializedView, as: GoogleApi.BigQuery.V2.Model.MaterializedViewDefinition)
field(:labels, type: :map)
field(:selfLink)
field(:encryptionConfiguration, as: GoogleApi.BigQuery.V2.Model.EncryptionConfiguration)
field(:schema, as: GoogleApi.BigQuery.V2.Model.TableSchema)
field(:num_long_term_logical_bytes)
field(:streamingBuffer, as: GoogleApi.BigQuery.V2.Model.Streamingbuffer)
end
# Poison protocol hook: delegates JSON decoding of Table payloads to the
# model's generated `decode/2` (provided by GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.Table do
  def decode(value, options) do
    GoogleApi.BigQuery.V2.Model.Table.decode(value, options)
  end
end
# Poison protocol hook: delegates JSON encoding of Table structs to the
# shared GoogleApi.Gax.ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.Table do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 81.208589 | 487 | 0.712775 |
9e9f13ed67bbbf7e87cbecca24262f40c648faa1 | 3,052 | exs | Elixir | apps/fz_http/mix.exs | kaku-io/firezone | 685da0064727df27e444fe4da2be20efe96af9cd | [
"Apache-2.0"
] | null | null | null | apps/fz_http/mix.exs | kaku-io/firezone | 685da0064727df27e444fe4da2be20efe96af9cd | [
"Apache-2.0"
] | null | null | null | apps/fz_http/mix.exs | kaku-io/firezone | 685da0064727df27e444fe4da2be20efe96af9cd | [
"Apache-2.0"
] | null | null | null | defmodule FzHttp.MixProject do
  use Mix.Project
  # Relative path (from this umbrella child app) of the shared script that
  # computes the project-wide version string.
  @version_path "../../scripts/version.exs"
  # Evaluates the shared version script and returns its result.
  # `Code.eval_file/1` yields `{result, bindings}`; `elem(0)` keeps the result.
  def version do
    Code.eval_file(@version_path)
    |> elem(0)
  end
  # Mix project configuration for the fz_http umbrella child application.
  # Build/config/deps/lockfile paths point one level up to the umbrella root.
  def project do
    [
      app: :fz_http,
      version: version(),
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.12",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      test_coverage: [tool: ExCoveralls],
      # Coveralls tasks must run in the :test environment.
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      aliases: aliases(),
      deps: deps()
    ]
  end
  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      # FzHttp.Application is the application callback (supervision root).
      mod: {FzHttp.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:fz_common, in_umbrella: true},
      {:decimal, "~> 2.0"},
      # NOTE(review): release-candidate pin with override — presumably needed
      # until other deps accept Phoenix 1.6; revisit once 1.6 is final.
      {:phoenix, "~> 1.6.0-rc.0", override: true},
      {:cloak, "~> 1.1"},
      {:cloak_ecto, "~> 1.2"},
      {:excoveralls, "~> 0.14", only: :test},
      {:floki, ">= 0.0.0", only: :test},
      {:httpoison, "~> 1.8"},
      {:argon2_elixir, "~> 2.0"},
      {:phoenix_pubsub, "~> 2.0"},
      {:phoenix_ecto, "~> 4.4"},
      {:ecto_sql, "~> 3.7"},
      {:ecto_network, "~> 1.3"},
      {:hammer, "~> 6.0"},
      {:hammer_plug, "~> 2.1"},
      {:inflex, "~> 2.1"},
      {:plug, "~> 1.12.1"},
      {:postgrex, "~> 0.15.10"},
      {:phoenix_html, "~> 3.1.0"},
      {:phoenix_live_reload, "~> 1.3", only: :dev},
      {:phoenix_live_view, "~> 0.17.5"},
      {:gettext, "~> 0.18"},
      {:jason, "~> 1.2"},
      # XXX: Change this when hex package is updated
      {:cidr, github: "firezone/cidr-elixir"},
      {:telemetry, "~> 0.4.3"},
      {:plug_cowboy, "~> 2.5"},
      {:credo, "~> 1.5", only: [:dev, :test], runtime: false}
    ]
  end
  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.seed": "run priv/repo/seeds.exs",
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      # `mix test` prepares the database before running the suite.
      test: [
        "ecto.create --quiet",
        "ecto.migrate",
        "test"
      ],
      "assets.compile": &compile_assets/1
    ]
  end
  # Shells out to webpack to build frontend assets (backs the
  # "assets.compile" alias). Output is shown (`quiet: false`).
  defp compile_assets(_) do
    Mix.shell().cmd("cd assets && ./node_modules/.bin/webpack --mode development", quiet: false)
  end
end
| 28 | 96 | 0.545544 |
9e9f22bdd594f207f90fb7ae9218c72b328f35ad | 5,699 | ex | Elixir | lib/clickhousex/protocol.ex | moldmn/clickhousex | b39016714e346490ff8e9c4432006827612fba11 | [
"Apache-2.0"
] | 43 | 2018-04-02T07:00:44.000Z | 2020-08-14T18:47:26.000Z | lib/clickhousex/protocol.ex | moldmn/clickhousex | b39016714e346490ff8e9c4432006827612fba11 | [
"Apache-2.0"
] | 21 | 2020-09-07T08:19:59.000Z | 2021-08-25T03:46:38.000Z | lib/clickhousex/protocol.ex | moldmn/clickhousex | b39016714e346490ff8e9c4432006827612fba11 | [
"Apache-2.0"
] | 22 | 2018-08-08T07:45:44.000Z | 2020-09-01T07:58:54.000Z | defmodule Clickhousex.Protocol do
  @moduledoc false
  use DBConnection
  alias Clickhousex.Error
  alias Clickhousex.HTTPClient, as: Client
  # Connection state: the raw Mint connection plus the options used to open
  # it (kept so the connection can be re-dialed on reconnect).
  defstruct conn_opts: [], base_address: "", conn: nil
  @type state :: %__MODULE__{
          conn_opts: Keyword.t(),
          base_address: String.t(),
          conn: Mint.HTTP.t()
        }
  @type query :: Clickhousex.Query.t()
  @type result :: Clickhousex.Result.t()
  @type cursor :: any
  # Pre-parsed/encoded "SELECT 1" used as a cheap liveness probe by
  # connect/1 and ping/1; built once at compile time.
  @ping_query Clickhousex.Query.new("SELECT 1") |> DBConnection.Query.parse([])
  @ping_params DBConnection.Query.encode(@ping_query, [], [])
  @doc false
  @spec connect(opts :: Keyword.t()) :: {:ok, state} | {:error, Exception.t()}
  def connect(opts) do
    # Defaults for missing options; credentials default to nil.
    scheme = opts[:scheme] || :http
    hostname = opts[:hostname] || "localhost"
    port = opts[:port] || 8123
    database = opts[:database] || "default"
    username = opts[:username]
    password = opts[:password]
    timeout = opts[:timeout] || Clickhousex.timeout()
    # NOTE(review): a failed TCP connect crashes (MatchError) here instead of
    # returning {:error, exception} as the @spec declares — confirm whether
    # Client.connect/3 can return an error tuple and needs a case.
    {:ok, conn} = Client.connect(scheme, hostname, port)
    # Probe the server with "SELECT 1" so bad credentials/database fail fast.
    response = Client.request(conn, @ping_query, @ping_params, timeout, username, password, database)
    # NOTE(review): on a non-matching response, the raw tuple falls out of
    # `with` unchanged, which is not the {:error, Exception.t()} shape in
    # @spec — verify DBConnection tolerates this.
    with {:ok, conn, {:selected, _, _}} <- response do
      conn_opts = [
        scheme: scheme,
        hostname: hostname,
        port: port,
        database: database,
        username: username,
        password: password,
        timeout: timeout
      ]
      state = %__MODULE__{
        conn: conn,
        conn_opts: conn_opts
      }
      {:ok, state}
    end
  end
@doc false
@spec disconnect(err :: Exception.t(), state) :: :ok
def disconnect(_err, _state) do
:ok
end
  @doc false
  @spec ping(state) ::
          {:ok, state}
          | {:disconnect, term, state}
  def ping(state) do
    # Run the precompiled "SELECT 1" probe; any {:error, _} means the
    # connection is unusable, so ask the pool to disconnect it.
    case do_query(state.conn, @ping_query, @ping_params, [], state) do
      {:ok, _, _, new_state} -> {:ok, new_state}
      {:error, reason, new_state} -> {:disconnect, reason, new_state}
      # do_query can itself return {:disconnect, _, _}; pass it through.
      other -> other
    end
  end
@doc false
@spec reconnect(new_opts :: Keyword.t(), state) :: {:ok, state}
def reconnect(new_opts, state) do
with :ok <- disconnect("Reconnecting", state),
do: connect(new_opts)
end
@doc false
@spec checkin(state) :: {:ok, state}
def checkin(state) do
{:ok, state}
end
@doc false
@spec checkout(state) :: {:ok, state}
def checkout(state) do
{:ok, state}
end
@doc false
def handle_status(_, state) do
{:idle, state}
end
@doc false
@spec handle_prepare(query, Keyword.t(), state) :: {:ok, query, state}
def handle_prepare(query, _, state) do
{:ok, query, state}
end
@doc false
@spec handle_execute(query, list, opts :: Keyword.t(), state) ::
{:ok, result, state}
| {:error | :disconnect, Exception.t(), state}
def handle_execute(query, params, opts, state) do
do_query(state.conn, query, params, opts, state)
end
@doc false
def handle_declare(_query, _params, _opts, state) do
{:error, :cursors_not_supported, state}
end
@doc false
def handle_deallocate(_query, _cursor, _opts, state) do
{:error, :cursors_not_supported, state}
end
def handle_fetch(_query, _cursor, _opts, state) do
{:error, :cursors_not_supported, state}
end
@doc false
@spec handle_begin(opts :: Keyword.t(), state) :: {:ok, result, state}
def handle_begin(_opts, state) do
{:ok, %Clickhousex.Result{}, state}
end
@doc false
@spec handle_close(query, Keyword.t(), state) :: {:ok, result, state}
def handle_close(_query, _opts, state) do
{:ok, %Clickhousex.Result{}, state}
end
@doc false
@spec handle_commit(opts :: Keyword.t(), state) :: {:ok, result, state}
def handle_commit(_opts, state) do
{:ok, %Clickhousex.Result{}, state}
end
@doc false
@spec handle_info(opts :: Keyword.t(), state) :: {:ok, result, state}
def handle_info(_msg, state) do
{:ok, state}
end
@doc false
@spec handle_rollback(opts :: Keyword.t(), state) :: {:ok, result, state}
def handle_rollback(_opts, state) do
{:ok, %Clickhousex.Result{}, state}
end
  # Executes `query` against ClickHouse over HTTP and translates the client
  # response into DBConnection's expected tuples. The (possibly updated) Mint
  # connection returned by the client is always written back into state.
  defp do_query(conn, query, params, _opts, state) do
    username = state.conn_opts[:username]
    password = state.conn_opts[:password]
    timeout = state.conn_opts[:timeout]
    database = state.conn_opts[:database]
    res =
      conn
      |> Client.request(query, params, timeout, username, password, database)
      |> handle_errors()
    case res do
      # Connection-level failures are fatal: tell the pool to drop the socket.
      {:error, conn, %Error{code: :connection_exception} = reason} ->
        {:disconnect, reason, %{state | conn: conn}}
      {:error, conn, reason} ->
        {:error, reason, %{state | conn: conn}}
      # SELECT-style responses carry columns and rows.
      {:ok, conn, {:selected, columns, rows}} ->
        {
          :ok,
          query,
          %Clickhousex.Result{
            command: :selected,
            columns: columns,
            rows: rows,
            num_rows: Enum.count(rows)
          },
          %{state | conn: conn}
        }
      # Write-style responses carry only an affected-row count; surface it
      # as a single-row, single-column result.
      {:ok, conn, {:updated, count}} ->
        {
          :ok,
          query,
          %Clickhousex.Result{
            command: :updated,
            columns: ["count"],
            rows: [[count]],
            num_rows: 1
          },
          %{state | conn: conn}
        }
      # Any other tagged response is passed through under its own command tag.
      {:ok, conn, {command, columns, rows}} ->
        {
          :ok,
          query,
          %Clickhousex.Result{
            command: command,
            columns: columns,
            rows: rows,
            num_rows: Enum.count(rows)
          },
          %{state | conn: conn}
        }
    end
  end
@doc false
defp handle_errors({:error, conn, reason}) do
{:error, conn, Error.exception(reason)}
end
defp handle_errors(term), do: term
end
| 25.328889 | 101 | 0.586594 |
9e9f57923d742843efb580f69797b34b71648cab | 6,548 | ex | Elixir | clients/service_networking/lib/google_api/service_networking/v1/model/add_subnetwork_request.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/service_networking/lib/google_api/service_networking/v1/model/add_subnetwork_request.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/service_networking/lib/google_api/service_networking/v1/model/add_subnetwork_request.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceNetworking.V1.Model.AddSubnetworkRequest do
  @moduledoc """
  Request to create a subnetwork in a previously peered service network.
  ## Attributes
  * `checkServiceNetworkingUsePermission` (*type:* `boolean()`, *default:* `nil`) - Optional. The IAM permission check determines whether the consumer project has 'servicenetworking.services.use' permission or not.
  * `consumer` (*type:* `String.t`, *default:* `nil`) - Required. A resource that represents the service consumer, such as `projects/123456`. The project number can be different from the value in the consumer network parameter. For example, the network might be part of a Shared VPC network. In those cases, Service Networking validates that this resource belongs to that Shared VPC.
  * `consumerNetwork` (*type:* `String.t`, *default:* `nil`) - Required. The name of the service consumer's VPC network. The network must have an existing private connection that was provisioned through the connections.create method. The name must be in the following format: `projects/{project}/global/networks/{network}`, where {project} is a project number, such as `12345`. {network} is the name of a VPC network in the project.
  * `description` (*type:* `String.t`, *default:* `nil`) - Optional. Description of the subnet.
  * `ipPrefixLength` (*type:* `integer()`, *default:* `nil`) - Required. The prefix length of the subnet's IP address range. Use CIDR range notation, such as `30` to provision a subnet with an `x.x.x.x/30` CIDR range. The IP address range is drawn from a pool of available ranges in the service consumer's allocated range.
  * `outsideAllocationPublicIpRange` (*type:* `String.t`, *default:* `nil`) - Optional. Enable outside allocation using public IP addresses. Any public IP range may be specified. If this field is provided, we will not use customer reserved ranges for this primary IP range.
  * `privateIpv6GoogleAccess` (*type:* `String.t`, *default:* `nil`) - Optional. The private IPv6 google access type for the VMs in this subnet. For information about the access types that can be set using this field, see [subnetwork](https://cloud.google.com/compute/docs/reference/rest/v1/subnetworks) in the Compute API documentation.
  * `purpose` (*type:* `String.t`, *default:* `nil`) - Optional. Defines the purpose field of the subnet, e.g. 'PRIVATE_SERVICE_CONNECT'. For information about the purposes that can be set using this field, see [subnetwork](https://cloud.google.com/compute/docs/reference/rest/v1/subnetworks) in the Compute API documentation.
  * `region` (*type:* `String.t`, *default:* `nil`) - Required. The name of a [region](/compute/docs/regions-zones) for the subnet, such `europe-west1`.
  * `requestedAddress` (*type:* `String.t`, *default:* `nil`) - Optional. The starting address of a range. The address must be a valid IPv4 address in the x.x.x.x format. This value combined with the IP prefix range is the CIDR range for the subnet. The range must be within the allocated range that is assigned to the private connection. If the CIDR range isn't available, the call fails.
  * `requestedRanges` (*type:* `list(String.t)`, *default:* `nil`) - Optional. The name of one or more allocated IP address ranges associated with this private service access connection. If no range names are provided all ranges associated with this connection will be considered. If a CIDR range with the specified IP prefix length is not available within these ranges, the call fails.
  * `secondaryIpRangeSpecs` (*type:* `list(GoogleApi.ServiceNetworking.V1.Model.SecondaryIpRangeSpec.t)`, *default:* `nil`) - Optional. A list of secondary IP ranges to be created within the new subnetwork.
  * `subnetwork` (*type:* `String.t`, *default:* `nil`) - Required. A name for the new subnet. For information about the naming requirements, see [subnetwork](/compute/docs/reference/rest/v1/subnetworks) in the Compute API documentation.
  * `subnetworkUsers` (*type:* `list(String.t)`, *default:* `nil`) - A list of members that are granted the `compute.networkUser` role on the subnet.
  """
  # ModelBase supplies decode/2 and the field/1,2 macro used below.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :checkServiceNetworkingUsePermission => boolean() | nil,
          :consumer => String.t() | nil,
          :consumerNetwork => String.t() | nil,
          :description => String.t() | nil,
          :ipPrefixLength => integer() | nil,
          :outsideAllocationPublicIpRange => String.t() | nil,
          :privateIpv6GoogleAccess => String.t() | nil,
          :purpose => String.t() | nil,
          :region => String.t() | nil,
          :requestedAddress => String.t() | nil,
          :requestedRanges => list(String.t()) | nil,
          :secondaryIpRangeSpecs =>
            list(GoogleApi.ServiceNetworking.V1.Model.SecondaryIpRangeSpec.t()) | nil,
          :subnetwork => String.t() | nil,
          :subnetworkUsers => list(String.t()) | nil
        }
  # Registered fields drive JSON decoding/encoding of this model.
  field(:checkServiceNetworkingUsePermission)
  field(:consumer)
  field(:consumerNetwork)
  field(:description)
  field(:ipPrefixLength)
  field(:outsideAllocationPublicIpRange)
  field(:privateIpv6GoogleAccess)
  field(:purpose)
  field(:region)
  field(:requestedAddress)
  field(:requestedRanges, type: :list)
  field(:secondaryIpRangeSpecs,
    as: GoogleApi.ServiceNetworking.V1.Model.SecondaryIpRangeSpec,
    type: :list
  )
  field(:subnetwork)
  field(:subnetworkUsers, type: :list)
end
# Poison protocol hook: delegates JSON decoding to the model's generated
# decode/2 (from GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.ServiceNetworking.V1.Model.AddSubnetworkRequest do
  def decode(value, options) do
    GoogleApi.ServiceNetworking.V1.Model.AddSubnetworkRequest.decode(value, options)
  end
end
# Poison protocol hook: delegates JSON encoding to the shared
# GoogleApi.Gax.ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.ServiceNetworking.V1.Model.AddSubnetworkRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 71.173913 | 434 | 0.727856 |
9e9f8b732a7199c2ffb6957800549b0c18f839d1 | 1,774 | ex | Elixir | lib/ex_doc/markdown/earmark.ex | zoldar/ex_doc | f2ca5d1f4f85650b55460bc08e4d17a00f9f0d75 | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | lib/ex_doc/markdown/earmark.ex | zoldar/ex_doc | f2ca5d1f4f85650b55460bc08e4d17a00f9f0d75 | [
"Apache-2.0",
"CC-BY-4.0"
] | 2 | 2020-12-04T22:03:31.000Z | 2022-03-02T10:00:23.000Z | lib/ex_doc/markdown/earmark.ex | bonomali/ex_doc | d22cdad512fbfd8bfc44b417c680b1105c8aa19a | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | defmodule ExDoc.Markdown.Earmark do
@moduledoc """
ExDoc extension for the Earmark MarkDown parser.
"""
@behaviour ExDoc.Markdown
# Callback implementations
def assets(arg), do: ExDoc.Highlighter.assets(arg)
def before_closing_head_tag(arg), do: ExDoc.Highlighter.before_closing_head_tag(arg)
def before_closing_body_tag(arg), do: ExDoc.Highlighter.before_closing_body_tag(arg)
def configure(_), do: :ok
@doc """
Check if the Earmark Markdown parser module is available.
"""
def available? do
match?({:ok, _}, Application.ensure_all_started(:earmark)) and Code.ensure_loaded?(Earmark)
end
@doc """
Earmark specific options:
* `:gfm` - boolean. Turns on Github Flavored Markdown extensions. True by default
* `:breaks` - boolean. Only applicable if `gfm` is enabled. Makes all line
breaks significant (so every line in the input is a new line in the output)
* `:smartypants` - boolean. Turns on smartypants processing, so quotes become curly,
two or three hyphens become en and em dashes, and so on. False by default
* `:plugins` - map of strings to modules. Register custom plugins to be used with
Earmark. See [Plugins](http://github.com/pragdave/earmark#plugins) for details on
how to write custom plugins.
"""
def to_html(text, opts) do
options =
struct(Earmark.Options,
gfm: Keyword.get(opts, :gfm, true),
line: Keyword.get(opts, :line, 1),
file: Keyword.get(opts, :file),
breaks: Keyword.get(opts, :breaks, false),
smartypants: Keyword.get(opts, :smartypants, false),
plugins: Keyword.get(opts, :plugins, %{})
)
text
|> Earmark.as_html!(options)
|> ExDoc.Highlighter.highlight_code_blocks()
end
end
| 32.254545 | 95 | 0.687711 |
9e9fc76933db98c6c28d9f20ae268b0434700f9f | 98 | ex | Elixir | lib/bnb/repo.ex | hazen/bnb | 0a638423ee2903fe54456845fdad5ea90fe1ebc8 | [
"MIT"
] | null | null | null | lib/bnb/repo.ex | hazen/bnb | 0a638423ee2903fe54456845fdad5ea90fe1ebc8 | [
"MIT"
] | 2 | 2021-03-09T08:43:51.000Z | 2021-05-09T06:31:23.000Z | lib/bnb/repo.ex | hazen/bnb | 0a638423ee2903fe54456845fdad5ea90fe1ebc8 | [
"MIT"
] | null | null | null | defmodule Bnb.Repo do
  # Ecto repository for the :bnb application, backed by PostgreSQL.
  # Database configuration is read from the :bnb OTP app environment.
  use Ecto.Repo,
    otp_app: :bnb,
    adapter: Ecto.Adapters.Postgres
end
| 16.333333 | 35 | 0.704082 |
9e9fdc4da06e69aec818199fb23e59bcc40d51e8 | 3,285 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta2_document_revision.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta2_document_revision.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta2_document_revision.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta2DocumentRevision do
  @moduledoc """
  Contains past or forward revisions of this document.
  ## Attributes
  * `agent` (*type:* `String.t`, *default:* `nil`) - If the change was made by a person specify the name or id of that person.
  * `createTime` (*type:* `DateTime.t`, *default:* `nil`) - The time that the revision was created.
  * `humanReview` (*type:* `GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta2DocumentRevisionHumanReview.t`, *default:* `nil`) - Human Review information of this revision.
  * `id` (*type:* `String.t`, *default:* `nil`) - Id of the revision. Unique within the context of the document.
  * `parent` (*type:* `list(integer())`, *default:* `nil`) - The revisions that this revision is based on. This can include one or more parent (when documents are merged.) This field represents the index into the `revisions` field.
  * `parentIds` (*type:* `list(String.t)`, *default:* `nil`) - The revisions that this revision is based on. Must include all the ids that have anything to do with this revision - eg. there are `provenance.parent.revision` fields that index into this field.
  * `processor` (*type:* `String.t`, *default:* `nil`) - If the annotation was made by processor identify the processor by its resource name.
  """
  # ModelBase supplies decode/2 and the field/1,2 macro used below.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :agent => String.t() | nil,
          :createTime => DateTime.t() | nil,
          :humanReview =>
            GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta2DocumentRevisionHumanReview.t()
            | nil,
          :id => String.t() | nil,
          :parent => list(integer()) | nil,
          :parentIds => list(String.t()) | nil,
          :processor => String.t() | nil
        }
  # Registered fields drive JSON decoding/encoding of this model.
  field(:agent)
  field(:createTime, as: DateTime)
  field(:humanReview,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta2DocumentRevisionHumanReview
  )
  field(:id)
  field(:parent, type: :list)
  field(:parentIds, type: :list)
  field(:processor)
end
# Poison protocol hook: delegates JSON decoding to the model's generated
# decode/2 (from GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder,
  for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta2DocumentRevision do
  def decode(value, options) do
    GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta2DocumentRevision.decode(
      value,
      options
    )
  end
end
# Poison protocol hook: delegates JSON encoding to the shared
# GoogleApi.Gax.ModelBase encoder.
defimpl Poison.Encoder,
  for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta2DocumentRevision do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 43.223684 | 259 | 0.712329 |
9e9fdd14e5f23e37393a3f43c00adc01f05058a5 | 702 | ex | Elixir | lib/axon_web/gettext.ex | MattFerraro/Axon | 9c41c617ffd4a8dac89319d2f26f5736d9f96ca4 | [
"MIT"
] | null | null | null | lib/axon_web/gettext.ex | MattFerraro/Axon | 9c41c617ffd4a8dac89319d2f26f5736d9f96ca4 | [
"MIT"
] | null | null | null | lib/axon_web/gettext.ex | MattFerraro/Axon | 9c41c617ffd4a8dac89319d2f26f5736d9f96ca4 | [
"MIT"
] | null | null | null | defmodule AxonWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import AxonWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :axon
end
| 28.08 | 72 | 0.673789 |
9e9fe9bd82b717cf4a2d4279a0e5817fbc1f1094 | 607 | exs | Elixir | day19/test/day19_test.exs | bjorng/advent-of-code-2017 | bd58a36864a4d82809253770f8a6d0c4e02cb59a | [
"Apache-2.0"
] | null | null | null | day19/test/day19_test.exs | bjorng/advent-of-code-2017 | bd58a36864a4d82809253770f8a6d0c4e02cb59a | [
"Apache-2.0"
] | null | null | null | day19/test/day19_test.exs | bjorng/advent-of-code-2017 | bd58a36864a4d82809253770f8a6d0c4e02cb59a | [
"Apache-2.0"
] | null | null | null | defmodule Day19Test do
  use ExUnit.Case
  doctest Day19
  # Expected values below are the known answers for the puzzle example and
  # for this author's personal puzzle input (read from input.txt).
  test "part 1 with example" do
    assert Day19.part1(example()) == 'ABCDEF'
  end
  test "part 1 with my input" do
    assert Day19.part1(input()) == 'GEPYAWTMLK'
  end
  test "part 2 with example" do
    assert Day19.part2(example()) == 38
  end
  test "part 2 with my input" do
    assert Day19.part2(input()) == 17628
  end
  # The routing-diagram example from the day 19 puzzle statement. The
  # heredoc's whitespace is significant: column positions define the path
  # the tracer follows, so this fixture must not be reformatted.
  def example() do
    """
         |
         |  +--+
         A  |  C
     F---|----E|--+
         |  |  |  D
         +B-+  +--+
    """
  end
  # Reads this author's personal puzzle input from disk at test time.
  # (The charlist path is legacy style; File.read!/1 accepts it like a binary.)
  defp input do
    File.read!('input.txt')
  end
end
| 16.861111 | 47 | 0.510708 |
9e9ffbe98f622e76996c5aa4679b3a858a528bcb | 3,085 | exs | Elixir | alchemist-server/lib/helpers/module_info.exs | benjcal/vscode-elixir | ccf9457ebed29b80f8ac4f5e855cc9596d1546f6 | [
"MIT"
] | 221 | 2016-05-03T09:46:04.000Z | 2021-02-20T05:08:18.000Z | alchemist-server/lib/helpers/module_info.exs | benjcal/vscode-elixir | ccf9457ebed29b80f8ac4f5e855cc9596d1546f6 | [
"MIT"
] | 104 | 2017-02-28T16:26:12.000Z | 2021-02-19T22:31:03.000Z | alchemist-server/lib/helpers/module_info.exs | benjcal/vscode-elixir | ccf9457ebed29b80f8ac4f5e855cc9596d1546f6 | [
"MIT"
] | 59 | 2017-03-12T07:07:35.000Z | 2021-02-26T19:41:15.000Z | Code.require_file "../helpers/introspection.exs", __DIR__
defmodule Alchemist.Helpers.ModuleInfo do
  @moduledoc false
  # Returns true when `module` has a non-empty @moduledoc string.
  # NOTE(review): Code.get_docs/2 was removed in later Elixir versions in
  # favor of Code.fetch_docs/1 — confirm the targeted Elixir version.
  def moduledoc?(module) do
    case Code.get_docs module, :moduledoc do
      {_, doc} -> is_binary doc
      _ -> false
    end
  end
  # Returns true when `function` (an atom) has @doc content in `module`.
  def docs?(module, function) do
    docs = Code.get_docs module, :docs
    do_docs?(docs, function)
  end
  # Expands a module path (a list of atoms, e.g. [:Foo, :Bar]) against the
  # caller's alias list. If the head matches a registered alias, the alias
  # target replaces it; otherwise the original list is concatenated as-is.
  def expand_alias([name | rest] = list, aliases) do
    module = Module.concat(Elixir, name)
    Enum.find_value(aliases, list, fn {alias, mod} ->
      if alias === module do
        case Atom.to_string(mod) do
          "Elixir." <> mod ->
            # Elixir module: splice the remaining path onto the target.
            Module.concat [mod|rest]
          _ ->
            # Erlang module: use it directly.
            mod
        end
      end
    end) |> normalize_module
  end
  # Lists {function, arity} pairs of `module` whose name starts with `hint`,
  # sorted. `module` arrives as a source string and is resolved with
  # Code.eval_string/1 (editor tooling operating on trusted input).
  def get_functions(module, hint) do
    hint = to_string hint
    {module, _} = Code.eval_string(module)
    functions = get_module_funs(module)
    # Group arities per function name: [{name, [arity, ...]}].
    list = Enum.reduce functions, [], fn({f, a}, acc) ->
      case :lists.keyfind(f, 1, acc) do
        {f, aa} -> :lists.keyreplace(f, 1, acc, {f, [a|aa]})
        false -> [{f, [a]}|acc]
      end
    end
    do_get_functions(list, hint) |> :lists.sort()
  end
  # Returns true when `module` exports `function` (any arity).
  def has_function?(module, function) do
    List.keymember? get_module_funs(module), function, 0
  end
  # An empty hint matches everything; otherwise filter by prefix.
  defp do_get_functions(list, "") do
    all_functions(list)
  end
  defp do_get_functions(list, hint) do
    all_functions(list, hint)
  end
  # Public functions plus macros of a loadable module; [] when it cannot
  # be loaded.
  defp get_module_funs(module) do
    case Code.ensure_loaded(module) do
      {:module, _} ->
        (module.module_info(:functions) |> filter_module_funs) ++ module.__info__(:macros)
      _otherwise ->
        []
    end
  end
  # Drops compiler-generated entries ("MACRO-..." and "-..." fun names).
  defp filter_module_funs(list) do
    for fun = {f, _a} <- list, !(f |> Atom.to_string |> String.starts_with?(["MACRO-", "-"])) do
      fun
    end
  end
  # Flattens grouped {name, [arities]} pairs back into {name, arity} pairs.
  defp all_functions(list) do
    for {fun, arities} <- list do
      for arity <- arities do
        {fun, arity}
      end
    end |> List.flatten
  end
  # Same as all_functions/1 but keeps only names starting with `hint`.
  defp all_functions(list, hint) do
    for {fun, arities} <- list, name = Atom.to_string(fun), String.starts_with?(name, hint) do
      for arity <- arities do
        {fun, arity}
      end
    end |> List.flatten
  end
  # All modules of every currently loaded OTP application.
  def all_applications_modules do
    for [app] <- loaded_applications(),
        {:ok, modules} = :application.get_key(app, :modules),
        module <- modules do
      module
    end
  end
  # Walks the docs chunk looking for `function` with a binary @doc.
  defp do_docs?([head|tail], function) do
    {{func, _}, _, _, _, doc} = head
    if func == function and is_binary(doc) do
      true
    else
      do_docs?(tail, function)
    end
  end
  defp do_docs?([], _function), do: false
  # Code.get_docs returns nil when no docs chunk exists.
  defp do_docs?(nil, _function), do: false
  defp loaded_applications do
    # If we invoke :application.loaded_applications/0,
    # it can error if we don't call safe_fixtable before.
    # Since in both cases we are reaching over the
    # application controller internals, we choose to match
    # for performance.
    :ets.match(:ac_tab, {{:loaded, :"$1"}, :_})
  end
  # Erlang modules may come back as charlists; convert to a module atom.
  defp normalize_module(mod) do
    if is_list(mod) do
      Module.concat(mod)
    else
      mod
    end
  end
end
| 24.101563 | 96 | 0.612966 |
9ea001f555ca15645cdd96299e30199ef60e4aee | 621 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_dot_call_operation_parsing_test_case/DecimalFloat.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_dot_call_operation_parsing_test_case/DecimalFloat.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_dot_call_operation_parsing_test_case/DecimalFloat.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | 1.2e-3.()
1.2e-3.(function positional, key: value)
1.2e-3.(key_one: value_one, key_two: value_two)
1.2e-3.(
&one,
one <- two,
one when two,
one | two,
one = two,
one or two,
one || two,
one and two,
one && two,
one != two,
one < two,
one |> two,
one in two,
one ++ two,
one..two,
one + two,
one ^^^ two,
not one,
one.(),
Two.Three,
@one,
one,
@1,
&1,
!1,
1,
[],
"StringLine",
"""
String
Heredoc
""",
'CharListLine',
'''
CharList
Heredoc
''',
~x{sigil}modifiers,
nil,
:atom,
Alias
)
1.2e-3.(
one,
key: value
)
1.2e-3.(
one
)(
two
)
| 11.089286 | 47 | 0.504026 |
9ea00de68251e9517e6dd150513fe785cc7c8283 | 543 | ex | Elixir | apps/subs/lib/subs/use_case.ex | gitter-badger/opensubs.io | 76d5b4d355a530c8f496efe3ac2095d87f078997 | [
"MIT"
] | 36 | 2018-02-03T10:58:51.000Z | 2020-09-19T20:52:17.000Z | apps/subs/lib/subs/use_case.ex | joaquimadraz/subs | 9a26144ed660d5ece849ee447a9e5de53a311408 | [
"MIT"
] | 8 | 2018-01-17T17:15:48.000Z | 2020-07-06T08:56:54.000Z | apps/subs/lib/subs/use_case.ex | joaquimadraz/subs | 9a26144ed660d5ece849ee447a9e5de53a311408 | [
"MIT"
] | 10 | 2018-05-21T18:20:32.000Z | 2022-01-29T14:25:48.000Z | defmodule Subs.UseCase do
@moduledoc """
Helper functions to build our use cases.
Subs.UseCase.Flow
Adds ok! and failure! functions to create a standard way of returning
use case's context.
"""
def base do
quote do
import Subs.UseCase.Flow
end
end
defmacro __using__([]) do
apply(__MODULE__, :base, [])
end
end
defmodule Subs.UseCase.Flow do
  @moduledoc false

  # Success result: wraps the (possibly empty) use-case context in an :ok tuple.
  def ok!(context \\ %{}), do: {:ok, context}

  # Failure result: tags the error with a status plus optional context.
  def failure!(status, context \\ %{}), do: {:error, {status, context}}
end
| 16.96875 | 71 | 0.64825 |
9ea014d21a63ff58d2ce6138a88e6d34b5d30137 | 4,516 | ex | Elixir | lib/ex_pesa/Mpesa/c2b.ex | MidigoF/ex_pesa | d2a62e72c67084385609c895e52a6ac52e8a9a8a | [
"AML",
"MIT"
] | 20 | 2020-08-07T18:45:03.000Z | 2021-12-02T12:47:07.000Z | lib/ex_pesa/Mpesa/c2b.ex | MidigoF/ex_pesa | d2a62e72c67084385609c895e52a6ac52e8a9a8a | [
"AML",
"MIT"
] | 65 | 2020-08-17T05:52:33.000Z | 2021-05-20T16:06:34.000Z | lib/ex_pesa/Mpesa/c2b.ex | MidigoF/ex_pesa | d2a62e72c67084385609c895e52a6ac52e8a9a8a | [
"AML",
"MIT"
] | 11 | 2020-08-17T07:53:02.000Z | 2021-04-02T20:57:16.000Z | defmodule ExPesa.Mpesa.C2B do
@moduledoc """
C2B M-Pesa API enables Paybill and Buy Goods merchants to integrate to M-Pesa and receive real time payments notifications.
"""
import ExPesa.Mpesa.MpesaBase
@doc """
There are two URLs required for RegisterURL API: Validation URL and Confirmation URL.
For the two URLs, below are some pointers. This will also apply to the Callback URLs used on other APIs:
- Use publicly available (Internet-accessible) IP addresses or domain names.
- Do not use the words MPesa, M-Pesa, Safaricom or any of their variants in either upper or lower cases in your URLs, the system filters these URLs out and blocks them. Of course any Localhost URL will be refused.
- Do not use public URL testers e.g. mockbin or requestbin especially on production, they are also blocked by the API.
## Parameters
attrs: - a map containing:
- `ShortCode` - This is your paybill number/till number, which you expect to receive payments notifications about.
- `ResponseType` - [Cancelled/Completed] This is the default action value that determines what MPesa will do in the scenario that
your endpoint is unreachable or is unable to respond on time. Only two values are allowed: Completed or Cancelled.
Completed means MPesa will automatically complete your transaction, whereas Cancelled means
MPesa will automatically cancel the transaction, in the event MPesa is unable to reach your Validation URL.
- `ConfirmationURL` - [confirmation URL].
- `ValidationURL` - [validation URL].
## Example
iex> ExPesa.Mpesa.C2B.registerurl(%{ConfirmationURL: "https://58cb49b30213.ngrok.io/confirmation", ValidationURL: "https://58cb49b30213.ngrok.io/validation", ResponseType: "Completed"})
{:ok,
%{
"ConversationID" => "",
"OriginatorCoversationID" => "",
"ResponseDescription" => "success"
}
}
"""
def registerurl(%{
ConfirmationURL: confirmation_url,
ValidationURL: validation_url,
ResponseType: response_type
}) do
paybill = Application.get_env(:ex_pesa, :mpesa)[:c2b_short_code]
payload = %{
"ShortCode" => paybill,
"ResponseType" => response_type,
"ConfirmationURL" => confirmation_url,
"ValidationURL" => validation_url
}
make_request("/mpesa/c2b/v1/registerurl", payload)
end
def registerurl(%{}) do
{:error, "Required Parameter missing, 'ConfirmationURL', 'ValidationURL','ResponseType'"}
end
@doc """
This API is used to make payment requests from Client to Business (C2B).
## Parameters
attrs: - a map containing:
- `CommandID` - This is a unique identifier of the transaction type: There are two types of these Identifiers:
CustomerPayBillOnline: This is used for Pay Bills shortcodes.
CustomerBuyGoodsOnline: This is used for Buy Goods shortcodes.
- `Amount` - This is the amount being transacted. The parameter expected is a numeric value.
- `Msisdn` - This is the phone number initiating the C2B transaction.
- `BillRefNumber` - This is used on CustomerPayBillOnline option only.
This is where a customer is expected to enter a unique bill identifier, e.g an Account Number.
- `ShortCode` - This is the Short Code receiving the amount being transacted.
You can use the sandbox provided test credentials down below to simulates a payment made from the client phone's STK/SIM Toolkit menu, and enables you to receive the payment requests in real time.
## Example
iex> ExPesa.Mpesa.C2B.simulate(%{command_id: "CustomerPayBillOnline", phone_number: "254728833100", amount: 10, bill_reference: "Some Reference" })
{:ok,
%{
"ConversationID" => "AG_20200921_00006e93a78f009f7025",
"OriginatorCoversationID" => "9769-145819182-2",
"ResponseDescription" => "Accept the service request successfully."
}
}
"""
def simulate(%{
command_id: command_id,
phone_number: phone_number,
amount: amount,
bill_reference: bill_reference
}) do
paybill = Application.get_env(:ex_pesa, :mpesa)[:c2b_short_code]
payload = %{
"ShortCode" => paybill,
"CommandID" => command_id,
"Amount" => amount,
"Msisdn" => phone_number,
"BillRefNumber" => bill_reference
}
make_request("/mpesa/c2b/v1/simulate", payload)
end
def simulate(%{}) do
{:error, "Required Parameter missing, 'CommandID','Amount','Msisdn', 'BillRefNumber'"}
end
end
| 41.814815 | 215 | 0.702391 |
9ea02bda64d32a02467a1173d809847b78f054be | 236 | ex | Elixir | lib/beacon_web/page_management.ex | treble37/beacon | 12cab02ec8bb072582ffaab60d9f21a65588e392 | [
"MIT"
] | 98 | 2021-12-22T04:37:58.000Z | 2022-02-27T22:53:25.000Z | lib/beacon_web/page_management.ex | treble37/beacon | 12cab02ec8bb072582ffaab60d9f21a65588e392 | [
"MIT"
] | 5 | 2021-12-24T01:10:11.000Z | 2022-03-25T17:35:37.000Z | lib/beacon_web/page_management.ex | treble37/beacon | 12cab02ec8bb072582ffaab60d9f21a65588e392 | [
"MIT"
] | 7 | 2021-12-24T01:09:36.000Z | 2022-03-01T14:46:41.000Z | defmodule BeaconWeb.PageManagement do
  # Macro that injects the page-management LiveView routes into the caller —
  # presumably a Phoenix router scope where `live/3` and the PageLive /
  # PageEditorLive aliases are in scope (they resolve at the call site).
  # Route order is preserved exactly as written.
  defmacro routes do
    quote do
      live("/pages", PageLive.Index, :index)
      live("/pages/new", PageLive.Index, :new)
      live("/page_editor/:id", PageEditorLive, :edit)
    end
  end
end
| 23.6 | 53 | 0.661017 |
9ea038f2860a1acb770200aa9f3046415e9f8f97 | 1,259 | ex | Elixir | lib/elm_phoenix_web_socket_example/application.ex | phollyer/elm-phoenix-websocket-example | 147da038b5ca4f9304924124c546284f12ecfaa8 | [
"BSD-3-Clause"
] | null | null | null | lib/elm_phoenix_web_socket_example/application.ex | phollyer/elm-phoenix-websocket-example | 147da038b5ca4f9304924124c546284f12ecfaa8 | [
"BSD-3-Clause"
] | 2 | 2020-12-29T15:13:39.000Z | 2020-12-30T01:01:02.000Z | lib/elm_phoenix_web_socket_example/application.ex | phollyer/elm-phoenix-websocket-example | 147da038b5ca4f9304924124c546284f12ecfaa8 | [
"BSD-3-Clause"
] | null | null | null | defmodule ElmPhoenixWebSocketExample.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
:ets.new(:rooms_table, [:named_table, :public])
:ets.new(:users_table, [:named_table, :public])
children = [
# Start the Telemetry supervisor
ElmPhoenixWebSocketExampleWeb.Telemetry,
# Start the PubSub system
{Phoenix.PubSub, name: ElmPhoenixWebSocketExample.PubSub},
ElmPhoenixWebSocketExampleWeb.Presence,
# Start the Endpoint (http/https)
ElmPhoenixWebSocketExampleWeb.Endpoint
# Start a worker by calling: ElmPhoenixWebSocketExample.Worker.start_link(arg)
# {ElmPhoenixWebSocketExample.Worker, arg}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: ElmPhoenixWebSocketExample.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
  # Application callback invoked on configuration changes (hot upgrades):
  # forwards the changed/removed keys to the endpoint, then returns :ok
  # as the callback contract requires.
  def config_change(changed, _new, removed) do
    ElmPhoenixWebSocketExampleWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 34.027027 | 84 | 0.741064 |
9ea07d8af7619d890eed87f05588e6c513e2e98c | 307 | ex | Elixir | text_client/lib/procs/procs.ex | johncoleman83/elixir-for-programmers | 062d0a761e59b30909377694151655c946f48bb4 | [
"MIT"
] | null | null | null | text_client/lib/procs/procs.ex | johncoleman83/elixir-for-programmers | 062d0a761e59b30909377694151655c946f48bb4 | [
"MIT"
] | null | null | null | text_client/lib/procs/procs.ex | johncoleman83/elixir-for-programmers | 062d0a761e59b30909377694151655c946f48bb4 | [
"MIT"
] | null | null | null | defmodule Procs do
def greeter(count) do
receive do
{ :reset } ->
greeter(count - count)
{ :boom, reason } ->
exit(reason)
{ :add, n } ->
greeter(count + n)
{ :msg, msg } ->
IO.puts "#{count} #{msg}"
greeter(count + 1)
end
end
end
| 19.1875 | 33 | 0.465798 |
9ea0caf81942f3d9e24f4c99959d74b71c997778 | 2,855 | ex | Elixir | apps/core/lib/core/policies/repository.ex | pluralsh/plural | d33d08dee6348126334810e044b3699ef3f74819 | [
"Apache-2.0"
] | 59 | 2021-09-16T19:29:39.000Z | 2022-03-31T20:44:24.000Z | apps/core/lib/core/policies/repository.ex | pluralsh/plural | d33d08dee6348126334810e044b3699ef3f74819 | [
"Apache-2.0"
] | 111 | 2021-08-15T09:56:37.000Z | 2022-03-31T23:59:32.000Z | apps/core/lib/core/policies/repository.ex | pluralsh/plural | d33d08dee6348126334810e044b3699ef3f74819 | [
"Apache-2.0"
] | 4 | 2021-12-13T09:43:01.000Z | 2022-03-29T18:08:44.000Z | defmodule Core.Policies.Repository do
  use Piazza.Policy
  import Core.Policies.Utils
  alias Core.Schema.{User, Installation, Repository, Integration, Artifact, DockerRepository, ApplyLock}
  # Authorization policy for repository-related schemas. Each can?/3 clause
  # answers (user, resource, action); anything unmatched falls through to the
  # final deny-all clause. Clause order is load-bearing — several patterns
  # overlap (e.g. the private-Repository :access clause must precede the
  # unconditional :access clause below) — so reorder with care.
  # Integrations inherit their parent repository's policy for any action.
  def can?(%User{} = user, %Integration{} = integ, policy) do
    %{repository: repo} = Core.Repo.preload(integ, [:repository])
    can?(user, repo, policy)
  end
  # Artifacts likewise inherit their parent repository's policy.
  def can?(%User{} = user, %Artifact{} = art, policy) do
    %{repository: repo} = Core.Repo.preload(art, [:repository])
    can?(user, repo, policy)
  end
  # :support requires a same-account publisher plus the :support RBAC
  # permission scoped to the repository name.
  def can?(%User{account_id: aid} = user, %Repository{} = repo, :support) do
    case Core.Repo.preload(repo, [:publisher]) do
      %{publisher: %{account_id: ^aid}} ->
        check_rbac(user, :support, repository: repo.name)
      _ -> {:error, :forbidden}
    end
  end
  # Private repositories: only users in the publisher's account get access.
  # Must sit above the unconditional :access clause further down.
  def can?(%User{account_id: aid}, %Repository{private: true} = repo, :access) do
    case Core.Repo.preload(repo, [:publisher]) do
      %{publisher: %{account_id: ^aid}} -> :continue
      _ -> {:error, :forbidden}
    end
  end
  # Editing a docker repository defers to editing its parent repository.
  def can?(%User{} = user, %DockerRepository{} = dkr, :edit) do
    %{repository: repo} = Core.Repo.preload(dkr, [repository: [publisher: :account]])
    can?(user, repo, :edit)
  end
  # An unowned apply lock may be claimed by anyone who can edit the repo.
  def can?(%User{} = user, %ApplyLock{owner_id: nil} = lock, :create) do
    %{repository: repo} = Core.Repo.preload(lock, [repository: [publisher: :account]])
    can?(user, repo, :edit)
  end
  # An owned lock is only reclaimable once stale: it must not have been
  # touched (updated, or inserted if never updated) within the last 5 minutes.
  def can?(_, %ApplyLock{inserted_at: ins, updated_at: upd}, :create) do
    touched = upd || ins
    Timex.now()
    |> Timex.shift(minutes: -5)
    |> Timex.before?(touched)
    |> case do
      true -> {:error, "lock already in use"}
      false -> :pass
    end
  end
  # Public repositories are accessible to any user (private ones were
  # already handled by the clause above).
  def can?(%User{}, %Repository{}, :access), do: :continue
  # :pull is allowed for the publisher's account, the publisher's owner,
  # or any user holding an installation of the repository.
  def can?(%User{account_id: aid, id: user_id}, %Repository{} = repo, :pull) do
    case Core.Repo.preload(repo, [:publisher]) do
      %{publisher: %{account_id: ^aid}} -> :continue
      %{publisher: %{owner_id: ^user_id}} -> :continue
      _ ->
        if Core.Services.Repositories.get_installation(user_id, repo.id),
          do: :continue, else: {:error, :forbidden}
    end
  end
  # Creating/editing a repository defers to edit rights on its publisher.
  def can?(%User{} = user, %Repository{} = repo, action) when action in [:create, :edit] do
    case Core.Repo.preload(repo, [publisher: :account]) do
      %{publisher: pub} -> Core.Policies.Publisher.can?(user, pub, :edit)
      _ -> {:error, :forbidden}
    end
  end
  # Users may edit/access their own installations (user_id must match in
  # both the User and Installation patterns).
  def can?(%User{id: user_id}, %Installation{user_id: user_id}, action) when action in [:edit, :access],
    do: :continue
  # Installing requires the :install RBAC permission on the repo name.
  def can?(%User{} = user, %Installation{} = inst, :create) do
    %{repository: repo} = Core.Repo.preload(inst, [:repository])
    check_rbac(user, :install, repository: repo.name)
  end
  # Changesets are authorized against the struct they would produce.
  def can?(user, %Ecto.Changeset{} = cs, action),
    do: can?(user, apply_changes(cs), action)
  # Default: deny everything not explicitly allowed above.
  def can?(_, _, _), do: {:error, :forbidden}
end
| 34.39759 | 104 | 0.62627 |
9ea0cc4d80a9713b9dc606136a67b0e74373c115 | 647 | exs | Elixir | apps/definition_transform/mix.exs | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | apps/definition_transform/mix.exs | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | apps/definition_transform/mix.exs | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | defmodule Transform.MixProject do
use Mix.Project
def project do
[
app: :definition_transform,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:definition, in_umbrella: true},
{:definition_dictionary, in_umbrella: true},
{:dialyxir, "~> 1.0.0-rc.7", only: [:dev], runtime: false}
]
end
end
| 20.21875 | 64 | 0.548686 |
9ea0d1b43f510163605ed95c2d6f5c03eee9263c | 145 | ex | Elixir | MessagesBasic.ex | hectorip/ErlangExercises | 4a1aa5de0504da1bfe5a6c31c1d20277524ab363 | [
"MIT"
] | 4 | 2016-09-22T03:47:56.000Z | 2017-02-02T17:42:57.000Z | MessagesBasic.ex | hectorip/ErlangExercises | 4a1aa5de0504da1bfe5a6c31c1d20277524ab363 | [
"MIT"
] | null | null | null | MessagesBasic.ex | hectorip/ErlangExercises | 4a1aa5de0504da1bfe5a6c31c1d20277524ab363 | [
"MIT"
] | null | null | null | defmodule Spawn1 do
def greet do
receive do
{sender, msg} -> send sender, {:ok, "Hallo, #{msg}"}
end
end
end
| 18.125 | 64 | 0.510345 |
9ea0d2400afe760f6518d74cd78845664defdbb7 | 1,766 | exs | Elixir | test/config_test.exs | samullen/d | 1f3168ee8bd9fef4402edb528a394943ee31e3ab | [
"Unlicense"
] | null | null | null | test/config_test.exs | samullen/d | 1f3168ee8bd9fef4402edb528a394943ee31e3ab | [
"Unlicense"
] | null | null | null | test/config_test.exs | samullen/d | 1f3168ee8bd9fef4402edb528a394943ee31e3ab | [
"Unlicense"
] | null | null | null | defmodule ConfigTest do
use ExUnit.Case
import D.Config, only: [
parse_args: 1,
parse_config: 1,
]
describe "parse_args/1" do
test ":help returned by option parsing with -h and --help options" do
assert parse_args(["-h", "anything"]) == :help
assert parse_args(["--help", "anything"]) == :help
end
test "term and config returned when args provided" do
assert parse_args(~w{example}) == %{"term" => "example"}
end
end
describe "parse_config/1" do
test ":help is returned when :help is passed" do
assert parse_config(:help) == :help
end
test "passing term returns %{term: term, config}" do
default_config = %{"dictionary_api_key" => "your-key-here", "thesaurus_api_key" => "your-key-here"}
assert parse_config(%{"term" => "example"}) == Map.merge(%{"term" => "example"}, default_config)
end
test "it creates a .drc file in the configured drc path if nonexistent" do
drc_path = Path.expand(Application.get_env(:d, :drc_path))
File.rename(drc_path, "#{Path.dirname(drc_path)}/.drc_bak")
parse_config(%{"term" => "example"})
assert File.exists?(drc_path) == true
File.rename("#{Path.dirname(drc_path)}/.drc_bak", drc_path)
end
test "it sets defaults for created .drc" do
drc_path = Path.expand(Application.get_env(:d, :drc_path))
File.rename(drc_path, "#{Path.dirname(drc_path)}/.drc_bak")
default_config = """
[config]
dictionary_api_key = your-key-here
thesaurus_api_key = your-key-here
"""
parse_config(%{"term" => "example"})
assert File.read(drc_path) == {:ok, default_config}
File.rename("#{Path.dirname(drc_path)}/.drc_bak", drc_path)
end
end
end
| 31.535714 | 105 | 0.631937 |
9ea0ecdcab1b1cf5b31ce7ca3ff6eae0d7667379 | 1,819 | exs | Elixir | benchmarks/string_pattern_matching.exs | xiamx/lemma | 2b4d7bc459f57ba9cff0f79a90b3dc9a2379398d | [
"Apache-2.0"
] | 46 | 2017-09-12T16:49:24.000Z | 2021-06-09T06:44:15.000Z | benchmarks/string_pattern_matching.exs | xiamx/lemma | 2b4d7bc459f57ba9cff0f79a90b3dc9a2379398d | [
"Apache-2.0"
] | 1 | 2017-12-08T21:03:08.000Z | 2017-12-08T21:03:08.000Z | benchmarks/string_pattern_matching.exs | xiamx/lemma | 2b4d7bc459f57ba9cff0f79a90b3dc9a2379398d | [
"Apache-2.0"
] | 3 | 2017-12-08T20:57:07.000Z | 2019-11-29T01:05:42.000Z | defmodule StringPatternMatching do
def hash_of(i) do
(:crypto.hash(:sha, "#{i}") |> Base.encode16)
end
defmacro generate_lemma(num) do
for i <- 0..num do
root = hash_of(i)
quote do
def lemma(unquote(root) <> suffix) do
unquote(root) <> case suffix do
"s" -> ""
"ed" -> ""
"ing" -> ""
end
end
end
end
end
end
# Each module below forces compile-time expansion of N + 1 generated
# lemma/1 clauses via StringPatternMatching.generate_lemma/1; the IO.puts
# lines mark compilation progress when the script runs.
defmodule LemmaTen do
  require StringPatternMatching
  StringPatternMatching.generate_lemma(10)
end
IO.puts "lemma10 compiled"
defmodule Lemma100 do
  require StringPatternMatching
  StringPatternMatching.generate_lemma(100)
end
IO.puts "lemma100 compiled"
defmodule Lemma1000 do
  require StringPatternMatching
  StringPatternMatching.generate_lemma(1000)
end
IO.puts "lemma1000 compiled"
# The larger variants are commented out — presumably because the generated
# clause count makes compilation prohibitively slow; confirm before
# re-enabling.
# defmodule Lemma10000 do
#   require StringPatternMatching
#   StringPatternMatching.generate_lemma(10000)
# end
# IO.puts "lemma10000 compiled"
# defmodule Lemma100000 do
#   require StringPatternMatching
#   StringPatternMatching.generate_lemma(100000)
# end
# defmodule Lemma1000000 do
#   require StringPatternMatching
#   StringPatternMatching.generate_lemma(1000000)
# end
Benchee.run(%{
"10 canonical words" => fn -> LemmaTen.lemma(StringPatternMatching.hash_of(5) <> "s") end,
"100 canonical words" => fn -> Lemma100.lemma(StringPatternMatching.hash_of(50) <> "s") end,
"1000 canonical words" => fn -> Lemma1000.lemma(StringPatternMatching.hash_of(900) <> "s") end,
# "10000 canonical words" => fn -> Lemma10000.lemma(StringPatternMatching.hash_of(5000) <> "s") end,
# "100000 canonical words" => fn -> Lemma1000.lemma(StringPatternMatching.hash_of(50000) <> "s") end,
# "1000000 canonical words" => fn -> Lemma10000.lemma(StringPatternMatching.hash_of(500000) <> "s") end,
}) | 26.75 | 106 | 0.698736 |
9ea1040e271a8e1f3904193adc196b58f667e2f0 | 946 | ex | Elixir | test/support/channel_case.ex | macypa/storeHall | 9fe810f763527cc87fa165138bdfb3cda92fa553 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | macypa/storeHall | 9fe810f763527cc87fa165138bdfb3cda92fa553 | [
"MIT"
] | 4 | 2019-07-16T06:24:42.000Z | 2021-05-07T22:26:14.000Z | test/support/channel_case.ex | macypa/storeHall | 9fe810f763527cc87fa165138bdfb3cda92fa553 | [
"MIT"
] | 1 | 2018-11-23T21:13:31.000Z | 2018-11-23T21:13:31.000Z | defmodule StoreHallWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint StoreHallWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(StoreHall.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(StoreHall.Repo, {:shared, self()})
end
:ok
end
end
| 24.894737 | 71 | 0.717759 |
9ea128725414c07420738c4b73cddec945bfe9db | 21,478 | ex | Elixir | sample_app/lib/my_app_web/phoenix_params.ex | tanyakavrakova/phoenix_params | 3d6a4522c7a9f3a6d55101fa45330c18f72933c1 | [
"MIT"
] | null | null | null | sample_app/lib/my_app_web/phoenix_params.ex | tanyakavrakova/phoenix_params | 3d6a4522c7a9f3a6d55101fa45330c18f72933c1 | [
"MIT"
] | null | null | null | sample_app/lib/my_app_web/phoenix_params.ex | tanyakavrakova/phoenix_params | 3d6a4522c7a9f3a6d55101fa45330c18f72933c1 | [
"MIT"
] | null | null | null | defmodule PhoenixParams do
@moduledoc """
A plug for Phoenix applications for validating HTTP request params.
Example usage:
defmodule ApiWeb.UserController do
use ApiWeb, :controller
plug Api.Plugs.Requests.User.Index when action in [:index]
def index(conn, params) do
# params is now a map with atom keys and transformed values
user = params.user
# ...
end
end
defmodule Api.Plugs.Requests.User.Index do
use Api.Plugs.Request, error_view: ApiWeb.ErrorView
param :format,
type: String,
default: "json",
in: ~w[json csv]
param :date,
type: Date,
required: true,
validator: &__MODULE__.validate_date/1
param :merchant_id,
type: Integer,
numericality: %{greater_than: 0}
param :email,
type: [String],
validaator: &__MODULE__.validate_email/1
global_validator &__MODULE__.ensure_mid_or_email/1
#
# Date validators
#
def validate_date(date) do
# return {:error, message} if invalid
# otherwise the validation passes
end
#
# Email validators
#
def validate_email({:error, _}), do: :noop
# Invoke on separate elements
def validate_email(list) when is_list(list) do
validate_each(list, &validate_email/1)
end
def validate_email(email) do
email =~ ~r/..../ || {:error, "is not a valid email address"}
end
#
# Global validators
#
def ensure_mid_or_email({:error, _}) do
params[:merchant_id] || params[:email] ||
{:error, "merchant id or email required"}
end
end
Supported types are:
* `String`
* `Integer`
* `Float`
* `Boolean`
* `Date`
* `DateTime`
Types can be wrapped in [], indicating the value is an array. Example:
* `[String]`
* `[Integer]`
* ...
Custom types are also supported. Example:
defmodule Requests.Index do
use Api.Plugs.Request
typedef Phone, &Coercers.phone/1
typedef Device, &Coercers.device/1
param :landline, type: Phone, required: true
param :device, type: Device
end
defmodule Coercers do
def phone(value) do
# transform your value here to anything
end
# ...
end
Nested types are also supported. Example:
defmodule Requests.Shared.Address do
param :country,
type: String,
required: true
# ...
end
defmodule Requests.Index do
param :address,
type: Requests.Shared.Address,
nested: true
end
Several OOTB validations exist:
- numericality - validates numbers.
Accepts a keyword list with :gt, :gte, :lt, :lte and/or :eq
- in - validates the presence of anything in a list
- length - validates length of a String.
Accepts a keyword list with :gt, :gte, :lt, :lte and/or :eq
- size - validates the number of elements in a list
The package is designed to be a "plug" and:
- it changes the input map's string keys to atoms
- it discards undefined params
- it changes (coerces) the values to whatever type they correspond to
This means that a definition like `param :age, type: Integer` will
transform an input `%{"name": "baba", "age": "79"}` to `%{age: 79}`
The original, unchanged params, are still accessible through
Plug's conn.body_params and conn.query_params.
- requires the below function to be defined in an Phoenix error view:
def render("400.json", %{conn: %{assigns: %{validation_failed: errors}}}) do
errors
end
When the type is specified as an array, (eg. `[Integer]`), the
validator will receive the entire array. This is done on purpose, but you
can take advantage of the exposed `validate_each/2` function to invoke it
on each element, returning properly formatted error message:
param :merchant_id,
type: [Integer],
required: true,
validator: &__MODULE__.checkmid/1
# Invoke validation on each separate element
def checkmid(list) when is_list(list) do
validate_each(list, params, &checkmid/2)
end
# Validate element
def checkmid(mid) do
mid > 0 || {:error, "must be positive"}
end
Errors reported by `validate_each` include which element failed validation:
"element at index 0: must be positive"
Finally, there is the `global_validator` macro, which allows you to define
a callback to be invoked if all individual parameter validations passed
successfully. This is useful in cases where the context validity is not
dictated by the sole value of a single parameter, but rather a combination.
E.g. mutually-exclusive params, at-least-one-of params, etc. are all example
cases in which the request entity itself is either valid or not.
The callback should accept exactly 1 argument -- the request params,
after coercion. Anything return value, different from {:error, reason} will
be considered a pass.
The single argument expected by the `__using__` macro is the error view
module (usually `YourAppNameWeb.ErrorView`)
TODO: add support for arrays of nested type params.
"""
defmacro param(name, opts) when is_atom(name) or (is_list(name) and length(name) == 1) do
quote location: :keep, bind_quoted: [name: name, opts: opts] do
{type, opts} = Keyword.pop(opts, :type)
typedef = Enum.find(@typedefs, &(elem(&1, 0) == type))
{validator, opts} = Keyword.pop(opts, :validator)
{required, opts} = Keyword.pop(opts, :required)
{default, opts} = Keyword.pop(opts, :default)
{nested, opts} = Keyword.pop(opts, :nested)
builtin_validators = opts
coercer = cond do
!typedef && (nested == true) ->
string_func_name = "&#{type}.validate/1"
{func_ref, []} = Code.eval_string(string_func_name)
func_ref
!typedef ->
raise "Unknown type: #{inspect(type)}"
true ->
elem(typedef, 1)
end
if Enum.any?(@paramdefs, &(elem(&1, 0) == name)) do
raise "Duplicate parameter: #{name}"
end
# Enum.each(builtin_validators, fn vname, vopts ->
# valid_builtin?(vname, vopts) || raise "Invalid options: #{inspect({vname, vopts})}"
# end)
if length(builtin_validators) > 1 || (validator && length(builtin_validators) > 0) do
raise "Specify either a custom validator or exactly one builtin validator"
end
param_opts = %{
type: type,
coercer: coercer,
validator: validator || List.first(builtin_validators),
required: required,
nested: nested,
default: default
}
@paramdefs {name, param_opts}
end
end
defmacro __before_compile__(_env) do
quote location: :keep do
defstruct Enum.map(@paramdefs, fn {name, opts} ->
{name, opts[:default]}
end)
def global_validators do
@global_validators |> Enum.reverse
end
def param_names do
Enum.reduce(@paramdefs, [], fn {name, opts}, acc -> [name | acc] end)
end
def paramdefs do
Map.new(@paramdefs)
end
def typedefs do
Map.new(@typedefs)
end
end
end
defmacro global_validator(func_ref, opts \\ []) do
opts = Keyword.merge([halt: false], opts)
quote location: :keep do
@global_validators {unquote(func_ref), unquote(opts[:halt])}
end
end
#
# Allow to define types:
#
# typedef Baba, &Kernel.inspect/1
#
defmacro typedef(coercer_name, coercer_ref) do
# Convert &Baba.Pena.foo/1 to "_array_baba_pena_foo"
# This is needed since the passed in coercer may be a remote function
# i.e. &Baba.Pena.my_coercer/1. If there is another custom type with
# a coercer with the same name, but scoped differently,
# i.e. &Baba.Gana.my_coercer/1, we need to be able to distinguish them
# uniquely, since both array coercers will be defined here and need to
# have unique names:
# &__MODULE__._array_baba_pena_my_coercer/1
# &__MODULE__._array_baba_gana_my_coercer/1
#
"&" <> string_func_name = Macro.to_string(coercer_ref)
{ns, [func]} = string_func_name |> String.split(".") |> Enum.split(-1)
[func, _arity] = String.split(func, "/")
# Coercer that works on a collection
local_coercer_name = ns |> Enum.map(&String.downcase/1) |> Enum.join("_")
ary_coercer_name = String.to_atom("_array_#{local_coercer_name}_#{func}")
quote location: :keep do
def unquote(ary_coercer_name)(list) when is_nil(list), do: list
def unquote(ary_coercer_name)(list) when not is_list(list), do: {:error, "not an array"}
def unquote(ary_coercer_name)(list) do
{i, res} =
Enum.reduce_while(list, {0, []}, fn x, {i, coerced_list} ->
case unquote(coercer_ref).(x) do
{:error, reason} -> {:halt, {i, {:error, reason}}}
value -> {:cont, {i + 1, [value | coerced_list]}}
end
end)
case res do
{:error, reason} -> {:error, "element at index #{i}: #{reason}"}
list -> Enum.reverse(list)
end
end
@typedefs {unquote(coercer_name), unquote(coercer_ref)}
ary_type_name = [unquote(coercer_name)]
{ary_coercer_ref, []} = Code.eval_string("&#{__MODULE__}.#{unquote(ary_coercer_name)}/1")
@typedefs {ary_type_name, ary_coercer_ref}
end
end
defmacro __using__(error_view: error_view) do
quote location: :keep do
import Plug.Conn
import unquote(__MODULE__)
Module.register_attribute(__MODULE__, :paramdefs, accumulate: true)
Module.register_attribute(__MODULE__, :typedefs, accumulate: true)
Module.register_attribute(__MODULE__, :global_validators, accumulate: true)
typedef String, &__MODULE__.coerce_string/1
typedef Integer, &__MODULE__.coerce_integer/1
typedef Float, &__MODULE__.coerce_float/1
typedef Boolean, &__MODULE__.coerce_boolean/1
typedef Date, &__MODULE__.coerce_date/1
typedef DateTime, &__MODULE__.coerce_datetime/1
def init(default), do: default
def validate(params) when not is_map(params), do: {:error, "invalid"}
def validate(params) do
params
|> extract
|> run_coercions
|> run_validations
|> conclude
|> maybe_run_global_validations
|> conclude
end
def extract(raw_params) do
Enum.reduce(param_names(), %{}, fn name, extracted ->
pdef = paramdefs()[name]
value = raw_params[to_string(name)]
value = is_nil(value) && pdef.default || value
Map.put(extracted, name, value)
end)
end
def run_coercions(params) do
Enum.reduce(params, params, fn {name, value}, coerced ->
pdef = paramdefs()[name]
case value do
nil ->
pdef.required && %{coerced | name => {:error, "required"}} || coerced
_ ->
case pdef.coercer.(value) do
{:ok, val} -> %{coerced | name => val}
val -> %{coerced | name => val}
end
end
end)
end
def run_validations(coerced_params) do
Enum.reduce(coerced_params, coerced_params, fn {name, value}, validated ->
pdef = paramdefs()[name]
cond do
is_nil(pdef.validator) ->
# no validator defined => don't validate
validated
is_nil(value) ->
# param was optional and value is nil => don't validate
validated
is_tuple(value) ->
# coercion failed => don't validate
validated
is_tuple(pdef.validator) ->
{vname, vopts} = pdef.validator
case run_builtin_validation(vname, vopts, value) do
{:error, reason} -> %{validated | name => {:error, reason}}
_ -> validated
end
is_function(pdef.validator) ->
case pdef.validator.(value) do
{:error, reason} -> %{validated | name => {:error, reason}}
_ -> validated
end
end
end)
end
def conclude(validated_params) do
errors = Enum.filter(validated_params, fn param ->
case param do
{nil, _} -> true # global validation failed
{_, {:error, _}} -> true # param validation or coercion failed
_ -> false
end
end)
Enum.any?(errors) && {:error, errors} || {:ok, validated_params}
end
def maybe_run_global_validations(validated_params) do
case validated_params do
{:error, params} ->
# Don't run global validations if individual params failed
params
{:ok, params} ->
errors = Enum.reduce_while(global_validators(), [], fn {validator, should_halt}, errors ->
case validator.(params) do
{:error, reason} ->
errors = errors ++ [reason]
should_halt && {:halt, errors} || {:cont, errors}
_ ->
{:cont, errors}
end
end)
Enum.any?(errors) && Map.put(params, nil, errors) || params
end
end
def call(conn, _) do
case validate(conn.params) do
{:error, errors} ->
errors = Enum.reduce(errors, [], &(validation_error(&1, &2)))
errors = length(errors) > 1 && errors || List.first(errors)
conn =
conn
|> put_status(400)
|> halt
|> Phoenix.Controller.render(unquote(error_view), "400.json", validation_failed: errors)
{:ok, params} ->
# NOTE: It's generally better to leave the original conn.params
# untouched. However, the phoenix framework passes this
# explicitly as the second param to any controller action,
# which will discourage anyone from manually having to fetch
# the coerced params stored in conn.private, so people
# will eventually forget about them and just start using the
# raw params.
# Plug.Conn.put_private(conn, :sumup_params, coerced_params)
Map.put(conn, :params, params)
end
end
# Returns true when a coerced param value is an `{:error, _}` tuple.
#
# Fix: the original defined the `true` clause with arity 2 but the only
# fallback with arity 1, so `coercion_error?(param, valid_value)` raised
# FunctionClauseError for any non-error value. An arity-2 fallback is added;
# the arity-1 clause is kept for backward compatibility with any existing
# callers. The unused `param` is underscored to silence the compiler warning.
def coercion_error?(_param, {:error, _}), do: true
def coercion_error?(_param, _value), do: false

# Kept for backward compatibility (pre-fix arity-1 fallback).
def coercion_error?(_), do: false
#
# Default coercers
#
# Coerces a raw param into an integer.
#
# nil passes through untouched (missing optional param); integers are
# returned as-is; binaries are parsed strictly (no trailing garbage);
# anything else yields `{:error, "not an integer"}`.
def coerce_integer(nil), do: nil
def coerce_integer(number) when is_integer(number), do: number

def coerce_integer(text) when is_bitstring(text) do
  case Integer.parse(text) do
    {parsed, ""} -> parsed
    _ -> {:error, "not an integer"}
  end
end

def coerce_integer(_other), do: {:error, "not an integer"}
# Coerces a raw param into a float.
#
# nil passes through; floats are returned as-is (integers are NOT accepted,
# matching the original behavior); binaries are parsed strictly; everything
# else yields `{:error, "not a float"}`.
def coerce_float(nil), do: nil
def coerce_float(number) when is_float(number), do: number

def coerce_float(text) when is_bitstring(text) do
  case Float.parse(text) do
    {parsed, ""} -> parsed
    _ -> {:error, "not a float"}
  end
end

def coerce_float(_other), do: {:error, "not a float"}
# Coerces a raw param into a string: nil passes through, binaries are kept,
# anything else is rejected.
def coerce_string(nil), do: nil
def coerce_string(text) when is_bitstring(text), do: text
def coerce_string(_other), do: {:error, "not a string"}
# Coerces an ISO 8601 string (e.g. "2018-05-10") into a Date.
# nil passes through; non-strings and unparseable strings are rejected.
def coerce_date(nil), do: nil

def coerce_date(raw) when is_bitstring(raw) do
  case Date.from_iso8601(raw) do
    {:ok, date} -> date
    {:error, _} -> {:error, "invalid date"}
  end
end

def coerce_date(_other), do: {:error, "invalid date"}
# Coerces an ISO 8601 string (e.g. "2018-05-10T18:14:39Z") into a DateTime.
# The UTC offset returned by DateTime.from_iso8601/1 is discarded, matching
# the original behavior. nil passes through; anything else is rejected.
def coerce_datetime(nil), do: nil

def coerce_datetime(raw) when is_bitstring(raw) do
  case DateTime.from_iso8601(raw) do
    {:ok, datetime, _utc_offset} -> datetime
    {:error, _} -> {:error, "invalid datetime"}
  end
end

def coerce_datetime(_other), do: {:error, "invalid datetime"}
# Coerces a string param into an atom.
#
# SECURITY NOTE(review): `String.to_atom/1` creates atoms from arbitrary
# input and atoms are never garbage-collected, so coercing untrusted request
# params this way can exhaust the atom table. If the set of valid atoms is
# known ahead of time, prefer `String.to_existing_atom/1` (behavior kept
# unchanged here because callers may rely on new atoms being created).
def coerce_atom(v) when is_bitstring(v), do: String.to_atom(v)
# Fix: underscore the unused binding to silence the compiler warning the
# original `v` produced.
def coerce_atom(_v), do: {:error, "string expected"}
# Coerces a raw param into a boolean.
#
# nil passes through; true/false are kept; the strings "true"/"false" map
# to the corresponding booleans; everything else is rejected.
def coerce_boolean(nil), do: nil
def coerce_boolean(flag) when is_boolean(flag), do: flag
def coerce_boolean("true"), do: true
def coerce_boolean("false"), do: false
def coerce_boolean(_other), do: {:error, "not a boolean"}
#
# This validator is to be invoked manually in custom validators.
# E.g.
# def my_validator(list) when is_list(list), do: validate_each(list, &my_validator/1)
# def my_validator(value) do
# value == 5 || {:error, "is not 5"}
# end
#
# Applies `validator` to every element of `list`, stopping at the first
# failure. Returns `true` when all elements pass, otherwise
# `{:error, "element at index N: reason"}` for the first failing element.
def validate_each(list, validator) do
  failure =
    list
    |> Stream.with_index()
    |> Enum.find_value(fn {element, index} ->
      case validator.(element) do
        {:error, reason} -> {index, reason}
        _ -> nil
      end
    end)

  case failure do
    nil -> true
    {index, reason} -> {:error, "element at index #{index}: #{reason}"}
  end
end
#
# Builtin validations
#
# Numeric bounds validation. Each option present in `opts` is checked in
# order (gt, gte, lt, lte, eq); the first violated bound produces
# `{:error, message}`, otherwise `true`.
def run_builtin_validation(:numericality, opts, value) do
  checks = [
    gt: {&Kernel.>/2, "must be >"},
    gte: {&Kernel.>=/2, "must be >="},
    lt: {&Kernel.</2, "must be <"},
    lte: {&Kernel.<=/2, "must be <="},
    eq: {&Kernel.==/2, "must be =="}
  ]

  Enum.find_value(checks, true, fn {key, {compare, label}} ->
    case Map.fetch(opts, key) do
      {:ok, bound} ->
        if compare.(value, bound), do: nil, else: {:error, "#{label} #{bound}"}

      :error ->
        nil
    end
  end)
end
# Membership validation: `value` must be one of the allowed `values`.
def run_builtin_validation(:in, values, value) do
  if value in values do
    true
  else
    {:error, "allowed values: #{inspect(values)}"}
  end
end
# String length validation (gt/gte/lt/lte/eq bounds, first violation wins).
#
# Fixes: (1) the :lte message read "must at most N chars" — missing "be";
# (2) `String.length/1` is O(n) over graphemes and was recomputed for every
# bound — it is now computed once.
def run_builtin_validation(:length, opts, value) when is_bitstring(value) do
  len = String.length(value)

  with true <- !Map.has_key?(opts, :gt) || len > opts.gt || "must be more than #{opts.gt} chars",
       true <- !Map.has_key?(opts, :gte) || len >= opts.gte || "must be at least #{opts.gte} chars",
       true <- !Map.has_key?(opts, :lt) || len < opts.lt || "must be less than #{opts.lt} chars",
       true <- !Map.has_key?(opts, :lte) || len <= opts.lte || "must be at most #{opts.lte} chars",
       true <- !Map.has_key?(opts, :eq) || len == opts.eq || "must be exactly #{opts.eq} chars"
  do
    true
  else
    message -> {:error, message}
  end
end
# List size validation (gt/gte/lt/lte/eq bounds, first violation wins).
#
# Fix: `length/1` is O(n) and was recomputed for every bound (up to five
# traversals of the list) — it is now computed once. Messages unchanged.
def run_builtin_validation(:size, opts, value) when is_list(value) do
  count = length(value)

  with true <- !Map.has_key?(opts, :gt) || count > opts.gt || "must contain more than #{opts.gt} elements",
       true <- !Map.has_key?(opts, :gte) || count >= opts.gte || "must contain at least #{opts.gte} elements",
       true <- !Map.has_key?(opts, :lt) || count < opts.lt || "must contain less than #{opts.lt} elements",
       true <- !Map.has_key?(opts, :lte) || count <= opts.lte || "must contain at most #{opts.lte} elements",
       true <- !Map.has_key?(opts, :eq) || count == opts.eq || "must contain exactly #{opts.eq} elements"
  do
    true
  else
    message -> {:error, message}
  end
end
# Format validation: `value` must match the compiled regex `pattern`.
def run_builtin_validation(:regex, pattern, value) do
  if Regex.match?(pattern, value), do: true, else: {:error, "invalid format"}
end
#
# Error formatter
#
# Global validation errors are stored under a nil key and are a list
# of messages
# Flattens the error structure into a list of error maps. Clause order is
# load-bearing: shapes are matched most-specific first, and the final two
# clauses (non-tuple first argument) act as leaf formatters that RETURN A
# MAP rather than reducing into `errors` — they are only reached via the
# recursive calls below.
#
# Global validation errors are stored under a nil key and are a list
# of messages
defp validation_error({nil, list}, errors) when is_list(list) do
  Enum.reduce(list, errors, &(validation_error({nil, &1}, &2)))
end

# Nested validation errors are stored under a param key and are a
# (keyword) list of {name, {:error, msg}} (or {nil, list} like above)
defp validation_error({name, {:error, list}}, errors) when is_list(list) do
  Enum.reduce(list, errors, fn {k, v}, acc ->
    # A nil nested key means a global error inside the nested set, so the
    # parent name is kept; otherwise a dotted "parent.child" path is built.
    nested_name = k && "#{name}.#{k}" || name
    validation_error({nested_name, v}, acc)
  end)
end

# Regular validation errors are stored under a param key and are
# a tuple {:error, msg}
defp validation_error({name, {:error, message}}, errors) do
  validation_error({name, message}, errors)
end

# Catch-all for {name, message} pairs: prepend the formatted leaf map.
defp validation_error({name, message}, errors) do
  [validation_error(name, message) | errors]
end

# Leaf formatter for global (nil-named) errors — note: returns a map, not a
# list; only called from the clauses above with a non-tuple first argument.
defp validation_error(nil, message) do
  %{error_code: "INVALID", message: "Validation error: #{message}"}
end

# Leaf formatter for named params; "required" failures get a distinct code.
defp validation_error(name, message) do
  code = (message == "required") && "MISSING" || "INVALID"
  %{error_code: code, param: name, message: "Validation error: #{message}"}
end
@before_compile unquote(__MODULE__)
end
end
end
| 33.559375 | 124 | 0.582317 |
9ea132304b072805fc12c58dc7bc6ed4b28fa75e | 1,059 | ex | Elixir | lib/maze.ex | korczis/maze | 128ca3958c9e62c4fdc53efa72799283d66b056d | [
"MIT"
] | null | null | null | lib/maze.ex | korczis/maze | 128ca3958c9e62c4fdc53efa72799283d66b056d | [
"MIT"
] | 3 | 2016-11-22T20:23:00.000Z | 2016-11-22T20:23:20.000Z | lib/maze.ex | korczis/maze | 128ca3958c9e62c4fdc53efa72799283d66b056d | [
"MIT"
] | null | null | null | defmodule Maze do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
# OTP Application callback: builds and starts the top-level supervision
# tree. Children start in list order — the DETS-backed user store, then the
# Ecto repository, then the Phoenix endpoint.
def start(_type, _args) do
  import Supervisor.Spec

  children = [
    worker(Maze.Users, [:"users.dets"]),
    supervisor(Maze.Repo, []),
    supervisor(Maze.Endpoint, [])
  ]

  # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html for other
  # supported strategies.
  Supervisor.start_link(children, strategy: :one_for_one, name: Maze.Supervisor)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
# Application callback invoked on configuration changes (e.g. during a hot
# code upgrade); forwards the diff to the endpoint. Must return :ok.
def config_change(changed, _new, removed) do
  Maze.Endpoint.config_change(changed, removed)
  :ok
end
end
| 32.090909 | 82 | 0.700661 |
9ea14f76ae84ea47fad18ffc7b76fcc12c4df58e | 9,369 | ex | Elixir | clients/service_broker/lib/google_api/service_broker/v1/api/v1.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/service_broker/lib/google_api/service_broker/v1/api/v1.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/service_broker/lib/google_api/service_broker/v1/api/v1.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ServiceBroker.V1.Api.V1 do
@moduledoc """
API calls for all endpoints tagged `V1`.
"""
alias GoogleApi.ServiceBroker.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.
## Parameters
- connection (GoogleApi.ServiceBroker.V1.Connection): Connection to server
- v1_id (String.t): Part of `resource`. REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
## Returns
{:ok, %GoogleApi.ServiceBroker.V1.Model.GoogleIamV1Policy{}} on success
{:error, info} on failure
"""
@spec servicebroker_get_iam_policy(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.ServiceBroker.V1.Model.GoogleIamV1Policy.t()} | {:error, Tesla.Env.t()}
def servicebroker_get_iam_policy(connection, v1_id, optional_params \\ [], opts \\ []) do
  # Where each supported optional parameter is placed in the request.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :upload_protocol => :query,
    :uploadType => :query
  }

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{v1Id}:getIamPolicy", %{"v1Id" => URI.encode(v1_id, &URI.char_unreserved?/1)})
    |> Request.add_optional_params(param_mapping, optional_params)

  # Execute against the Tesla client and decode into the policy model.
  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.ServiceBroker.V1.Model.GoogleIamV1Policy{}])
end
@doc """
Sets the access control policy on the specified resource. Replaces any existing policy.
## Parameters
- connection (GoogleApi.ServiceBroker.V1.Connection): Connection to server
- v1_id (String.t): Part of `resource`. REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (GoogleIamV1SetIamPolicyRequest):
## Returns
{:ok, %GoogleApi.ServiceBroker.V1.Model.GoogleIamV1Policy{}} on success
{:error, info} on failure
"""
@spec servicebroker_set_iam_policy(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.ServiceBroker.V1.Model.GoogleIamV1Policy.t()} | {:error, Tesla.Env.t()}
def servicebroker_set_iam_policy(connection, v1_id, optional_params \\ [], opts \\ []) do
  # Where each supported optional parameter is placed in the request; the
  # policy payload itself travels in the request body.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :upload_protocol => :query,
    :uploadType => :query,
    :body => :body
  }

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{v1Id}:setIamPolicy", %{"v1Id" => URI.encode(v1_id, &URI.char_unreserved?/1)})
    |> Request.add_optional_params(param_mapping, optional_params)

  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.ServiceBroker.V1.Model.GoogleIamV1Policy{}])
end
@doc """
Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a NOT_FOUND error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may \"fail open\" without warning.
## Parameters
- connection (GoogleApi.ServiceBroker.V1.Connection): Connection to server
- v1_id (String.t): Part of `resource`. REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (GoogleIamV1TestIamPermissionsRequest):
## Returns
{:ok, %GoogleApi.ServiceBroker.V1.Model.GoogleIamV1TestIamPermissionsResponse{}} on success
{:error, info} on failure
"""
@spec servicebroker_test_iam_permissions(Tesla.Env.client(), String.t(), keyword()) ::
        {:ok, GoogleApi.ServiceBroker.V1.Model.GoogleIamV1TestIamPermissionsResponse.t()}
        | {:error, Tesla.Env.t()}
def servicebroker_test_iam_permissions(connection, v1_id, optional_params \\ [], opts \\ []) do
  # Where each supported optional parameter is placed in the request; the
  # permissions list to check travels in the request body.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :upload_protocol => :query,
    :uploadType => :query,
    :body => :body
  }

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{v1Id}:testIamPermissions", %{"v1Id" => URI.encode(v1_id, &URI.char_unreserved?/1)})
    |> Request.add_optional_params(param_mapping, optional_params)

  connection
  |> Connection.execute(request)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.ServiceBroker.V1.Model.GoogleIamV1TestIamPermissionsResponse{}]
  )
end
end
| 47.080402 | 362 | 0.68748 |
9ea159597c0a190053c1374bfb0fcd49482d8281 | 582 | ex | Elixir | lib/flagr/model/constraint.ex | brexhq/exflagr | 9d4794aaf678831187c26341f5a2767d5535e2d6 | [
"MIT"
] | null | null | null | lib/flagr/model/constraint.ex | brexhq/exflagr | 9d4794aaf678831187c26341f5a2767d5535e2d6 | [
"MIT"
] | null | null | null | lib/flagr/model/constraint.ex | brexhq/exflagr | 9d4794aaf678831187c26341f5a2767d5535e2d6 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Flagr.Model.Constraint do
  @moduledoc """
  Struct representing a Flagr constraint record with `id`, `property`,
  `operator` and `value` fields.
  """

  @derive [Poison.Encoder]
  defstruct [:id, :property, :operator, :value]

  @type t :: %__MODULE__{
          id: integer(),
          property: String.t(),
          operator: String.t(),
          value: String.t()
        }
end
defimpl Poison.Decoder, for: Flagr.Model.Constraint do
  # No post-decode transformation is needed; the decoded struct is returned
  # unchanged.
  def decode(value, _options), do: value
end
| 18.1875 | 75 | 0.628866 |
9ea1b4376ca98f45e30a4f2c033554eb0ab1cdfc | 190 | ex | Elixir | lib/silver/utils.ex | elixirdrops/silver | 38bfc34e5e0b8b49c9f6eee87b9181141ed2535f | [
"MIT"
] | 3 | 2016-06-27T04:32:02.000Z | 2018-03-03T06:00:12.000Z | lib/silver/utils.ex | elixirdrops/silver | 38bfc34e5e0b8b49c9f6eee87b9181141ed2535f | [
"MIT"
] | null | null | null | lib/silver/utils.ex | elixirdrops/silver | 38bfc34e5e0b8b49c9f6eee87b9181141ed2535f | [
"MIT"
] | null | null | null | defmodule Silver.Utils do
# Converts a monetary amount (in major units) to integer cents.
#
# Fix: uses `round/1` instead of `trunc/1`. Binary floats cannot represent
# most decimal amounts exactly (e.g. `19.99 * 100 == 1998.9999999999998`),
# so truncation silently dropped a cent in those cases; rounding returns the
# intended value.
def to_cents(amount) do
  round(amount * 100)
end
# Returns the number of seconds elapsed since local midnight (0..86399).
def timestamp do
  :calendar.local_time()
  |> elem(1)
  |> :calendar.time_to_seconds()
end
end | 19 | 39 | 0.694737 |
9ea1f2b5f2e735b02c6618e1619c8b4c309c916c | 1,169 | exs | Elixir | test/anti_entropy_test.exs | evadne/delta_crdt_ex | 3ec6940edd6fdf5ee047f5e402e4054f4ad678c9 | [
"MIT"
] | 1 | 2019-11-11T16:58:03.000Z | 2019-11-11T16:58:03.000Z | test/anti_entropy_test.exs | evadne/delta_crdt_ex | 3ec6940edd6fdf5ee047f5e402e4054f4ad678c9 | [
"MIT"
] | null | null | null | test/anti_entropy_test.exs | evadne/delta_crdt_ex | 3ec6940edd6fdf5ee047f5e402e4054f4ad678c9 | [
"MIT"
] | null | null | null | defmodule AntiEntropyTest do
use ExUnit.Case, async: true
use ExUnitProperties
alias DeltaCrdt.{CausalContext, AntiEntropy}
describe ".is_strict_expansion/2" do
  test "strict expansion" do
    base = CausalContext.new([{1, 0}, {1, 1}]) |> CausalContext.compress()
    delta = CausalContext.new([{1, 2}]) |> CausalContext.compress()

    # The delta continues directly from base's dots, so both directions hold.
    assert true == AntiEntropy.is_strict_expansion(base, delta)
    assert true == AntiEntropy.is_strict_expansion(delta, base)
  end

  test "not an expansion" do
    base = CausalContext.new([{1, 0}, {1, 1}]) |> CausalContext.compress()
    delta = CausalContext.new([{1, 3}]) |> CausalContext.compress()

    # The delta leaves a gap after base's dots, so base -> delta is not strict.
    assert false == AntiEntropy.is_strict_expansion(base, delta)
    assert true == AntiEntropy.is_strict_expansion(delta, base)
  end

  test "not an expansion in both directions" do
    base = CausalContext.new([{1, 0}, {1, 1}, {2, 4}]) |> CausalContext.compress()
    delta = CausalContext.new([{1, 3}, {2, 2}]) |> CausalContext.compress()

    assert false == AntiEntropy.is_strict_expansion(base, delta)
    assert false == AntiEntropy.is_strict_expansion(delta, base)
  end
end
end
| 35.424242 | 81 | 0.67237 |
9ea1fd76700d6599aa0f907b1aa7174cb2c7a45f | 1,271 | exs | Elixir | spec/assertions/result/be_error_result_spec.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 807 | 2015-03-25T14:00:19.000Z | 2022-03-24T08:08:15.000Z | spec/assertions/result/be_error_result_spec.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 254 | 2015-03-27T10:12:25.000Z | 2021-07-12T01:40:15.000Z | spec/assertions/result/be_error_result_spec.exs | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 85 | 2015-04-02T10:25:19.000Z | 2021-01-30T21:30:43.000Z | defmodule ESpec.Assertions.BeErrorResultSpec do
use ESpec, async: true
describe "ESpec.Assertions" do
let :error_result, do: {:error, :reason}
let :ok_result, do: {:ok, :result}
context "Success" do
it "checks success with `to`" do
message = expect(error_result()) |> to(be_error_result())
expect(message) |> to(eq "`{:error, :reason}` is a error result.")
end
it "checks success with `not_to`" do
message = expect(ok_result()) |> not_to(be_error_result())
expect(message) |> to(eq "`{:ok, :result}` isn't a error result.")
end
end
context "Errors" do
context "with `to`" do
before do
{:shared,
expectation: fn -> expect(ok_result()) |> to(be_error_result()) end,
message: "Expected `{:ok, :result}` to be a error result but it is not."}
end
it_behaves_like(CheckErrorSharedSpec)
end
context "with `not_to`" do
before do
{:shared,
expectation: fn -> expect(error_result()) |> not_to(be_error_result()) end,
message: "Expected `{:error, :reason}` not to be a error result but it is."}
end
it_behaves_like(CheckErrorSharedSpec)
end
end
end
end
| 29.55814 | 87 | 0.590087 |
9ea226ae0f48c4a70956bf2be29b4abf393ff934 | 941 | exs | Elixir | test/hlds_logs/log_entry_test.exs | JonnyPower/hlds_logs | be0b58bc5f482da7c6f31cfe3e699875d40ae719 | [
"MIT"
] | 2 | 2021-09-12T00:11:44.000Z | 2022-02-13T20:45:40.000Z | test/hlds_logs/log_entry_test.exs | JonnyPower/hlds_logs | be0b58bc5f482da7c6f31cfe3e699875d40ae719 | [
"MIT"
] | 1 | 2018-11-19T04:47:38.000Z | 2018-11-19T04:47:38.000Z | test/hlds_logs/log_entry_test.exs | JonnyPower/hlds_logs | be0b58bc5f482da7c6f31cfe3e699875d40ae719 | [
"MIT"
] | null | null | null | defmodule HLDSLogs.LogEntryTest do
use ExUnit.Case
doctest HLDSLogs.LogEntry
test "sample produces entries" do
  # Every line of the sample log must parse into an entry with both a
  # timestamp and a body.
  "test/test_sample.log"
  |> File.stream!()
  |> Enum.each(fn line ->
    entry = HLDSLogs.LogEntry.from(line)

    assert entry.datetime != nil
    assert entry.body != nil
  end)
end
test "datetime" do
  entry = HLDSLogs.LogEntry.from(
    "L 05/10/2018 - 18:14:39: \"player18<7><STEAM_0:0:0000000000><marine1team>\" triggered \"structure_built\" (type \"resourcetower\")"
  )

  # The "L MM/DD/YYYY - HH:MM:SS:" prefix should parse into a NaiveDateTime.
  assert entry.datetime == ~N[2018-05-10 18:14:39]
end
test "body" do
  entry = HLDSLogs.LogEntry.from(
    "L 05/10/2018 - 18:14:39: \"player18<7><STEAM_0:0:0000000000><marine1team>\" triggered \"structure_built\" (type \"resourcetower\")"
  )

  # The body is everything after the timestamp prefix, unmodified.
  assert entry.body == "\"player18<7><STEAM_0:0:0000000000><marine1team>\" triggered \"structure_built\" (type \"resourcetower\")"
end
end
| 31.366667 | 138 | 0.664187 |
9ea247ac471472577eebf6499fd22a4dbe0242bd | 3,866 | exs | Elixir | test/hornet/scheduler_test.exs | andreaswillems/hornet | a61e4518afd45ae5f2cf7ceb8c80527a7be0a174 | [
"MIT"
] | null | null | null | test/hornet/scheduler_test.exs | andreaswillems/hornet | a61e4518afd45ae5f2cf7ceb8c80527a7be0a174 | [
"MIT"
] | null | null | null | test/hornet/scheduler_test.exs | andreaswillems/hornet | a61e4518afd45ae5f2cf7ceb8c80527a7be0a174 | [
"MIT"
] | null | null | null | defmodule Hornet.SchedulerTest do
use ExUnit.Case
alias Hornet.Scheduler
alias Hornet.RateCounter
test "starts workers which maintain rate with one worker" do
  opts = [id: :test, func: fn -> :ok end, rate: 5]

  {:ok, _pid} = Scheduler.start_link(opts)

  # Give the rate counter time to collect samples.
  Process.sleep(5_000)

  state = Scheduler.state(:test)
  assert RateCounter.rate(state.rate_counter) == 5

  :ok = Scheduler.stop(:test)
end
test "starts workers which maintain rate with multiple workers" do
  opts = [id: :test1, func: fn -> :ok end, rate: 1_000]

  {:ok, _pid} = Scheduler.start_link(opts)
  Process.sleep(5_000)

  state = Scheduler.state(:test1)

  assert RateCounter.rate(state.rate_counter) == 1_000
  assert state.current_workers_count == 100

  :ok = Scheduler.stop(:test1)
end
test "starts many workers" do
  opts = [id: :test2, func: fn -> :ok end, rate: 100_000]

  {:ok, _pid} = Scheduler.start_link(opts)
  Process.sleep(5_000)

  state = Scheduler.state(:test2)

  # High rates are only approximate, so compare within a relative tolerance.
  assert_rates(100_000, RateCounter.rate(state.rate_counter))
  assert state.current_workers_count == 10_000

  :ok = Scheduler.stop(:test2)
end
test "adjusts period" do
  # The worker sleeps longer than the initial period allows, forcing the
  # scheduler to stretch the period instead of adding workers.
  opts = [id: :test3, func: fn -> Process.sleep(250) end, rate: 5]

  {:ok, _pid} = Scheduler.start_link(opts)

  initial = Scheduler.state(:test3)
  assert initial.current_workers_count == 1
  assert initial.period == 100

  Process.sleep(6_000)

  adjusted = Scheduler.state(:test3)
  assert adjusted.current_workers_count == 1
  assert adjusted.period == 150

  :ok = Scheduler.stop(:test3)
end
test "adjust the number of workers" do
  opts = [
    id: :test4,
    func: fn -> Process.sleep(500) end,
    rate: 2,
    adjust_period: 2_000,
    adjust_step: 200,
    start_period: 600
  ]

  {:ok, _pid} = Scheduler.start_link(opts)

  initial = Scheduler.state(:test4)
  assert initial.current_workers_count == 1
  assert initial.period == 600

  Process.sleep(10_000)

  adjusted = Scheduler.state(:test4)
  assert adjusted.current_workers_count == 2

  # The configured rate must still be honored after the adjustment.
  assert RateCounter.rate(initial.rate_counter) == 2

  :ok = Scheduler.stop(:test4)
end
test "does not adjust the number of workers if process number limit is set" do
  opts = [
    id: :test4,
    func: fn -> Process.sleep(400) end,
    rate: 4,
    adjust_period: 3_000,
    adjust_step: 300,
    start_period: 100,
    process_number_limit: 1
  ]

  {:ok, _pid} = Scheduler.start_link(opts)

  initial = Scheduler.state(:test4)
  assert initial.current_workers_count == 1
  assert initial.period == 100

  Process.sleep(10_000)

  # With the limit in place, neither worker count nor period may change.
  final = Scheduler.state(:test4)
  assert final.current_workers_count == 1
  assert final.period == 100

  :ok = Scheduler.stop(:test4)
end
test "stops scheduler and all child processes" do
  opts = [id: :test5, func: fn -> :ok end, rate: 5]

  {:ok, _pid} = Scheduler.start_link(opts)
  state = Scheduler.state(:test5)

  Scheduler.stop(:test5)
  Process.sleep(2_000)

  # Stopping must bring down the entire supervision tree.
  refute Process.alive?(state.supervisor)
  refute Process.alive?(state.rate_counter)
  refute Process.alive?(state.worker_supervisor)
end
# Asserts `actual` is within `percentage` (relative) of `expected`.
defp assert_rates(expected, actual, percentage \\ 0.1) do
  tolerance = expected * percentage
  assert abs(expected - actual) < tolerance
end
end
| 20.784946 | 80 | 0.640714 |
9ea275dbe2c5f96ccb333310e952ca1f2c2466e2 | 1,115 | ex | Elixir | test/support/conn_case.ex | HandOfGod94/my_phoenix_api_template | 448db20ba47872f37097754aec454aed440246b8 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | HandOfGod94/my_phoenix_api_template | 448db20ba47872f37097754aec454aed440246b8 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | HandOfGod94/my_phoenix_api_template | 448db20ba47872f37097754aec454aed440246b8 | [
"MIT"
] | null | null | null | defmodule MyPhoenixApiTemplateWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
# Code injected into every test module that does `use ...ConnCase`.
using do
  quote do
    # Import conveniences for testing with connections
    use Phoenix.ConnTest

    alias MyPhoenixApiTemplateWeb.Router.Helpers, as: Routes

    # The default endpoint for testing
    @endpoint MyPhoenixApiTemplateWeb.Endpoint
  end
end
setup tags do
  repo = MyPhoenixApiTemplate.Repo

  # Each test checks out its own sandboxed DB connection.
  :ok = Ecto.Adapters.SQL.Sandbox.checkout(repo)

  # Synchronous tests share the connection with processes they spawn.
  if !tags[:async] do
    Ecto.Adapters.SQL.Sandbox.mode(repo, {:shared, self()})
  end

  {:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 28.589744 | 82 | 0.734529 |
9ea2fcf7b5e48b7886271ed96b2513dd2abcf62a | 7,201 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/snapshot.ex | MechimCook/elixir-google-api | 0240ede69ec77115076724d223f9d1f849ff1d6b | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/snapshot.ex | MechimCook/elixir-google-api | 0240ede69ec77115076724d223f9d1f849ff1d6b | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/snapshot.ex | MechimCook/elixir-google-api | 0240ede69ec77115076724d223f9d1f849ff1d6b | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.Snapshot do
@moduledoc """
A persistent disk snapshot resource. (== resource_for beta.snapshots ==) (== resource_for v1.snapshots ==)
## Attributes
- creationTimestamp (String.t): [Output Only] Creation timestamp in RFC3339 text format. Defaults to: `null`.
- description (String.t): An optional description of this resource. Provide this property when you create the resource. Defaults to: `null`.
- diskSizeGb (String.t): [Output Only] Size of the snapshot, specified in GB. Defaults to: `null`.
- id (String.t): [Output Only] The unique identifier for the resource. This identifier is defined by the server. Defaults to: `null`.
- kind (String.t): [Output Only] Type of the resource. Always compute#snapshot for Snapshot resources. Defaults to: `null`.
- labelFingerprint (binary()): A fingerprint for the labels being applied to this snapshot, which is essentially a hash of the labels set used for optimistic locking. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels, otherwise the request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a snapshot. Defaults to: `null`.
- labels (%{optional(String.t) => String.t}): Labels to apply to this snapshot. These can be later modified by the setLabels method. Label values may be empty. Defaults to: `null`.
- licenseCodes ([String.t]): [Output Only] Integer license codes indicating which licenses are attached to this snapshot. Defaults to: `null`.
- licenses ([String.t]): [Output Only] A list of public visible licenses that apply to this snapshot. This can be because the original image had licenses attached (such as a Windows image). Defaults to: `null`.
- name (String.t): Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. Defaults to: `null`.
- selfLink (String.t): [Output Only] Server-defined URL for the resource. Defaults to: `null`.
- snapshotEncryptionKey (CustomerEncryptionKey): Encrypts the snapshot using a customer-supplied encryption key. After you encrypt a snapshot using a customer-supplied key, you must provide the same key if you use the snapshot later. For example, you must provide the encryption key when you create a disk from the encrypted snapshot in a future request. Customer-supplied encryption keys do not protect access to metadata of the snapshot. If you do not provide an encryption key when creating the snapshot, then the snapshot will be encrypted using an automatically generated key and you do not need to provide a key to use the snapshot later. Defaults to: `null`.
- sourceDisk (String.t): [Output Only] The source disk used to create this snapshot. Defaults to: `null`.
- sourceDiskEncryptionKey (CustomerEncryptionKey): The customer-supplied encryption key of the source disk. Required if the source disk is protected by a customer-supplied encryption key. Defaults to: `null`.
- sourceDiskId (String.t): [Output Only] The ID value of the disk used to create this snapshot. This value may be used to determine whether the snapshot was taken from the current or a previous instance of a given disk name. Defaults to: `null`.
- status (String.t): [Output Only] The status of the snapshot. This can be CREATING, DELETING, FAILED, READY, or UPLOADING. Defaults to: `null`.
- Enum - one of [CREATING, DELETING, FAILED, READY, UPLOADING]
- storageBytes (String.t): [Output Only] A size of the storage used by the snapshot. As snapshots share storage, this number is expected to change with snapshot creation/deletion. Defaults to: `null`.
- storageBytesStatus (String.t): [Output Only] An indicator whether storageBytes is in a stable state or it is being adjusted as a result of shared storage reallocation. This status can either be UPDATING, meaning the size of the snapshot is being updated, or UP_TO_DATE, meaning the size of the snapshot is up-to-date. Defaults to: `null`.
- Enum - one of [UPDATING, UP_TO_DATE]
- storageLocations ([String.t]): GCS bucket storage location of the snapshot (regional or multi-regional). Defaults to: `null`.
"""
  use GoogleApi.Gax.ModelBase

  # Struct mirroring the JSON representation of a Compute v1 Snapshot.
  # Field semantics are described in the @moduledoc above. Scalar fields
  # are typed `any()` because GoogleApi.Gax.ModelBase decodes them
  # dynamically; nested models carry their concrete struct type.
  @type t :: %__MODULE__{
          :creationTimestamp => any(),
          :description => any(),
          :diskSizeGb => any(),
          :id => any(),
          :kind => any(),
          :labelFingerprint => any(),
          :labels => map(),
          :licenseCodes => list(any()),
          :licenses => list(any()),
          :name => any(),
          :selfLink => any(),
          :snapshotEncryptionKey => GoogleApi.Compute.V1.Model.CustomerEncryptionKey.t(),
          :sourceDisk => any(),
          :sourceDiskEncryptionKey => GoogleApi.Compute.V1.Model.CustomerEncryptionKey.t(),
          :sourceDiskId => any(),
          :status => any(),
          :storageBytes => any(),
          :storageBytesStatus => any(),
          :storageLocations => list(any())
        }

  # `field/1,2` (provided by GoogleApi.Gax.ModelBase) registers each JSON
  # attribute with the generated encoder/decoder: `as:` decodes into a
  # nested model struct, `type:` selects list/map handling.
  field(:creationTimestamp)
  field(:description)
  field(:diskSizeGb)
  field(:id)
  field(:kind)
  field(:labelFingerprint)
  field(:labels, type: :map)
  field(:licenseCodes, type: :list)
  field(:licenses, type: :list)
  field(:name)
  field(:selfLink)
  field(:snapshotEncryptionKey, as: GoogleApi.Compute.V1.Model.CustomerEncryptionKey)
  field(:sourceDisk)
  field(:sourceDiskEncryptionKey, as: GoogleApi.Compute.V1.Model.CustomerEncryptionKey)
  field(:sourceDiskId)
  field(:status)
  field(:storageBytes)
  field(:storageBytesStatus)
  field(:storageLocations, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.Snapshot do
  # Field-aware decoding is generated on the model module by
  # GoogleApi.Gax.ModelBase, so Poison's hook just hands off to it.
  def decode(value, options), do: GoogleApi.Compute.V1.Model.Snapshot.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.Snapshot do
  # Encoding is uniform across generated models, so delegate straight to
  # the shared ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 69.240385 | 670 | 0.730871 |
9ea323283db2bb3047e462a21de6bc18cc4fe63d | 3,166 | ex | Elixir | lib/iex/lib/iex/history.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/iex/lib/iex/history.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/iex/lib/iex/history.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | defmodule IEx.History do
@moduledoc false
alias IEx.History
defstruct queue: :queue.new(), size: 0, start: 1
@doc """
Initializes IEx history state.
"""
def init(), do: %History{}
@doc """
Appends one entry to the history.
"""
def append(%History{} = state, entry, limit) do
{collect?, state} =
state
|> append(entry)
|> prune(limit)
if collect?, do: collect_garbage()
state
end
@doc """
Enumerates over all items in the history starting from the oldest one and
applies `fun` to each one in turn.
"""
def each(%History{} = state, fun) do
state
|> to_list()
|> Enum.each(fun)
end
  @doc """
  Gets the nth item from the history.

  If `n` < 0, the count starts from the most recent item and goes back in time.
  """
  # Traverses the queue front-to-back if the index is positive.
  # `start` is the absolute index of the oldest entry still retained, so
  # the zero-based offset into the queue is `n - start`.
  def nth(%History{queue: q, size: size, start: start}, n)
      when n - start >= 0 and n - start < size do
    get_nth(q, n - start)
  end

  # Traverses the queue back-to-front if the index is negative:
  # -1 is the newest entry, so reverse the queue and walk |n| - 1 steps.
  def nth(%History{queue: q, size: size, start: start}, n)
      when n < 0 and size + n >= start - 1 do
    get_nth(:queue.reverse(q), abs(n) - 1)
  end

  # Out-of-range indexes (in either direction) are reported to the user.
  def nth(%History{}, n) do
    raise "v(#{n}) is out of bounds"
  end

  # Walks `n` elements into the queue and returns the head found there.
  defp get_nth(q, 0), do: :queue.head(q)
  defp get_nth(q, n) when n > 0, do: get_nth(:queue.tail(q), n - 1)
defp append(%{queue: q, size: size} = state, item) do
%{state | queue: :queue.in(item, q), size: size + 1}
end
defp to_list(%{queue: q}), do: :queue.to_list(q)
  # Based on https://github.com/erlang/otp/blob/7dcccee4371477e983f026db9e243cb66900b1ef/lib/stdlib/src/shell.erl#L1401
  # Forces garbage collection on the shell-related processes (`:user` and
  # the group leader) and on the current process, so that large binaries
  # referenced only by pruned history entries can be released.
  defp collect_garbage() do
    collect_proc_garbage(Process.whereis(:user))
    collect_proc_garbage(Process.group_leader())
    :erlang.garbage_collect()
  end

  # GC a single process, swallowing any failure — e.g. when
  # `Process.whereis(:user)` returned nil, making this call raise.
  defp collect_proc_garbage(process) do
    try do
      :erlang.garbage_collect(process)
    catch
      _, _ -> nil
    end
  end
  # Entry point: start pruning from the current `start` index with the
  # "needs GC" flag initially false.
  defp prune(%{start: start} = state, limit) do
    prune(state, start, limit, false)
  end

  # A negative limit means "unbounded history": never prune, never GC.
  defp prune(state, _, limit, _) when limit < 0 do
    {false, state}
  end

  # Done: the remaining entries fit within `limit`; record `counter` as
  # the new absolute index of the oldest entry and report whether any
  # dropped entry warrants a garbage-collection pass.
  defp prune(%{size: size} = state, counter, limit, collect?) when size - counter < limit do
    {collect?, %{state | start: counter}}
  end

  # Drop the oldest entry; remember (sticky OR) if it held large binaries
  # whose memory a later GC pass could reclaim.
  defp prune(%{queue: q} = state, counter, limit, collect?) do
    {{:value, entry}, q} = :queue.out(q)
    collect? = collect? || has_binary(entry)
    prune(%{state | queue: q}, counter + 1, limit, collect?)
  end
  # Checks val and each of its elements (if it is a list or a tuple)
  # recursively to see if it has any large binaries (outside of the heap).
  defp has_binary(val) do
    try do
      has_bin(val)
    catch
      # `has_bin/1` signals a hit via `throw(:found)` so traversal can
      # stop early instead of threading a boolean accumulator around.
      :throw, :found -> true
    end
  end

  defp has_bin(val) when is_tuple(val), do: has_bin(val, tuple_size(val) - 1)

  # Lists: the head's return value can be ignored because a positive
  # result is reported by throwing, not by returning.
  defp has_bin([head | tail]) do
    has_bin(head)
    has_bin(tail)
  end

  # Binaries longer than 64 bytes live outside the process heap
  # (reference-counted) — exactly the ones worth a GC pass.
  defp has_bin(val) when byte_size(val) > 64, do: throw(:found)

  # Everything else (small binaries, numbers, atoms, maps, ...) is
  # ignored; maps are deliberately not traversed here.
  defp has_bin(_), do: false

  # Tuple traversal by decreasing index; -1 means all elements checked.
  defp has_bin(_, -1), do: false

  defp has_bin(tuple, index) do
    has_bin(elem(tuple, index))
    has_bin(tuple, index - 1)
  end
end
| 24.929134 | 119 | 0.631712 |
9ea32b8f876b7a87d1b747e9176b08988a2bdbb3 | 4,200 | ex | Elixir | lib/petal_components/badge.ex | MortadaAK/petal_components | 69980b96afec232715071d75d3a394ac6daf75b3 | [
"MIT"
] | null | null | null | lib/petal_components/badge.ex | MortadaAK/petal_components | 69980b96afec232715071d75d3a394ac6daf75b3 | [
"MIT"
] | null | null | null | lib/petal_components/badge.ex | MortadaAK/petal_components | 69980b96afec232715071d75d3a394ac6daf75b3 | [
"MIT"
] | null | null | null | defmodule PetalComponents.Badge do
use Phoenix.Component
# prop label, :string
# prop size, :string, options: ["xs", "sm", "md", "lg", "xl"]
# prop variant, :string
# prop color, :string, options: ["primary", "secondary", "info", "success", "warning", "danger", "gray"]
# prop class, :css_class
  # Renders a `<badge>` element styled with Tailwind utility classes.
  #
  # Assigns (all optional, defaulted below via `assign_new/3`):
  #   * label       - text shown when no inner block is given
  #   * size        - "sm" | "md" | "lg" (default "md"); other values make
  #                   `size_classes/1` raise
  #   * variant     - "light" | "dark" | "outline" (default "light")
  #   * color       - "primary" | "secondary" | "info" | "success" |
  #                   "warning" | "danger" | "gray" (default "primary")
  #   * class       - extra CSS classes, appended last so they can override
  #   * icon        - when truthy, adds flex/gap classes for icon + text
  #   * inner_block - slot rendered instead of `@label` when present
  def badge(assigns) do
    assigns =
      assigns
      |> assign_new(:size, fn -> "md" end)
      |> assign_new(:variant, fn -> "light" end)
      |> assign_new(:color, fn -> "primary" end)
      |> assign_new(:class, fn -> "" end)
      |> assign_new(:icon, fn -> false end)
      |> assign_new(:inner_block, fn -> nil end)

    # `icon_classes/1` may return nil; `Enum.join/2` renders nil as "".
    ~H"""
    <badge class={Enum.join([
      "rounded inline-flex items-center justify-center focus:outline-none border",
      size_classes(@size),
      icon_classes(@icon),
      get_color_classes(%{color: @color, variant: @variant}),
      @class
    ], " ")}>
      <%= if @inner_block do %>
        <%= render_slot(@inner_block) %>
      <% else %>
        <%= @label %>
      <% end %>
    </badge>
    """
  end
defp size_classes(size) do
case size do
"sm" -> "text-[0.625rem] font-semibold px-1.5"
"md" -> "text-xs font-semibold px-2.5 py-0.5"
"lg" -> "text-sm font-semibold px-2.5 py-0.5"
end
end
defp icon_classes(icon) do
if icon do
"flex gap-1 items-center whitespace-nowrap"
end
end
  # Maps {color, variant} to Tailwind text/background/border classes.
  # Colors: primary, secondary, info (blue), success (green),
  # warning (yellow), danger (red), gray; variants: light | dark | outline.
  # Unknown colors/variants raise, matching the closed option sets listed
  # in the prop comments at the top of the module.
  #
  # NOTE(review): most "light"/"outline" clauses repeat a bare `border`
  # class that badge/1 already applies unconditionally, while the primary
  # "outline" clause omits it — harmless duplication, but inconsistent.
  defp get_color_classes(%{color: "primary", variant: variant}) do
    case variant do
      "light" ->
        "text-primary-800 bg-primary-100 border-primary-100 dark:bg-primary-200 dark:border-primary-200"

      "dark" ->
        "text-white bg-primary-600 border-primary-600"

      "outline" ->
        "text-primary-600 border-primary-600 dark:text-primary-400 dark:border-primary-400"
    end
  end

  defp get_color_classes(%{color: "secondary", variant: variant}) do
    case variant do
      "light" ->
        "text-secondary-800 bg-secondary-100 border-secondary-100 dark:bg-secondary-200 dark:border-secondary-200"

      "dark" ->
        "text-white bg-secondary-600 border-secondary-600"

      "outline" ->
        "text-secondary-600 border border-secondary-600 dark:text-secondary-400 dark:border-secondary-400"
    end
  end

  defp get_color_classes(%{color: "info", variant: variant}) do
    case variant do
      "light" ->
        "text-blue-800 bg-blue-100 border-blue-100 dark:bg-blue-200 dark:border-blue-200"

      "dark" ->
        "text-white bg-blue-600 border-blue-600"

      "outline" ->
        "text-blue-600 border border-blue-600 dark:text-blue-400 dark:border-blue-400"
    end
  end

  defp get_color_classes(%{color: "success", variant: variant}) do
    case variant do
      "light" ->
        "text-green-800 bg-green-100 border-green-100 dark:bg-green-200 dark:border-green-200"

      "dark" ->
        "text-white bg-green-600 border-green-600"

      "outline" ->
        "text-green-600 border border-green-600 dark:text-green-400 dark:border-green-400"
    end
  end

  defp get_color_classes(%{color: "warning", variant: variant}) do
    case variant do
      "light" ->
        "text-yellow-800 bg-yellow-100 border-yellow-100 dark:bg-yellow-200 dark:border-yellow-200"

      "dark" ->
        "text-white bg-yellow-600 border-yellow-600"

      "outline" ->
        "text-yellow-600 border border-yellow-600 dark:text-yellow-400 dark:border-yellow-400"
    end
  end

  defp get_color_classes(%{color: "danger", variant: variant}) do
    case variant do
      "light" ->
        "text-red-800 bg-red-100 border-red-100 dark:bg-red-200 dark:border-red-200"

      "dark" ->
        "text-white bg-red-600 border-red-600"

      "outline" ->
        "text-red-600 border border-red-600 dark:text-red-400 dark:border-red-400"
    end
  end

  defp get_color_classes(%{color: "gray", variant: variant}) do
    case variant do
      "light" ->
        "text-gray-800 bg-gray-100 border-gray-100 dark:bg-gray-200 dark:border-gray-200"

      "dark" ->
        "text-white bg-gray-600 border-gray-600 dark:bg-gray-700 dark:border-gray-700"

      "outline" ->
        "text-gray-600 border border-gray-600 dark:text-gray-400 dark:border-gray-400"
    end
  end
end
| 29.787234 | 114 | 0.61881 |
9ea33750d898daa0a9af59f153a2e1227d25cbc3 | 54 | exs | Elixir | config/prod.exs | kommitters/stellar_sdk | 4c3bb1c894b4bb34d9409c981ff008dce31bdb95 | [
"MIT"
] | 4 | 2022-01-14T16:00:07.000Z | 2022-03-22T19:16:55.000Z | config/prod.exs | kommitters/stellar_sdk | 4c3bb1c894b4bb34d9409c981ff008dce31bdb95 | [
"MIT"
] | 75 | 2021-08-18T16:04:12.000Z | 2022-03-31T18:14:23.000Z | config/prod.exs | kommitters/stellar_sdk | 4c3bb1c894b4bb34d9409c981ff008dce31bdb95 | [
"MIT"
] | 2 | 2022-01-23T03:16:43.000Z | 2022-02-07T20:13:37.000Z | use Mix.Config
config :stellar_sdk, network: :public
| 13.5 | 37 | 0.777778 |
9ea351e13ae9f96ed5deaf0f7286848d63eda683 | 214 | ex | Elixir | testData/org/elixir_lang/parser_definition/at_bracket_operation_parsing_test_case/ThreeOperator.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/at_bracket_operation_parsing_test_case/ThreeOperator.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/at_bracket_operation_parsing_test_case/ThreeOperator.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | @Module.^^^[key: value]
@Module.^^^ [key: value]
@Module.^^^[()]
@Module.^^^ [()]
@Module.^^^[matched_expression]
@Module.^^^ [matched_expression]
@Module.^^^[matched_expression,]
@Module.^^^ [matched_expression,]
| 23.777778 | 33 | 0.635514 |
9ea365bab1eb7ffbd37d6de06a5e8562eb255293 | 1,001 | ex | Elixir | apps/aecore/lib/aecore/naming/naming_state_tree.ex | SingularityMatrix/elixir-node | ad126aa97931165185cf35454718ed2eee40ceed | [
"ISC"
] | 131 | 2018-03-10T01:35:56.000Z | 2021-12-27T13:44:41.000Z | apps/aecore/lib/aecore/naming/naming_state_tree.ex | SingularityMatrix/elixir-node | ad126aa97931165185cf35454718ed2eee40ceed | [
"ISC"
] | 445 | 2018-03-12T09:46:17.000Z | 2018-12-12T09:52:07.000Z | apps/aecore/lib/aecore/naming/naming_state_tree.ex | SingularityMatrix/elixir-node | ad126aa97931165185cf35454718ed2eee40ceed | [
"ISC"
] | 23 | 2018-03-12T12:01:28.000Z | 2022-03-06T09:22:17.000Z | defmodule Aecore.Naming.NamingStateTree do
@moduledoc """
Top level naming state tree.
"""
use Aecore.Util.StateTrees, [:naming, [Aecore.Naming.Name, Aecore.Naming.NameCommitment]]
alias Aecore.Naming.{Name, NameCommitment}
@typedoc "Namings tree"
@type namings_state() :: Trie.t()
  @spec process_struct(Name.t() | NameCommitment.t(), binary(), namings_state()) ::
          Name.t() | NameCommitment.t() | {:error, String.t()}
  # Re-attaches the identifier hash to a value deserialized from the
  # naming tree, deriving it from the tree key the value was stored under.
  def process_struct(%Name{} = deserialized_value, key, _tree) do
    hash = Identifier.create_identity(key, :name)
    %Name{deserialized_value | hash: hash}
  end

  def process_struct(%NameCommitment{} = deserialized_value, key, _tree) do
    hash = Identifier.create_identity(key, :commitment)
    %NameCommitment{deserialized_value | hash: hash}
  end

  # Fallback for unexpected values.
  # NOTE(review): interpolating `deserialized_value.__struct__` raises if
  # the value is not a struct — confirm callers only pass structs here.
  def process_struct(deserialized_value, _key, _tree) do
    {:error,
     "#{__MODULE__}: Invalid data type: #{deserialized_value.__struct__} but expected %NameCommitment{} or %Name{}"}
  end
| 34.517241 | 116 | 0.707293 |
9ea38bdf59593c28ee8937c598f4ee7c502817ac | 854 | ex | Elixir | lib/ria_task.ex | ria-com/elixir-task-wrapper | 96e9370dafb4a31a9564e715c182a2e4aaa2d92f | [
"MIT"
] | null | null | null | lib/ria_task.ex | ria-com/elixir-task-wrapper | 96e9370dafb4a31a9564e715c182a2e4aaa2d92f | [
"MIT"
] | null | null | null | lib/ria_task.ex | ria-com/elixir-task-wrapper | 96e9370dafb4a31a9564e715c182a2e4aaa2d92f | [
"MIT"
] | null | null | null | defmodule Ria.Task do
@moduledoc """
Ria.Task - wrapper for Elixir.Task
  Task.await does not block past the given timeout and does not raise exceptions
alias Ria.Task
task = Task.async(fn -> do_some_work() end)
do_some_other_work()
res = Task.await(task)
"""
def async(func), do: Task.async(func)
def start(func), do: Task.start(func)
def start_link(func), do: Task.start_link(func)
@doc """
Awaits a task reply and returns it.
Returns nil if something went wrong or timeout exceeds
## Examples
iex> Ria.Task.await(Ria.Task.async(fn->:timer.sleep(100) end))
:ok
iex> Ria.Task.await(Ria.Task.async(fn->:timer.sleep(100) end), 10)
nil
"""
def await(task, timeout \\ 5_000) do
case Task.yield(task, timeout) || Task.shutdown(task) do
{:ok, term} -> term
_ -> nil
end
end
end | 23.722222 | 72 | 0.642857 |
9ea3b41787f8c14c3e5d9a3fbd9de4f54f0369d5 | 195 | ex | Elixir | lumberjack/lib/memo.ex | lucas-larsson/ID1019 | b21a79bfa7fbbaaba0b4db88ec8b44fc5e9f291c | [
"MIT"
] | null | null | null | lumberjack/lib/memo.ex | lucas-larsson/ID1019 | b21a79bfa7fbbaaba0b4db88ec8b44fc5e9f291c | [
"MIT"
] | null | null | null | lumberjack/lib/memo.ex | lucas-larsson/ID1019 | b21a79bfa7fbbaaba0b4db88ec8b44fc5e9f291c | [
"MIT"
] | null | null | null | defmodule Memo do
def new() do %{} end
def add(mem, key, val) do Map.put(mem, :binary.list_to_bin(key), val) end
def lookup(mem, key) do Map.get(mem, :binary.list_to_bin(key)) end
end
| 17.727273 | 75 | 0.666667 |
9ea3b77760f9a43d1d6fc4f348bc0e3bc45ecca1 | 1,686 | exs | Elixir | test/xdr/transactions/operations/set_options_result_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 3 | 2021-08-17T20:32:45.000Z | 2022-03-13T20:26:02.000Z | test/xdr/transactions/operations/set_options_result_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 45 | 2021-08-12T20:19:41.000Z | 2022-03-27T21:00:10.000Z | test/xdr/transactions/operations/set_options_result_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 2 | 2021-09-22T23:11:13.000Z | 2022-01-23T03:19:11.000Z | defmodule StellarBase.XDR.Operations.SetOptionsResultTest do
use ExUnit.Case
alias StellarBase.XDR.Void
alias StellarBase.XDR.Operations.{SetOptionsResult, SetOptionsResultCode}
  describe "SetOptionsResult" do
    # Shared fixtures: a success result code, a Void payload, the result
    # struct built from them, and its 4-byte XDR encoding.
    setup do
      code = SetOptionsResultCode.new(:SET_OPTIONS_SUCCESS)

      %{
        code: code,
        value: Void.new(),
        result: SetOptionsResult.new(Void.new(), code),
        binary: <<0, 0, 0, 0>>
      }
    end

    # Constructor stores the payload and code in the struct fields.
    test "new/1", %{code: code, value: value} do
      %SetOptionsResult{code: ^code, result: ^value} = SetOptionsResult.new(value, code)
    end

    test "encode_xdr/1", %{result: result, binary: binary} do
      {:ok, ^binary} = SetOptionsResult.encode_xdr(result)
    end

    test "encode_xdr!/1", %{result: result, binary: binary} do
      ^binary = SetOptionsResult.encode_xdr!(result)
    end

    # A success code encodes only the code; the result payload ("TEST"
    # here) is ignored, so the output is the same 4 bytes.
    test "encode_xdr!/1 with a default value", %{code: code, binary: binary} do
      result = SetOptionsResult.new("TEST", code)
      ^binary = SetOptionsResult.encode_xdr!(result)
    end

    test "decode_xdr/2", %{result: result, binary: binary} do
      {:ok, {^result, ""}} = SetOptionsResult.decode_xdr(binary)
    end

    # Trailing bytes beyond the first decoded value are returned as rest.
    test "decode_xdr!/2", %{result: result, binary: binary} do
      {^result, ^binary} = SetOptionsResult.decode_xdr!(binary <> binary)
    end

    # -2 (big-endian <<255, 255, 255, 254>>) is the TOO_MANY_SIGNERS code.
    test "decode_xdr!/2 an error code" do
      {%SetOptionsResult{
         code: %SetOptionsResultCode{identifier: :SET_OPTIONS_TOO_MANY_SIGNERS}
       }, ""} = SetOptionsResult.decode_xdr!(<<255, 255, 255, 254>>)
    end

    test "decode_xdr/2 with an invalid binary" do
      {:error, :not_binary} = SetOptionsResult.decode_xdr(123)
    end
  end
end
| 30.654545 | 88 | 0.655991 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.