# test/bookings/report_test.exs (felipe-jm/ignite-elixir-flightex, MIT)
defmodule Flightex.Bookings.ReportTest do
use ExUnit.Case
import Flightex.Factory
alias Flightex.Bookings.Agent, as: BookingAgent
alias Flightex.Bookings.Booking
alias Flightex.Bookings.Report
alias Flightex.Users.Agent, as: UserAgent
describe "create/1" do
setup do
user = build(:user)
Flightex.start_agents()
{:ok, uuid} = UserAgent.save(user)
{:ok, id_usuario: uuid}
end
test "creates the report file", %{id_usuario: id_usuario} do
BookingAgent.start_link(%{})
booking1 = %Booking{
data_completa: "2015-01-23T23:50:07",
cidade_origem: "Cuiabá",
cidade_destino: "Londres",
id_usuario: id_usuario
}
BookingAgent.save(booking1)
booking2 = %Booking{
data_completa: "2019-01-23T23:50:07",
cidade_origem: "Cuiabá",
cidade_destino: "Tokyo",
id_usuario: id_usuario
}
BookingAgent.save(booking2)
expected_response =
"#{id_usuario},Cuiabá,Tokyo,2019-01-23T23:50:07\n" <>
"#{id_usuario},Cuiabá,Londres,2015-01-23T23:50:07\n"
Report.create("report_test.csv")
response = File.read!("report_test.csv")
assert response == expected_response
end
end
end

# lib/erlnote.ex (alchexmist/erlnote, Apache-2.0)
defmodule Erlnote do
@moduledoc """
Erlnote keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end

# lib/pushex/app_manager/memory.ex (talkjs/pushex, MIT)
defmodule Pushex.AppManager.Memory do
@moduledoc """
An in memory implementation using a `GenServer` for `Pushex.AppManager`
"""
use GenServer
@behaviour Pushex.AppManager
@valid_platforms ~w(gcm)a
def start(apps \\ []) do
GenServer.start(__MODULE__, apps, name: __MODULE__)
end
def start_link(apps \\ []) do
GenServer.start_link(__MODULE__, apps, name: __MODULE__)
end
def init([]) do
{:ok, Application.get_env(:pushex, :apps, [])}
end
def init(apps) do
{:ok, apps}
end
def find_app(platform, name) when platform in unquote(@valid_platforms) do
GenServer.call(__MODULE__, {:find, platform, name})
end
def handle_call({:find, platform, name}, _from, apps) do
app = Map.get(apps[platform], name)
{:reply, app, apps}
end
end

# lib/sparql_client/workload_info.ex (langens-jonathan/mu-authorization, MIT)
defmodule SparqlClient.WorkloadInfo do
use GenServer
alias SparqlClient.WorkloadInfo, as: Workload
defstruct running_pid_map: %{read: [], read_for_write: [], write: []},
waiting_from_map: %{read: [], read_for_write: [], write: []},
running_count: 0,
recovery_mode: false,
last_interval_failure_count: 0,
last_interval_success_count: 0,
database_failure_load: 0,
last_finished_workload: nil,
start_time: DateTime.utc_now()
use Accessible
@recovery_max %{write: 5, read_for_write: 5, read: 10}
@non_recovery_max_running 50
@failure_load_recovery_score 0.2
@failure_load_min_failures 3
@clocktick_interval 5000
@initial_clocktick_interval 10000
@previous_interval_keep_factor 0.5
# We want to react quickly to failures, hence for every
# @bump_failure_query_amount_for_tick (possibly plus one) failed
# queries within an interval we will increase the
# database_failure_load by @bump_load_increase_per_tick
@bump_failure_query_amount_for_tick 5
@bump_load_increase_per_tick 0.2
@moduledoc """
Helps to spread workloads coming into the database.
The WorkloadInfo server allows you to throttle your SPARQL
queries. It allows the system to pause execution on some queries in
order to let other queries pass through. This is mainly meant as a
backoff mechanism in case many queries are failing around the same
time. In this mechanism we want to ensure queries don't get
executed if they hose the database.
# General idea
This service receives information about when queries run and when
they succeed or fail.
# Technical construction
When queries pop up, this service is allowed to postpone running the query.
It is given control to decide when the query may be run. As
such, this service knows when queries have started to run. When a
query succeeds or fails, this service is informed. As such, the
service has a decent idea of the load on the database. When
failures start coming in, it will at some point shift into a
recovery mode. This mode will first execute all the update queries,
then read_for_write queries and then read queries. This makes the
endpoint temporarily unavailable.
# Considerations
It may be that a single process executes many queries in parallel.
We're currently assuming that not to be the case. Although this
assumption seems harsh, it's more likely to be the case than not as
you'd most likely (and in the current construction always) run these
queries in separate processes which then in turn have separate PIDs.
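
# Example

A minimal sketch of the intended call flow (`run_query/1` and its
return values are illustrative):

    SparqlClient.WorkloadInfo.timeout(:read)

    case run_query(query) do
      {:ok, result} ->
        SparqlClient.WorkloadInfo.report_success(:read)
        {:ok, result}

      {:error, reason} ->
        SparqlClient.WorkloadInfo.report_failure(:read)
        {:error, reason}
    end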
"""
@query_types [:read, :write, :read_for_write]
@type t :: %Workload{
running_pid_map: %{read: [pid], write: [pid], read_for_write: [pid]},
waiting_from_map: %{read: [pid], read_for_write: [pid], write: [pid]},
running_count: integer,
recovery_mode: boolean,
last_interval_failure_count: integer,
last_interval_success_count: integer,
database_failure_load: float,
last_finished_workload: %Workload{last_finished_workload: nil} | nil,
start_time: DateTime.t()
}
@type query_types :: SparqlClient.query_types()
@doc """
Indicates whether or not we should run the WorkloadInfo logic.
"""
@spec enabled?() :: boolean
def enabled?() do
Application.get_env(:"mu-authorization", :database_recovery_mode_enabled)
end
@doc """
Reports the backend successfully sending a response.
"""
@spec report_success(query_types) :: :ok
def report_success(query_type) do
if enabled?() do
GenServer.cast(__MODULE__, {:report_success, self(), query_type})
else
:ok
end
end
@doc """
Reports the backend failing to send a response.
"""
@spec report_failure(query_types) :: :ok
def report_failure(query_type) do
if enabled?() do
GenServer.cast(__MODULE__, {:report_failure, self(), query_type})
else
:ok
end
end
@spec report_timeout(query_types) :: :ok
def report_timeout(query_type) do
# Follows same flow as report_cancellation
report_cancellation(query_type)
end
@spec report_cancellation(query_types) :: :ok
def report_cancellation(query_type) do
# Follows same flow as report_timeout
if enabled?() do
GenServer.cast(__MODULE__, {:report_cancellation, self(), query_type})
else
:ok
end
end
@doc """
Executes a timeout in case of read requests.
"""
@spec timeout(query_types, integer) :: :ok
def timeout(query_type, max_timeout \\ 60000) do
if enabled?() do
GenServer.call(__MODULE__, {:timeout, query_type}, max_timeout)
else
:ok
end
end
@spec start_clocktick() :: pid
def start_clocktick() do
Logging.EnvLog.log(:log_database_recovery_mode_tick, "Starting WorkloadInfo clockticks")
spawn(fn ->
Process.sleep(@initial_clocktick_interval)
continue_clocktick()
end)
end
def continue_clocktick() do
spawn(fn ->
Process.sleep(@clocktick_interval)
continue_clocktick()
end)
GenServer.cast(__MODULE__, :clocktick)
Logging.EnvLog.log(
:log_database_recovery_mode_tick,
"Pushed WorkloadInfo clocktick on message stack"
)
end
def get_state() do
GenServer.call(__MODULE__, :get_state, 25000)
end
def start_link(_) do
GenServer.start_link(__MODULE__, nil, name: __MODULE__)
end
@impl true
def init(_) do
start_clocktick()
{:ok, %Workload{}}
end
@impl true
def handle_cast(:clocktick, %Workload{} = workload) do
last_finished_workload = workload
old_failure_factor = workload.database_failure_load * @previous_interval_keep_factor
new_failure_factor =
if workload.last_interval_success_count == 0 do
0
else
workload.last_interval_failure_count / workload.last_interval_success_count
end
new_failure_load =
if workload.last_interval_failure_count == 0 do
# lower hard when no failures are detected
min(1, 0.2 * old_failure_factor)
else
@previous_interval_keep_factor * old_failure_factor +
(1 - @previous_interval_keep_factor) * new_failure_factor
end
# Decide whether we may leave recovery mode (first branch) or
# whether we should enter it (second branch).
new_recovery_mode =
if workload.recovery_mode do
if workload.last_interval_failure_count <= @failure_load_min_failures &&
Enum.empty?(workload.waiting_from_map[:write]) &&
Enum.empty?(workload.waiting_from_map[:read_for_write]) &&
new_failure_load < @failure_load_recovery_score do
false
else
true
end
else
new_failure_load > @failure_load_recovery_score &&
workload.last_interval_failure_count >= @failure_load_min_failures
end
new_workload = %{
workload
| database_failure_load: new_failure_load,
recovery_mode: new_recovery_mode,
last_interval_success_count: 0,
last_interval_failure_count: 0,
last_finished_workload: %{last_finished_workload | last_finished_workload: nil},
start_time: DateTime.utc_now()
}
{:noreply, new_workload}
end
@impl true
def handle_cast(
{:report_success, pid, query_type},
workload
) do
workload =
workload
|> remove_running_pid(query_type, pid)
|> update_in([:last_interval_success_count], &(&1 + 1))
|> trigger_new_queries
{:noreply, workload}
end
@impl true
def handle_cast(
{:report_failure, pid, query_type},
workload
) do
workload =
workload
|> remove_running_pid(query_type, pid)
|> update_in([:last_interval_failure_count], &(&1 + 1))
|> mid_clocktick_failure_load_update
|> trigger_new_queries
{:noreply, workload}
end
@impl true
def handle_cast(
{:report_cancellation, pid, query_type},
workload
) do
workload =
workload
|> remove_running_pid(query_type, pid)
|> trigger_new_queries
{:noreply, workload}
end
@impl true
def handle_call(:get_state, _from, workload) do
{:reply, workload, workload}
end
@impl true
def handle_call(
{:timeout, query_type},
from,
workload
) do
{pid, _} = from
Process.monitor(pid)
workload =
workload
|> queue_from(query_type, from)
|> trigger_new_queries
{:noreply, workload}
end
@impl true
def handle_info(
{:DOWN, _reference, _process, pid, _error},
%Workload{
running_pid_map: pid_map
} = workload
) do
{read_map, read_count} = count_and_remove(pid_map[:read], pid)
{write_map, write_count} = count_and_remove(pid_map[:write], pid)
{read_for_write_map, read_for_write_count} = count_and_remove(pid_map[:read_for_write], pid)
workload
|> update_in([:running_count], fn count ->
count - (read_count + write_count + read_for_write_count)
end)
|> put_in([:running_pid_map, :read], read_map)
|> put_in([:running_pid_map, :write], write_map)
|> put_in([:running_pid_map, :read_for_write], read_for_write_map)
|> trigger_new_queries
|> wrap_in_noreply
end
@spec count_and_remove(Enum.t(), any | (any -> boolean)) :: {Enum.t(), number}
defp count_and_remove(enum, matcher) when is_function(matcher) do
{reversed_enum, number} =
enum
|> Enum.reduce({[], 0}, fn elem, {items, removal_count} ->
if matcher.(elem) do
{items, removal_count + 1}
else
{[elem | items], removal_count}
end
end)
{Enum.reverse(reversed_enum), number}
end
defp count_and_remove(enum, pid) do
count_and_remove(enum, &(&1 == pid))
end
@spec remove_running_pid(t, query_types, pid) :: t
defp remove_running_pid(workload, query_type, pid) do
# Lowers the running count and removes the pid (once) from the
# respective list.
workload
|> update_in([:running_count], &(&1 - 1))
|> update_in([:running_pid_map, query_type], &List.delete(&1, pid))
end
@spec launch_client(t, query_types, GenServer.from()) :: t
defp launch_client(workload, query_type, from) do
# Launch a new client
GenServer.reply(from, :ok)
{from_pid, _} = from
# Update workload
workload
|> update_in([:running_count], &(&1 + 1))
|> update_in([:waiting_from_map, query_type], &List.delete(&1, from))
|> update_in([:running_pid_map, query_type], fn x -> [from_pid | x] end)
# TODO: set up monitor __when PID is added to from__
end
@spec queue_from(t, query_types, GenServer.from()) :: t
defp queue_from(workload, query_type, from) do
workload
|> update_in([:waiting_from_map, query_type], &(&1 ++ [from]))
end
defp wrap_in_noreply(thing) do
{:noreply, thing}
end
### Semi-public interface, to be used for debugging.
@spec trigger_new_queries(t) :: t
def trigger_new_queries(
%Workload{recovery_mode: false, running_count: running_count} = workload
)
when running_count < @non_recovery_max_running do
to_launch = @non_recovery_max_running - running_count
{_to_launch, new_workload} =
@query_types
|> Enum.reduce_while(
{to_launch, workload},
fn
_method, {0, workload} ->
{:halt, {to_launch, workload}}
method, {to_launch, workload} ->
froms_to_start = workload.waiting_from_map[method]
{to_launch, workload} =
froms_to_start
|> Enum.reduce_while({to_launch, workload}, fn
_from, {0, workload} ->
{:halt, {0, workload}}
from, {to_launch, workload} ->
new_workload = launch_client(workload, method, from)
{:cont, {to_launch - 1, new_workload}}
end)
{:cont, {to_launch, workload}}
end
)
new_workload
end
def trigger_new_queries(%Workload{recovery_mode: true} = workload) do
# Find the recovery mode running type we are in
running_type = recovery_running_type(workload)
max_of_type = @recovery_max[running_type]
queries_to_run = max_of_type - workload.running_count
{workload, _} =
if queries_to_run > 0 do
# start as many queries of running type as we can
Enum.reduce_while(workload.waiting_from_map[running_type], {workload, queries_to_run}, fn
_from, {workload, 0} ->
{:halt, {workload, 0}}
from, {workload, queries_to_run} ->
new_workload = launch_client(workload, running_type, from)
{:cont, {new_workload, queries_to_run - 1}}
end)
else
{workload, nil}
end
workload
end
@doc """
Gets the running type in case of recovery mode, or nil if we are not
in recovery mode.
This is the type of queries we should be running. This is thus the
most important type of query which we are either running or which we
could be running.
"""
@spec recovery_running_type(t) :: query_types | nil
def recovery_running_type(%Workload{recovery_mode: false}),
do: nil
def recovery_running_type(workload) do
cond do
!Enum.empty?(workload.running_pid_map.write) -> :write
!Enum.empty?(workload.waiting_from_map.write) -> :write
!Enum.empty?(workload.running_pid_map.read_for_write) -> :read_for_write
!Enum.empty?(workload.waiting_from_map.read_for_write) -> :read_for_write
!Enum.empty?(workload.running_pid_map.read) -> :read
!Enum.empty?(workload.waiting_from_map.read) -> :read
true -> :read
end
end
_ = """
Handles updating of load and recovery state during a clocktick.
"""
@spec mid_clocktick_failure_load_update(t) :: t
defp mid_clocktick_failure_load_update(workload) do
workload
|> increase_failure_load_during_clocktick()
|> update_recovery_mode_during_clocktick()
end
_ = """
Increases the database load in case we have failed too many times in
this interval.
"""
@spec increase_failure_load_during_clocktick(t) :: t
defp increase_failure_load_during_clocktick(workload) do
cond do
is_zero(@bump_load_increase_per_tick) ->
workload
is_zero(@bump_failure_query_amount_for_tick) ->
workload
true ->
interval_failure_count = workload.last_interval_failure_count
failure_tick_rem =
Integer.mod(interval_failure_count + 1, @bump_failure_query_amount_for_tick)
is_failure_tick = failure_tick_rem == 0
if is_failure_tick do
update_in(workload.database_failure_load, &(&1 + @bump_load_increase_per_tick))
else
workload
end
end
end
@spec update_recovery_mode_during_clocktick(t) :: t
defp update_recovery_mode_during_clocktick(workload) do
update_in(
workload.recovery_mode,
&(&1 ||
(workload.last_interval_failure_count > @failure_load_min_failures &&
workload.database_failure_load > @failure_load_recovery_score))
)
end
@spec is_zero(number) :: boolean
defp is_zero(0), do: true
defp is_zero(_), do: false
end

# priv/repo/migrations/20171230122831_add_unique_index_on_emails.exs (stevegrossi/ex_venture, MIT)
defmodule Data.Repo.Migrations.AddUniqueIndexOnEmails do
use Ecto.Migration
def change do
create index(:users, :email, unique: true)
end
end

# apps/extended_api/lib/extended_api/worker/get_trytes/function/edge_fn.ex (iotaledger/chronicle, Apache-2.0)
defmodule ExtendedApi.Worker.GetTrytes.EdgeFn do
@moduledoc """
This module holds the function that computes the edge query result.
It creates one query to fetch two rows:

1. the address's information row (fixed_fields)
2. the transaction's information row (dynamic_fields)

Finally it returns `{:ok, query_state}`, meaning the query has been
received by the shard's stage (reporter), or `{:error, reason}` if
the reporter/shard stage is dead.
"""
alias ExtendedApi.Worker.GetTrytes.Helper
@doc """
This function handles the edge row.
"""
@spec bundle_queries(integer, Keyword.t, list) :: tuple
def bundle_queries(
ref,
[lb: lb, ts: ts, v2: bh, ex: ex, ix: ix, el: el, lx: lx],
_) do
# lb indicates label (tx_hash)
# ts indicates bundle_timestamp
# v2(bh) indicates bundle_hash/bh
# ex indicates h_hash/id
# ix indicates current_index
# el indicates whether input or output.
# lx indicates last_index.
# we are sending the bundle query.
{ok?, _, query_state} =
Helper.bundle_query(bh, el,lb, ts, ix,lx,ex, ref)
if ok? == :ok do
# we return query state
{:ok, query_state}
else
# we break,
{:error, {:dead_shard_stage, ok?} }
end
end
end

# priv/repo/migrations/20180723212733_create_users.exs (kenkeiras/MonTree, WTFPL)
defmodule Techtree.Repo.Migrations.CreateUsers do
use Ecto.Migration
def change do
create table(:users) do
add :name, :string
add :username, :string
timestamps()
end
create unique_index(:users, [:username])
end
end

# lib/mix/test/mix/tasks/help_test.exs (enokd/elixir, Apache-2.0)
Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.HelpTest do
use MixTest.Case
test "help lists all tasks" do
in_fixture "only_mixfile", fn ->
Mix.Tasks.Help.run []
assert_received { :mix_shell, :info, ["mix" <> _] }
assert_received { :mix_shell, :info, ["mix help" <> _] }
assert_received { :mix_shell, :info, ["mix compile" <> _] }
end
end
test "help list default task" do
in_fixture "only_mixfile", fn ->
Mix.Tasks.Help.run []
{ _, _, [output] } =
assert_received { :mix_shell, :info, [_] }
assert output =~ ~r/^mix\s+# Run the default task \(current: mix run\)/m
end
end
test "help TASK" do
in_fixture "only_mixfile", fn ->
Mix.Tasks.Help.run ["compile"]
{ _, _, [output] } =
assert_received { :mix_shell, :info, [_] }
assert output =~ "# mix help compile"
{ _, _, [output] } =
assert_received { :mix_shell, :info, [_] }
assert output =~ "## Command line options"
{ _, _, [output] } =
assert_received { :mix_shell, :info, [_] }
assert output =~ ~r/^Location:/m
end
end
end

# lib/kafka_ex/exceptions.ex (Zarathustra2/kafka_ex, MIT)
defmodule KafkaEx.ConsumerGroupRequiredError do
defexception [:message]
def exception(%{__struct__: struct}) do
message =
"KafkaEx requests of type #{struct} " <>
"require that the worker is configured for a consumer group."
%__MODULE__{message: message}
end
def exception(action) when is_binary(action) do
message =
"KafkaEx #{action} requires that the worker is configured " <>
"for a consumer group."
%__MODULE__{message: message}
end
end
defmodule KafkaEx.InvalidConsumerGroupError do
defexception [:message]
def exception(consumer_group) do
message = "Invalid consumer group: #{inspect(consumer_group)}"
%__MODULE__{message: message}
end
end
defmodule KafkaEx.TimestampNotSupportedError do
defexception message: "Timestamp requires produce api_version >= 3"
end

# examples/simple/test/test_helper.exs (korutx/mongodb_ecto, Apache-2.0)
ExUnit.start()
Logger.configure(level: :info)
defmodule Simple.Case do
use ExUnit.CaseTemplate
setup do
Mongo.Ecto.truncate(Simple.Repo)
:ok
end
end

# lib/oban/plugins/gossip.ex (thiamsantos/oban, Apache-2.0)
defmodule Oban.Plugins.Gossip do
@moduledoc """
Periodically broadcast queue activity to the gossip notification channel.
## Using the Plugin
The following example demonstrates using the plugin without any configuration, which will broadcast
the state of each local queue every 1 second:
config :my_app, Oban,
plugins: [Oban.Plugins.Gossip],
...
Override the default options to broadcast every 5 seconds:
config :my_app, Oban,
plugins: [{Oban.Plugins.Gossip, interval: :timer.seconds(5)}],
...
## Options
* `:interval` — the number of milliseconds between gossip broadcasts
## Instrumenting with Telemetry
The `Oban.Plugins.Gossip` plugin adds the following metadata to the `[:oban, :plugin, :stop]` event:
* `:gossip_count` - the number of queues that had activity broadcasted
"""
use GenServer
alias Oban.{Config, Notifier}
@type option :: {:conf, Config.t()} | {:name, GenServer.name()} | {:interval, pos_integer()}
defmodule State do
@moduledoc false
defstruct [:conf, :name, :timer, interval: :timer.seconds(1)]
end
@doc false
@spec start_link([option()]) :: GenServer.on_start()
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: opts[:name])
end
@impl GenServer
def init(opts) do
Process.flag(:trap_exit, true)
state =
State
|> struct!(opts)
|> schedule_gossip()
{:ok, state}
end
@impl GenServer
def terminate(_reason, state) do
if is_reference(state.timer), do: Process.cancel_timer(state.timer)
:ok
end
@impl GenServer
def handle_info(:gossip, %State{} = state) do
meta = %{conf: state.conf, plugin: __MODULE__}
match = [{{{state.conf.name, {:producer, :_}}, :"$1", :_}, [], [:"$1"]}]
:telemetry.span(state.conf.telemetry_prefix ++ [:plugin], meta, fn ->
checks =
Oban.Registry
|> Registry.select(match)
|> Enum.map(&safe_check(&1, state))
|> Enum.reject(&is_nil/1)
|> Enum.map(&sanitize_name/1)
if Enum.any?(checks), do: Notifier.notify(state.conf, :gossip, checks)
{:ok, Map.put(meta, :gossip_count, length(checks))}
end)
{:noreply, schedule_gossip(state)}
end
def handle_info(_message, state) do
{:noreply, state}
end
defp schedule_gossip(state) do
%{state | timer: Process.send_after(self(), :gossip, state.interval)}
end
defp safe_check(pid, state) do
if Process.alive?(pid), do: GenServer.call(pid, :check, state.interval)
catch
:exit, _ -> nil
end
defp sanitize_name(%{name: name} = check) when is_binary(name), do: check
defp sanitize_name(%{name: name} = check), do: %{check | name: inspect(name)}
end

# priv/repo/migrations/20170627125432_create_golos_comments.exs (cyberpunk-ventures/glasnost, MIT)
defmodule Glasnost.Repo.Migrations.CreateGolosComments do
use Ecto.Migration
def change do
create_if_not_exists table(:golos_comments) do
add :author, :text
add :title, :text
add :permlink, :text
add :body, :text
add :body_html, :text
add :tags, {:array, :text}
add :category, :text
add :json_metadata, :map
add :created, :timestamp
add :total_payout_value, :float
add :pending_payout_value, :float
timestamps()
end
end
end

# core/sup_tree_core/periodic_log/message_builder.ex (IvanPereyra-23/PaaS, Apache-2.0)
# Copyright(c) 2015-2020 ACCESS CO., LTD. All rights reserved.
defmodule AntikytheraCore.PeriodicLog.MessageBuilder do
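@moduledoc """
Builds periodic log entries listing processes with long message queues.

Uses `:recon.proc_count/2` to pick the busiest processes and, for each
one over the queue-length threshold, appends a sample of its pending
messages to the log entry.
"""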
@max_proc_to_log 5
@max_msg_to_log 10
@queue_len_threshold 100
def init() do
nil
end
def build_log(state) do
log =
:recon.proc_count(:message_queue_len, @max_proc_to_log)
|> Enum.filter(fn({_pid, qlen, _info}) -> qlen >= @queue_len_threshold end)
|> build_log_from_processes()
{log, state}
end
defp build_log_from_processes([]), do: nil
defp build_log_from_processes(procs) do
log_time = Antikythera.Time.to_iso_timestamp(Antikythera.Time.now())
procs
|> Enum.reduce(log_time, fn({pid, qlen, info}, acc) ->
acc2 = acc <> "\n" <> Integer.to_string(qlen) <> " " <> inspect(info, structs: false)
append_messages_to_log(acc2, Process.info(pid))
end)
end
defp append_messages_to_log(log, nil), do: log <> "\n This process has already exited."
defp append_messages_to_log(log, process_info) do
process_info
|> Keyword.get(:messages)
|> Enum.take(@max_msg_to_log)
|> Enum.reduce(log, fn(msg, acc) -> acc <> "\n " <> inspect(msg, structs: false) end)
end
end

# lib/mix/lib/mix/tasks/release.ex (dvic/elixir, Apache-2.0)
defmodule Mix.Tasks.Release do
use Mix.Task
@shortdoc "Assembles a self-contained release"
@moduledoc """
Assembles a self-contained release for the current project:
MIX_ENV=prod mix release
MIX_ENV=prod mix release NAME
Once a release is assembled, it can be packaged and deployed to a
target, as long as the target runs on the same operating system (OS)
distribution and version as the machine running the `mix release`
command.
A release can be configured in your `mix.exs` file under the `:releases`
key inside `def project`:
def project do
[
releases: [
demo: [
include_executables_for: [:unix],
applications: [runtime_tools: :permanent]
],
...
]
]
end
You can specify multiple releases where the key is the release name
and the value is a keyword list with the release configuration.
Releasing a certain name is done with:
MIX_ENV=prod mix release demo
If the given name does not exist, an error is raised.
If `mix release`, without a name, is invoked and there are multiple names,
an error will be raised unless you set `default_release: NAME` at the root
of your project configuration.
If `mix release` is invoked and there are no names, a release using the
application name and default values is assembled.
## Why releases?
Releases allow developers to precompile and package all of their code
and the runtime into a single unit. The benefits of releases are:
* Code preloading. The VM has two mechanisms for loading code:
interactive and embedded. By default, it runs in the interactive
mode which dynamically loads modules when they are used for the
first time. The first time your application calls `Enum.map/2`,
the VM will find the `Enum` module and load it. There’s a downside.
When you start a new server in production, it may need to load
many other modules, causing the first requests to have an unusual
spike in response time. When running in Erlang/OTP earlier than 23,
the system always runs in embedded mode. When using Erlang/OTP 23+,
they run in interactive mode while being configured and then it
swaps to embedded mode, guaranteeing your system is ready to handle
requests after booting.
* Configuration and customization. Releases give developers fine
grained control over system configuration and the VM flags used
to start the system.
* Self-contained. A release does not require the source code to be
included in your production artifacts. All of the code is precompiled
and packaged. Releases do not even require Erlang or Elixir in your
servers, as it includes the Erlang VM and its runtime by default.
Furthermore, both Erlang and Elixir standard libraries are stripped
to bring only the parts you are actually using.
* Multiple releases. You can assemble different releases with
different configuration per application or even with different
applications altogether.
* Management scripts. Releases come with scripts to start, restart,
connect to the running system remotely, execute RPC calls, run as
daemon, run as a Windows service, and more.
## Running the release
Once a release is assembled, you can start it by calling
`bin/RELEASE_NAME start` inside the release. In production, you would do:
MIX_ENV=prod mix release
_build/prod/rel/my_app/bin/my_app start
`bin/my_app start` will start the system connected to the current standard
input/output, where logs are also written to by default. This is the
preferred way to run the system. Many tools, such as `systemd`, platforms
as a service, such as Heroku, and many containers platforms, such as Docker,
are capable of processing the standard input/output and redirecting
the log contents elsewhere. Those tools and platforms also take care
of restarting the system in case it crashes.
You can also execute one-off commands, run the release as a daemon on
Unix-like system, or install it as a service on Windows. We will take a
look at those next. You can also list all available commands by invoking
`bin/RELEASE_NAME`.
### One-off commands (eval and rpc)
If you want to invoke specific modules and functions in your release,
you can do so in two ways: using `eval` or `rpc`.
bin/RELEASE_NAME eval "IO.puts(:hello)"
bin/RELEASE_NAME rpc "IO.puts(:hello)"
The `eval` command starts its own instance of the VM but without
starting any of the applications in the release and without starting
distribution. For example, if you need to do some prep work before
running the actual system, like migrating your database, `eval` can
be a good fit. Just keep in mind any application you may use during
eval has to be explicitly loaded and/or started.
You can start an application by calling `Application.ensure_all_started/1`.
However, if for some reason you cannot start an application, maybe
because it will run other services you do not want, you must at least
load the application by calling `Application.load/1`. If you don't
load the application, any attempt at reading its environment or
configuration may fail. Note that if you start an application,
it is automatically loaded before started.
Another way to run commands is with `rpc`, which will connect to the
system currently running and instruct it to execute the given
expression. This means you need to guarantee the system was already
started and be careful with the instructions you are executing.
You can also use `remote` to connect a remote IEx session to the
system.
#### Helper module
As you operate your system, you may find yourself running some piece of code
as a one-off command quite often. You may consider creating a module to group
these tasks:
# lib/my_app/release_tasks.ex
defmodule MyApp.ReleaseTasks do
def eval_purge_stale_data() do
# Eval commands needs to start the app before
# Or Application.load(:my_app) if you can't start it
Application.ensure_all_started(:my_app)
# Code that purges stale data
...
end
def rpc_print_connected_users() do
# Code that print users connected to the current running system
...
end
end
In the example above, we prefixed the function names with the command
name used to execute them, but that is entirely optional.
And to run them:
bin/RELEASE_NAME eval "MyApp.ReleaseTasks.eval_purge_stale_data()"
bin/RELEASE_NAME rpc "MyApp.ReleaseTasks.rpc_print_connected_users()"
### Daemon mode (Unix-like)
You can run the release in daemon mode with the command:
bin/RELEASE_NAME daemon
In daemon mode, the system is started on the background via
[`run_erl`](https://erlang.org/doc/man/run_erl.html). You may also
want to enable [`heart`](https://erlang.org/doc/man/heart.html)
in daemon mode so it automatically restarts the system in case
of crashes. See the generated `releases/RELEASE_VSN/env.sh` file.
The daemon will write all of its standard output to the "tmp/log/"
directory in the release root. You can watch the log file by doing
`tail -f tmp/log/erlang.log.1` or similar. Once files get too large,
the index suffix will be incremented. A developer can also attach
to the standard input of the daemon by invoking "to_erl tmp/pipe/"
from the release root. However, note that attaching to the system
should be done with extreme care, since the usual commands for
exiting an Elixir system, such as hitting Ctrl+C twice or Ctrl+\\,
will actually shut down the daemon. Therefore, using
`bin/RELEASE_NAME remote` should be preferred, even in daemon mode.
You can customize the tmp directory used both for logging and for
piping in daemon mode by setting the `RELEASE_TMP` environment
variable. See the "Customization" section.
### Services mode (Windows)
While daemons are not available on Windows, it is possible to install a
released system as a service on Windows with the help of
[`erlsrv`](https://erlang.org/doc/man/erlsrv.html). This can be done by
running:
bin/RELEASE_NAME install
Once installed, the service must be explicitly managed via the `erlsrv`
executable, which is included in the `erts-VSN/bin` directory.
The service is not started automatically after installing.
For example, if you have a release named `demo`, you can install
the service and then start it from the release root as follows:
bin/demo install
erts-VSN/bin/erlsrv.exe start demo_demo
The name of the service is `demo_demo` because the name is built
by concatenating the node name with the release name. Since Elixir
automatically uses the same name for both, the service will be
referenced as `demo_demo`.
The `install` command must be executed as an administrator.
### `bin/RELEASE_NAME` commands
The following commands are supported by `bin/RELEASE_NAME`:
start Starts the system
start_iex Starts the system with IEx attached
daemon Starts the system as a daemon (Unix-like only)
daemon_iex Starts the system as a daemon with IEx attached (Unix-like only)
install Installs this system as a Windows service (Windows only)
eval "EXPR" Executes the given expression on a new, non-booted system
rpc "EXPR" Executes the given expression remotely on the running system
remote Connects to the running system via a remote shell
restart Restarts the running system via a remote command
stop Stops the running system via a remote command
pid Prints the operating system PID of the running system via a remote command
version Prints the release name and version to be booted
## Deployments
### Requirements
A release is built on a **host**, a machine which contains Erlang, Elixir,
and any other dependencies needed to compile your application. A release is
then deployed to a **target**, potentially the same machine as the host,
but usually separate, and often there are many targets (either multiple
instances, or the release is deployed to heterogeneous environments).
To deploy straight from a host to a separate target without cross-compilation,
the following must be the same between the host and the target:
* Target architecture (for example, x86_64 or ARM)
* Target vendor + operating system (for example, Windows, Linux, or Darwin/macOS)
* Target ABI (for example, musl or gnu)
This is often represented in the form of target triples, for example,
`x86_64-unknown-linux-gnu`, `x86_64-unknown-linux-musl`, `x86_64-apple-darwin`.
So to be more precise, to deploy straight from a host to a separate target,
the Erlang Runtime System (ERTS), and any native dependencies (NIFs), must
be compiled for the same target triple. If you are building on a MacBook
(`x86_64-apple-darwin`) and trying to deploy to a typical Ubuntu machine
(`x86_64-unknown-linux-gnu`), the release will not work. Instead you should
build the release on a `x86_64-unknown-linux-gnu` host. As we will see, this
can be done in multiple ways, such as releasing on the target itself, or by
using virtual machines or containers, usually as part of your release pipeline.
In addition to matching the target triple, it is also important that the
target has all of the system packages that your application will need at
runtime. A common one is the need for OpenSSL when building an application
that uses `:crypto` or `:ssl`, which is dynamically linked to ERTS. The other
common source for native dependencies like this comes from dependencies
containing NIFs (natively-implemented functions) which may expect to
dynamically link to libraries they use.
Of course, some operating systems and package managers can differ between
versions, so if your goal is to have full compatibility between host and
target, it is best to ensure the operating system and system package manager
have the same versions on host and target. This may even be a requirement in
some systems, especially so with package managers that try to create fully
reproducible environments (Nix, Guix).
Similarly, when creating a stand-alone package and release for Windows, note
the Erlang Runtime System has a dependency to some Microsoft libraries
(Visual C++ Redistributable Packages for Visual Studio 2013). These libraries
are installed (if not present before) when Erlang is installed but it is not
part of the standard Windows environment. Deploying a stand-alone release on
a computer without these libraries will result in a failure when trying to
run the release. One way to solve this is to download and install these
Microsoft libraries the first time a release is deployed (the Erlang installer
version 10.6 ships with “Microsoft Visual C++ 2013 Redistributable - 12.0.30501”).
Alternatively, you can also bundle the compiled object files in the release,
as long as they were compiled for the same target. If doing so, you need to
update `LD_LIBRARY_PATH` environment variable with the paths containing the
bundled objects on Unix-like systems or the `PATH` environment variable on
Windows systems.
Currently, there is no official way to cross-compile a release from one
target triple to another, due to the complexities involved in the process.
### Techniques
There are a couple of ways to guarantee that a release is built on a host with
the same properties as the target. A simple option is to fetch the source,
compile the code and assemble the release on the target itself. It would
be something like this:
git clone remote://path/to/my_app.git my_app_source
cd my_app_source
mix deps.get --only prod
MIX_ENV=prod mix release
_build/prod/rel/my_app/bin/my_app start
If you prefer, you can also compile the release to a separate directory,
so you can erase all source after the release is assembled:
git clone remote://path/to/my_app.git my_app_source
cd my_app_source
mix deps.get --only prod
MIX_ENV=prod mix release --path ../my_app_release
cd ../my_app_release
rm -rf ../my_app_source
bin/my_app start
However, this option can be expensive if you have multiple production
nodes or if the release assembling process is a long one, as each node
needs to individually assemble the release.
You can automate this process in a couple different ways. One option
is to make it part of your Continuous Integration (CI) / Continuous
Deployment (CD) pipeline. When you have a CI/CD pipeline, it is common
that the machines in your CI/CD pipeline run on the exact same target
triple as your production servers (if they don't, they should).
In this case, you can assemble the release at the end of your CI/CD
pipeline by calling `MIX_ENV=prod mix release` and push the artifact
to S3 or any other network storage. To perform the deployment, your
production machines can fetch the deployment from the network storage
and run `bin/my_app start`.
Another mechanism to automate deployments is to use images, such as
Amazon Machine Images, or container platforms, such as Docker.
For instance, you can use Docker to run locally a system with the
exact same target triple as your production servers. Inside the
container, you can invoke `MIX_ENV=prod mix release` and build
a complete image and/or container with the operating system, all
dependencies as well as the releases.
In other words, there are multiple ways systems can be deployed and
releases can be automated and incorporated into all of them as long
as you remember to build the system in the same target triple.
Once a system is deployed, shutting down the system can be done by
sending SIGINT/SIGTERM to the system, which is what most containers,
platforms and tools do, or by explicitly invoking `bin/RELEASE_NAME stop`.
Once the system receives the shutdown request, each application and
their respective supervision trees will stop, one by one, in the
opposite order that they were started.
## Customization
There are a couple ways in which developers can customize the generated
artifacts inside a release.
### Options
The following options can be set inside your `mix.exs` on each release definition:
* `:applications` - a keyword list that configures and adds new applications
to the release. The key is the application name and the value is one of:
* `:permanent` - the application is started and the node shuts down
if the application terminates, regardless of reason
* `:transient` - the application is started and the node shuts down
if the application terminates abnormally
* `:temporary` - the application is started and the node does not
shut down if the application terminates
* `:load` - the application is only loaded
* `:none` - the application is part of the release but it is neither
loaded nor started
All applications default to `:permanent`.
By default `:applications` includes the current application and all
applications the current application depends on, recursively.
You can include new applications or change the mode of
existing ones by listing them here. The order of the applications given
in `:applications` will be preserved as much as possible, with only
`:kernel`, `:stdlib`, `:sasl`, and `:elixir` listed before the given
application list.
Releases assembled from an umbrella project require this configuration
to be explicitly given. A combined example of several of these
options is shown after this list.
* `:strip_beams` - controls if BEAM files should have their debug information,
documentation chunks, and other non-essential metadata removed. Defaults to
`true`. May be set to `false` to disable stripping. Also accepts
`[keep: ["Docs", "Dbgi"]]` to keep certain chunks that are usually stripped.
* `:cookie` - a string representing the Erlang Distribution cookie. If this
option is not set, a random cookie is written to the `releases/COOKIE` file
when the first release is assembled. At runtime, we will first attempt
to fetch the cookie from the `RELEASE_COOKIE` environment variable and
then we'll read the `releases/COOKIE` file.
If you are setting this option manually, we recommend the cookie option
to be a long and randomly generated string, such as:
`Base.url_encode64(:crypto.strong_rand_bytes(40))`. We also recommend to restrict
the characters in the cookie to the subset returned by `Base.url_encode64/1`.
* `:validate_compile_env` - by default a release will match all runtime
configuration against any configuration that was marked at compile time
in your application of its dependencies via the `Application.compile_env/3`
function. If there is a mismatch between those, it means your system is
misconfigured and unable to boot. You can disable this check by setting
this option to false.
* `:path` - the path the release should be installed to.
Defaults to `"_build/MIX_ENV/rel/RELEASE_NAME"`.
* `:version` - the release version as a string or `{:from_app, app_name}`.
Defaults to the current application version. The `{:from_app, app_name}` format
can be used to easily reference the application version from another application.
This is particularly useful in umbrella applications.
* `:quiet` - a boolean that controls if releases should write steps to
the standard output. Defaults to `false`.
* `:include_erts` - a boolean, string, or anonymous function of arity zero.
If a boolean, it indicates whether the Erlang Runtime System (ERTS), which
includes the Erlang VM, should be included in the release. The default is
`true`, which is also the recommended value. If a string, it represents
the path to an existing ERTS installation. If an anonymous function of
arity zero, it's a function that returns any of the above (boolean or string).
You may also set this option to `false` if you desire to use the ERTS version installed
on the target. Note, however, that the ERTS version on the target must have **the
exact version** as the ERTS version used when the release is assembled. Setting it to
`false` also disables hot code upgrades. Therefore, `:include_erts` should be
set to `false` with caution and only if you are assembling the release on the
same server that runs it.
* `:include_executables_for` - a list of atoms detailing for which Operating
Systems executable files should be generated for. By default, it is set to
`[:unix, :windows]`. You can customize those as follows:
releases: [
demo: [
include_executables_for: [:unix] # Or [:windows] or []
]
]
* `:rel_templates_path` - the path to find template files that are copied to
the release, such as "vm.args.eex", "env.sh.eex" (or "env.bat.eex"), and
"overlays". Defaults to "rel" in the project root.
* `:overlays` - a list of directories with extra files to be copied
as is to the release. The "overlays" directory at `:rel_templates_path`
is always included in this list by default (typically at "rel/overlays").
See the "Overlays" section for more information.
* `:steps` - a list of steps to execute when assembling the release. See
the "Steps" section for more information.
Note each release definition can be given as an anonymous function. This
is useful if some release attributes are expensive to compute:
releases: [
demo: fn ->
[version: @version <> "+" <> git_ref()]
end
]
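
Putting several of these options together, a hypothetical `demo` release
could be declared as follows (the application list is illustrative):

    releases: [
      demo: [
        applications: [runtime_tools: :permanent, observer: :load],
        include_executables_for: [:unix],
        strip_beams: [keep: ["Docs"]],
        steps: [:assemble, :tar]
      ]
    ]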
Besides the options above, it is possible to customize the generated
release with custom files, by tweaking the release steps or by running
custom options and commands on boot. We will detail both approaches next.
### Overlays
Often it is necessary to copy extra files to the release root after
the release is assembled. This can be easily done by placing such
files in the `rel/overlays` directory. Any file in there is copied
as is to the release root. For example, if you have placed a
"rel/overlays/Dockerfile" file, the "Dockerfile" will be copied as
is to the release root.
If you want to specify extra overlay directories, you can do so
with the `:overlays` option. If you need to copy files dynamically,
see the "Steps" section.
### Steps
It is possible to add one or more steps before and after the release is
assembled. This can be done with the `:steps` option:
releases: [
demo: [
steps: [&set_configs/1, :assemble, ©_extra_files/1]
]
]
The `:steps` option must be a list and it must always include the
atom `:assemble`, which does most of the release assembling. You
can pass anonymous functions before and after the `:assemble` to
customize your release assembling pipeline. Those anonymous functions
will receive a `Mix.Release` struct and must return the same or
an updated `Mix.Release` struct. It is also possible to build a tarball
of the release by passing the `:tar` step anywhere after `:assemble`.
If the release `:path` is not configured, the tarball is created in
`_build/MIX_ENV/RELEASE_NAME-RELEASE_VSN.tar.gz` Otherwise it is
created inside the configured `:path`.
See `Mix.Release` for more documentation on the struct and which
fields can be modified. Note that the `:steps` field itself can be
modified and it is updated every time a step is called. Therefore,
if you need to execute a command before and after assembling the
release, you only need to declare the first steps in your pipeline
and then inject the last step into the release struct. The steps
field can also be used to verify if the step was set before or
after assembling the release.
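
For reference, a step is just a one-arity function. A minimal sketch of
a custom step (the "rel/extra" directory is illustrative) could be:

    defp copy_extra_files(release) do
      # `release.path` points at the root of the assembled release.
      File.cp_r!("rel/extra", Path.join(release.path, "extra"))
      release
    end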
### vm.args and env.sh (env.bat)
Developers may want to customize the VM flags and environment variables
given when the release starts. This is typically done by customizing
two files inside your release: `releases/RELEASE_VSN/vm.args` and
`releases/RELEASE_VSN/env.sh` (or `env.bat` on Windows).
However, instead of modifying those files after the release is built,
the simplest way to customize those files is by running `mix release.init`.
The Mix task will copy custom `rel/vm.args.eex`, `rel/env.sh.eex`, and
`rel/env.bat.eex` files to your project root. You can modify those
files and they will be evaluated every time you perform a new release.
Those files are regular EEx templates and they have a single assign,
called `@release`, with the `Mix.Release` struct.
The `vm.args` file may contain any of the VM flags accepted by the [`erl`
command](https://erlang.org/doc/man/erl.html).
The `env.sh` and `env.bat` is used to set environment variables.
In there, you can set vars such as `RELEASE_NODE`, `RELEASE_COOKIE`,
and `RELEASE_TMP` to customize your node name, cookie and tmp
directory respectively. Whenever `env.sh` or `env.bat` is invoked,
the variables `RELEASE_ROOT`, `RELEASE_NAME`, `RELEASE_VSN`, and
`RELEASE_COMMAND` have already been set, so you can rely on them.
See the section on environment variables for more information.
Furthermore, while `vm.args` is static, you can use `env.sh` and
`env.bat` to dynamically set VM options. For example, if you want
to make sure the Erlang Distribution listens only on a given port
known at runtime, you can set the following:
case $RELEASE_COMMAND in
start*|daemon*)
ELIXIR_ERL_OPTIONS="-kernel inet_dist_listen_min $BEAM_PORT inet_dist_listen_max $BEAM_PORT"
export ELIXIR_ERL_OPTIONS
;;
*)
;;
esac
Note we only set the port on start/daemon commands. If you also limit
the port on other commands, such as `rpc`, then you will be unable
to establish a remote connection as the port will already be in use
by the node.
On Windows, your `env.bat` would look like this:
IF NOT %RELEASE_COMMAND:start=%==%RELEASE_COMMAND% (
set ELIXIR_ERL_OPTIONS="-kernel inet_dist_listen_min %BEAM_PORT% inet_dist_listen_max %BEAM_PORT%"
)
## Application configuration
Releases provides two mechanisms for configuring OTP applications:
build-time and runtime.
### Build-time configuration
Whenever you invoke a `mix` command, Mix loads the configuration
in `config/config.exs`, if said file exists. It is common for the
`config/config.exs` file itself to import other configuration based
on the current `MIX_ENV`, such as `config/dev.exs`, `config/test.exs`,
and `config/prod.exs`. We say that this configuration is a build-time
configuration as it is evaluated whenever you compile your code or
whenever you assemble the release.
In other words, if your configuration does something like:
config :my_app, :secret_key, System.fetch_env!("MY_APP_SECRET_KEY")
The `:secret_key` key under `:my_app` will be computed on the
host machine whenever the release is built. Setting the
`MY_APP_SECRET_KEY` right before starting your release will have
no effect.
Luckily, releases also provide runtime configuration, which we will
see next.
### Runtime configuration
To enable runtime configuration in your release, all you need to do is
to create a file named `config/runtime.exs`:
import Config
config :my_app, :secret_key, System.fetch_env!("MY_APP_SECRET_KEY")
This file will be executed whenever your Mix project or your release
starts.
Your `config/runtime.exs` file needs to follow three important rules:
* It MUST `import Config` at the top instead of the deprecated `use Mix.Config`
* It MUST NOT import any other configuration file via `import_config`
* It MUST NOT access `Mix` in any way, as `Mix` is a build tool and
it is not available inside releases
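
For instance, a sketch of a `config/runtime.exs` that only requires the
secret in production (`config_env/0` returns the environment the
configuration is evaluated under):

    import Config

    if config_env() == :prod do
      config :my_app, :secret_key, System.fetch_env!("MY_APP_SECRET_KEY")
    end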
If a `config/runtime.exs` exists, it will be copied to your release
and executed early in the boot process, when only Elixir and Erlang's
main applications have been started. Once the configuration is loaded,
the Erlang system will be restarted (within the same Operating System
process) and the new configuration will take place.
You can change the path to the runtime configuration file by setting
`:runtime_config_path` inside each release configuration. This path is
resolved at build time as the given configuration file is always copied
to inside the release:
releases: [
demo: [
runtime_config_path: ...
]
]
Finally, in order for runtime configuration to work properly (as well
as any other "Config provider" as defined next), it needs to be able
to persist the newly computed configuration to disk. The computed config
file will be written to the "tmp" directory inside the release every time
the system boots. You can configure the "tmp" directory by setting the
`RELEASE_TMP` environment variable, either explicitly or inside your
`releases/RELEASE_VSN/env.sh` (or `env.bat` on Windows).
### Config providers
Releases also supports custom mechanisms, called config providers, to load
any sort of runtime configuration to the system while it boots. For instance,
if you need to access a vault or load configuration from a JSON file, it
can be achieved with config providers. The runtime configuration outlined
in the previous section is handled by the `Config.Reader` provider.
See the `Config.Provider` module for more information and more examples.
The following options can be set inside your releases key in your `mix.exs`
to control how config providers work:
* `:reboot_system_after_config` - every time your release is configured,
the system is rebooted to allow the new configuration to take place.
You can set this option to `false` to disable the rebooting for applications
that are sensitive to boot time but, in doing so, note you won't be able
to configure system applications, such as `:kernel` and `:stdlib`.
Defaults to `true` if using the deprecated `config/releases.exs`,
`false` otherwise.
* `:prune_runtime_sys_config_after_boot` - if `:reboot_system_after_config`
is set, every time your system boots, the release will write a config file
to your tmp directory. These configuration files are generally small.
But if you are concerned with disk space or if you have other restrictions,
you can ask the system to remove said config files after boot. The downside
is that you will no longer be able to restart the system internally (neither
via `System.restart/0` nor `bin/RELEASE_NAME restart`). If you need a restart,
you will have to terminate the Operating System process and start a new
one. Defaults to `false`.
* `:start_distribution_during_config` - if `:reboot_system_after_config` is
set, releases only start the Erlang VM distribution features after the config
files are evaluated. You can set it to `true` if you need distribution during
configuration. Defaults to `false` from Erlang/OTP 22+.
* `:config_providers` - a list of tuples with custom config providers.
See `Config.Provider` for more information. Defaults to `[]`.
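
As an example, registering a hypothetical JSON provider, where each entry
is a `{module, init_arg}` tuple as described in `Config.Provider`:

    releases: [
      demo: [
        config_providers: [{MyApp.JSONConfigProvider, "/etc/my_app/config.json"}]
      ]
    ]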
### Customization and configuration summary
Generally speaking, the following files are available for customizing
and configuring the running system:
* `config/config.exs` (and `config/prod.exs`) - provides build-time
application configuration, which are executed when the release is
assembled
* `config/runtime.exs` - provides runtime application configuration.
It is executed every time your Mix project or your release boots
and is further extensible via config providers. If you want to
detect whether you are inside a release, you can check for release-specific
environment variables, such as `RELEASE_NODE` or `RELEASE_MODE` (see
the sketch after this list)
* `rel/vm.args.eex` - a template file that is copied into every release
and provides static configuration of the Erlang Virtual Machine and
other runtime flags
* `rel/env.sh.eex` and `rel/env.bat.eex` - template files that are copied
into every release and are executed on every command to set up environment
variables, including specific ones to the VM, and the general environment
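
As a sketch of such a check in `config/runtime.exs`:

    if System.get_env("RELEASE_MODE") do
      # We are running inside a release.
    end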
## Directory structure
A release is organized as follows:
bin/
RELEASE_NAME
erts-ERTS_VSN/
lib/
APP_NAME-APP_VSN/
ebin/
include/
priv/
releases/
RELEASE_VSN/
consolidated/
elixir
elixir.bat
env.bat
env.sh
iex
iex.bat
runtime.exs
start.boot
start.script
start_clean.boot
start_clean.script
sys.config
vm.args
COOKIE
start_erl.data
tmp/
## Environment variables
The system sets different environment variables. The following variables
are set early on and can only be read by `env.sh` and `env.bat`:
* `RELEASE_ROOT` - points to the root of the release. If the system
includes ERTS, then it is the same as `:code.root_dir/0`. This
variable is always computed and it cannot be set to a custom value
* `RELEASE_COMMAND` - the command given to the release, such as `"start"`,
`"remote"`, `"eval"`, and so on. This is typically accessed inside `env.sh`
and `env.bat` to set different environment variables under different
conditions. Note, however, that `RELEASE_COMMAND` has not been
validated by the time `env.sh` and `env.bat` are called, so it may
be empty or contain invalid values. This variable is always computed
and it cannot be set to a custom value
* `RELEASE_NAME` - the name of the release. It can be set to a custom
value when invoking the release
* `RELEASE_VSN` - the version of the release; if unset, the latest
version is used. It can be set to a custom value when invoking the
release. The custom value must be an existing release version in
the `releases/` directory
* `RELEASE_PROG` - the command line executable used to start the release
The following variables can be set before you invoke the release or
inside `env.sh` and `env.bat`:
* `RELEASE_COOKIE` - the release cookie. By default uses the value
in `releases/COOKIE`. It can be set to a custom value
* `RELEASE_NODE` - the release node name, in the format `name@host`.
It can be set to a custom value. The name part must be made only
of letters, digits, underscores, and hyphens
* `RELEASE_SYS_CONFIG` - the location of the sys.config file. It can
be set to a custom path and it must not include the `.config` extension
* `RELEASE_VM_ARGS` - the location of the vm.args file. It can be set
to a custom path
* `RELEASE_TMP` - the directory in the release to write temporary
files to. It can be set to a custom directory. It defaults to
`$RELEASE_ROOT/tmp`
* `RELEASE_MODE` - if the release should start in embedded or
interactive mode. Defaults to "embedded". It applies only to
start/daemon/install commands
* `RELEASE_DISTRIBUTION` - how do we want to run the distribution.
May be `name` (long names), `sname` (short names) or `none`
(distribution is not started automatically). Defaults to
`sname` which allows access only within the current system.
`name` allows external connections. If `name` is used and you are
not running on Erlang/OTP 22 or later, you must set `RELEASE_NODE`
to `[email protected]` with an IP or a known host
* `RELEASE_BOOT_SCRIPT` - the name of the boot script to use when starting
the release. This script is used when running commands such as `start` and
`daemon`. The boot script is expected to be located at the
path `releases/RELEASE_VSN/RELEASE_BOOT_SCRIPT.boot`. Defaults to `start`
* `RELEASE_BOOT_SCRIPT_CLEAN` - the name of the boot script used when
starting the release clean, without your application or its dependencies.
This script is used by commands such as `eval`, `rpc`, and `remote`.
The boot script is expected to be located at the path
`releases/RELEASE_VSN/RELEASE_BOOT_SCRIPT_CLEAN.boot`. Defaults
to `start_clean`
## Umbrellas
Releases are well integrated with umbrella projects, allowing you to
release one or more subsets of your umbrella children. The only difference
between performing a release in the umbrella project compared to a
regular application is that umbrellas require you to explicitly list
your release and the starting point for each release. For example,
imagine an umbrella project with the following applications:
my_app_umbrella/
apps/
my_app_core/
my_app_event_processing/
my_app_web/
where both `my_app_event_processing` and `my_app_web` depend on
`my_app_core` but they do not depend on each other.
Inside your umbrella, you can define multiple releases:
releases: [
web_and_event_processing: [
applications: [
my_app_event_processing: :permanent,
my_app_web: :permanent
]
],
web_only: [
applications: [my_app_web: :permanent]
],
event_processing_only: [
applications: [my_app_event_processing: :permanent]
]
]
Note you don't need to define all applications in `:applications`,
only the entry points. Also remember that the recommended mode
for all applications in the system is `:permanent`.
Finally, keep in mind it is not required for you to assemble the
release from the umbrella root. You can also assemble the release
from each child application individually. Doing it from the root,
however, allows you to include two applications that do not depend
on each other as part of the same release.
## Hot Code Upgrades
Erlang and Elixir are sometimes known for the capability of upgrading
a node that is running in production without shutting down that node.
However, this feature is not supported out of the box by Elixir releases.
The reason we don't provide hot code upgrades is because they are very
complicated to perform in practice, as they require careful coding of
your processes and applications as well as extensive testing. Given most
teams can use other techniques that are language agnostic to upgrade
their systems, such as Blue/Green deployments, Canary deployments,
Rolling deployments, and others, hot upgrades are rarely a viable
option. Let's understand why.
In a hot code upgrade, you want to update a node from version A to
version B. To do so, the first step is to write recipes for every application
that changed between those two releases, telling exactly how the application
changed between versions. Those recipes are called `.appup` files.
While some of the steps in building `.appup` files can be automated,
not all of them can. Furthermore, each process in the application needs
to be explicitly coded with hot code upgrades in mind. Let's see an example.
Imagine your application has a counter process as a GenServer:
defmodule Counter do
use GenServer
def start_link(_) do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
def bump do
GenServer.call(__MODULE__, :bump)
end
## Callbacks
def init(:ok) do
{:ok, 0}
end
def handle_call(:bump, _from, counter) do
{:reply, :ok, counter + 1}
end
end
You add this process as part of your supervision tree and ship version
0.1.0 of your system. Now let's imagine that on version 0.2.0 you added
two changes: instead of `bump/0`, that always increments the counter by
one, you introduce `bump/1` that passes the exact value to bump the
counter. You also change the state, because you want to store the maximum
bump value:
defmodule Counter do
use GenServer
def start_link(_) do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
def bump(by) do
GenServer.call(__MODULE__, {:bump, by})
end
## Callbacks
def init(:ok) do
{:ok, {0, 0}}
end
def handle_call({:bump, by}, _from, {counter, max}) do
{:reply, :ok, {counter + by, max(max, by)}}
end
end
If you were to perform a hot code upgrade in such an application, it would
crash, because in the initial version the state was just a counter
but in the new version the state is a tuple. Furthermore, you changed
the format of the `call` message from `:bump` to `{:bump, by}` and
the process may have both old and new messages temporarily mixed, so
we need to handle both. The final version would be:
defmodule Counter do
use GenServer
def start_link(_) do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
def bump(by) do
GenServer.call(__MODULE__, {:bump, by})
end
## Callbacks
def init(:ok) do
{:ok, {0, 0}}
end
def handle_call(:bump, _from, {counter, max}) do
{:reply, :ok, {counter + 1, max(max, 1)}}
end
def handle_call({:bump, by}, _from, {counter, max}) do
{:reply, :ok, {counter + by, max(max, by)}}
end
def code_change(_, counter, _) do
{:ok, {counter, 0}}
end
end
Now you can proceed to list this process in the `.appup` file and
hot code upgrade it. This is one of the many steps necessary
to perform hot code upgrades and it must be taken into account by
every process and application being upgraded in the system.
The [`.appup` cookbook](https://erlang.org/doc/design_principles/appup_cookbook.html)
provides a good reference and more examples.
Once `.appup`s are created, the next step is to create a `.relup`
file with all instructions necessary to update the release itself.
The Erlang documentation provides a chapter on
[Creating and upgrading a target system](https://erlang.org/doc/system_principles/create_target.html).
[Learn You Some Erlang has a chapter on hot code upgrades](https://learnyousomeerlang.com/relups).
Overall, there are many steps, complexities and assumptions made
during hot code upgrades, which is ultimately why they are not
provided by Elixir out of the box. However, hot code upgrades can
still be achieved by teams who desire to implement those steps
on top of `mix release` in their projects or as separate libraries.
## Command line options
* `--force` - forces recompilation
* `--no-archives-check` - does not check archives
* `--no-deps-check` - does not check dependencies
* `--no-elixir-version-check` - does not check Elixir version
* `--no-compile` - does not compile before assembling the release
* `--overwrite` - if there is an existing release version, overwrite it
* `--path` - the path of the release
* `--quiet` - does not write progress to the standard output
* `--version` - the version of the release
"""
import Mix.Generator
@switches [
overwrite: :boolean,
force: :boolean,
quiet: :boolean,
path: :string,
version: :string,
compile: :boolean,
deps_check: :boolean,
archives_check: :boolean,
elixir_version_check: :boolean
]
@aliases [
f: :force
]
@impl true
def run(args) do
Mix.Project.get!()
Mix.Task.run("compile", args)
config = Mix.Project.config()
release =
case OptionParser.parse!(args, strict: @switches, aliases: @aliases) do
{overrides, [name]} -> Mix.Release.from_config!(String.to_atom(name), config, overrides)
{overrides, []} -> Mix.Release.from_config!(nil, config, overrides)
{_, _} -> Mix.raise("Expected \"mix release\" or \"mix release NAME\"")
end
if not File.exists?(release.version_path) or
yes?(release, "Release #{release.name}-#{release.version} already exists. Overwrite?") do
run_steps(release)
end
end
defp yes?(release, message) do
release.options[:overwrite] or Mix.shell().yes?(message)
end
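# Walks the :steps list. Function steps receive the release (with the
# remaining steps) and must return a %Mix.Release{}; the built-in :tar and
# :assemble steps dispatch to make_tar/1 and assemble/1 below.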
defp run_steps(%{steps: [step | steps]} = release) when is_function(step) do
case step.(%{release | steps: steps}) do
%Mix.Release{} = release ->
run_steps(release)
other ->
Mix.raise(
"Expected step #{inspect(step)} to return a Mix.Release, got: #{inspect(other)}"
)
end
end
defp run_steps(%{steps: [:tar | steps]} = release) do
%{release | steps: steps} |> make_tar() |> run_steps()
end
defp run_steps(%{steps: [:assemble | steps]} = release) do
%{release | steps: steps} |> assemble() |> run_steps()
end
defp run_steps(%{steps: []} = release) do
announce(release)
end
defp assemble(release) do
config = Mix.Project.config()
message = "#{release.name}-#{release.version} on MIX_ENV=#{Mix.env()}"
info(release, [:green, "* assembling ", :reset, message])
# releases/
# VERSION/
# consolidated/
# NAME.rel
# start.boot
# start.script
# start_clean.boot
# start_clean.script
# sys.config
# releases/
# COOKIE
# start_erl.data
consolidation_path = build_rel(release, config)
[
# erts-VSN/
:erts,
# releases/VERSION/consolidated
{:consolidated, consolidation_path},
# bin/
# RELEASE_NAME
# RELEASE_NAME.bat
# start
# start.bat
# releases/
# VERSION/
# elixir
# elixir.bat
# iex
# iex.bat
{:executables, Keyword.get(release.options, :include_executables_for, [:unix, :windows])}
# lib/APP_NAME-APP_VSN/
| Map.keys(release.applications)
]
|> Task.async_stream(&copy(&1, release), ordered: false, timeout: :infinity)
|> Stream.run()
copy_overlays(release)
end
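# Packs the release into RELEASE_NAME-RELEASE_VSN.tar.gz, including bin/,
# the current version under releases/, ERTS (when bundled), all application
# ebins, and any overlays.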
defp make_tar(release) do
build_path = Mix.Project.build_path()
dir_path =
if release.path == Path.join([build_path, "rel", Atom.to_string(release.name)]) do
build_path
else
release.path
end
out_path = Path.join(dir_path, "#{release.name}-#{release.version}.tar.gz")
info(release, [:green, "* building ", :reset, out_path])
lib_dirs =
Enum.reduce(release.applications, [], fn {name, app_config}, acc ->
vsn = Keyword.fetch!(app_config, :vsn)
[Path.join("lib", "#{name}-#{vsn}") | acc]
end)
erts_dir =
case release.erts_source do
nil -> []
_ -> ["erts-#{release.erts_version}"]
end
release_files =
for basename <- File.ls!(Path.join(release.path, "releases")),
not File.dir?(Path.join([release.path, "releases", basename])),
do: Path.join("releases", basename)
dirs =
["bin", Path.join("releases", release.version)] ++
erts_dir ++ lib_dirs ++ release_files
files =
dirs
|> Enum.filter(&File.exists?(Path.join(release.path, &1)))
|> Kernel.++(release.overlays)
|> Enum.map(&{String.to_charlist(&1), String.to_charlist(Path.join(release.path, &1))})
File.rm(out_path)
:ok = :erl_tar.create(String.to_charlist(out_path), files, [:dereference, :compressed])
release
end
# Builds the releases/VERSION directory: boot scripts, vm.args, sys.config,
# cookie and start_erl.data, returning the protocol consolidation path.
defp build_rel(release, config) do
version_path = release.version_path
File.rm_rf!(version_path)
File.mkdir_p!(version_path)
release = maybe_add_config_reader_provider(config, release, version_path)
consolidation_path =
if config[:consolidate_protocols] do
Mix.Project.consolidation_path(config)
end
sys_config =
if File.regular?(config[:config_path]) do
config[:config_path] |> Config.Reader.read!(env: Mix.env(), target: Mix.target())
else
[]
end
vm_args_path = Path.join(version_path, "vm.args")
cookie_path = Path.join(release.path, "releases/COOKIE")
start_erl_path = Path.join(release.path, "releases/start_erl.data")
config_provider_path = {:system, "RELEASE_SYS_CONFIG", ".config"}
with :ok <- make_boot_scripts(release, version_path, consolidation_path),
:ok <- make_vm_args(release, vm_args_path),
:ok <- Mix.Release.make_sys_config(release, sys_config, config_provider_path),
:ok <- Mix.Release.make_cookie(release, cookie_path),
:ok <- Mix.Release.make_start_erl(release, start_erl_path) do
consolidation_path
else
{:error, message} ->
File.rm_rf!(version_path)
Mix.raise(message)
end
end
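# Chooses the runtime configuration source: an explicit :runtime_config_path
# wins, then config/runtime.exs, then the deprecated config/releases.exs
# (which also forces a reboot after the configuration is read).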
defp maybe_add_config_reader_provider(config, %{options: opts} = release, version_path) do
default_path = config[:config_path] |> Path.dirname() |> Path.join("runtime.exs")
deprecated_path = config[:config_path] |> Path.dirname() |> Path.join("releases.exs")
{path, reboot?} =
cond do
path = opts[:runtime_config_path] ->
{path, false}
File.exists?(default_path) ->
if File.exists?(deprecated_path) do
IO.warn(
"both #{inspect(default_path)} and #{inspect(deprecated_path)} have been " <>
"found, but only #{inspect(default_path)} will be used"
)
end
{default_path, false}
File.exists?(deprecated_path) ->
# TODO: Warn from Elixir v1.13 onwards
{deprecated_path, true}
true ->
{nil, false}
end
cond do
path ->
msg = "#{path} to configure the release at runtime"
Mix.shell().info([:green, "* using ", :reset, msg])
File.cp!(path, Path.join(version_path, "runtime.exs"))
init = {:system, "RELEASE_ROOT", "/releases/#{release.version}/runtime.exs"}
opts = [path: init, env: Mix.env(), target: Mix.target(), imports: :disabled]
release = update_in(release.config_providers, &[{Config.Reader, opts} | &1])
update_in(release.options, &Keyword.put_new(&1, :reboot_system_after_config, reboot?))
release.config_providers == [] ->
skipping("runtime configuration (#{default_path} not found)")
release
true ->
release
end
end
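# Writes one boot script per entry in release.boot_scripts (:start and
# :start_clean by default), keeping the generated .rel file only for :start.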
defp make_boot_scripts(release, version_path, consolidation_path) do
prepend_paths =
if consolidation_path do
["$RELEASE_LIB/../releases/#{release.version}/consolidated"]
else
[]
end
results =
for {boot_name, modes} <- release.boot_scripts do
sys_path = Path.join(version_path, Atom.to_string(boot_name))
with :ok <- Mix.Release.make_boot_script(release, sys_path, modes, prepend_paths) do
if boot_name == :start do
rel_path = Path.join(Path.dirname(sys_path), "#{release.name}.rel")
File.rename!(sys_path <> ".rel", rel_path)
else
File.rm(sys_path <> ".rel")
end
:ok
end
end
Enum.find(results, :ok, &(&1 != :ok))
end
defp make_vm_args(release, path) do
vm_args_template = Mix.Release.rel_templates_path(release, "vm.args.eex")
if File.exists?(vm_args_template) do
copy_template(vm_args_template, path, [release: release], force: true)
else
File.write!(path, vm_args_template(release: release))
end
:ok
end
defp announce(release) do
path = Path.relative_to_cwd(release.path)
cmd = "#{path}/bin/#{release.name}"
info(release, """
Release created at #{path}!
# To start your system
#{cmd} start
Once the release is running:
# To connect to it remotely
#{cmd} remote
# To stop it gracefully (you may also send SIGINT/SIGTERM)
#{cmd} stop
To list all commands:
#{cmd}
""")
end
defp info(release, message) do
unless release.options[:quiet] do
Mix.shell().info(message)
end
end
defp skipping(message) do
Mix.shell().info([:yellow, "* skipping ", :reset, message])
end
## Overlays
defp copy_overlays(release) do
target = release.path
default = Mix.Release.rel_templates_path(release, "overlays")
overlays =
if File.dir?(default) do
[default | List.wrap(release.options[:overlays])]
else
List.wrap(release.options[:overlays])
end
relative =
overlays
|> Enum.flat_map(&File.cp_r!(&1, target))
|> Enum.uniq()
|> List.delete(target)
|> Enum.map(&Path.relative_to(&1, target))
update_in(release.overlays, &(relative ++ &1))
end
## Copy operations
defp copy(:erts, release) do
_ = Mix.Release.copy_erts(release)
:ok
end
defp copy(app, release) when is_atom(app) do
Mix.Release.copy_app(release, app)
end
defp copy({:consolidated, consolidation_path}, release) do
if consolidation_path do
consolidation_target = Path.join(release.version_path, "consolidated")
_ = Mix.Release.copy_ebin(release, consolidation_path, consolidation_target)
end
:ok
end
defp copy({:executables, include_executables_for}, release) do
elixir_bin_path = Application.app_dir(:elixir, "../../bin")
bin_path = Path.join(release.path, "bin")
File.mkdir_p!(bin_path)
for os <- include_executables_for do
{env, env_fun, clis} = cli_for(os, release)
env_path = Path.join(release.version_path, env)
env_template_path = Mix.Release.rel_templates_path(release, env <> ".eex")
if File.exists?(env_template_path) do
copy_template(env_template_path, env_path, [release: release], force: true)
else
File.write!(env_path, env_fun.(release))
end
for {filename, contents} <- clis do
target = Path.join(bin_path, filename)
File.write!(target, contents)
executable!(target)
end
for {filename, contents_fun} <- elixir_cli_for(os, release) do
source = Path.join(elixir_bin_path, filename)
if File.regular?(source) do
target = Path.join(release.version_path, filename)
File.write!(target, contents_fun.(source))
executable!(target)
else
skipping("#{filename} for #{os} (bin/#{filename} not found in the Elixir installation)")
end
end
end
end
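# Per-OS artifacts: the env file template and the top-level bin script(s).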
defp cli_for(:unix, release) do
{"env.sh", &env_template(release: &1), [{"#{release.name}", cli_template(release: release)}]}
end
defp cli_for(:windows, release) do
{"env.bat", &env_bat_template(release: &1),
[{"#{release.name}.bat", cli_bat_template(release: release)}]}
end
defp elixir_cli_for(:unix, release) do
[
{"elixir",
&(&1
|> File.read!()
|> String.replace(~s[ -pa "$SCRIPT_PATH"/../lib/*/ebin], "")
|> replace_erts_bin(release, ~s["$SCRIPT_PATH"/../../erts-#{release.erts_version}/bin/]))},
{"iex", &File.read!/1}
]
end
defp elixir_cli_for(:windows, release) do
[
{"elixir.bat",
&(&1
|> File.read!()
|> String.replace(~s[goto expand_erl_libs], ~s[goto run])
|> replace_erts_bin(release, ~s[%~dp0\\..\\..\\erts-#{release.erts_version}\\bin\\]))},
{"iex.bat", &File.read!/1}
]
end
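# Points ERTS_BIN at the bundled ERTS when one is included; otherwise the
# scripts fall back to the Erlang installation available on the host.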
defp replace_erts_bin(contents, release, new_path) do
if release.erts_source do
String.replace(contents, ~s[ERTS_BIN=], ~s[ERTS_BIN=#{new_path}])
else
contents
end
end
defp executable!(path), do: File.chmod!(path, 0o755)
# Helper functions
defp release_mode(release, env_var) do
# TODO: Remove otp_release check once we require Erlang/OTP 23+
otp_gte_23? = :erlang.system_info(:otp_release) >= '23'
reboot? = Keyword.get(release.options, :reboot_system_after_config, false)
if otp_gte_23? and reboot? and release.config_providers != [] do
"-elixir -config_provider_reboot_mode #{env_var}"
else
"-mode #{env_var}"
end
end
embed_template(:vm_args, Mix.Tasks.Release.Init.vm_args_text())
embed_template(:env, Mix.Tasks.Release.Init.env_text())
embed_template(:cli, Mix.Tasks.Release.Init.cli_text())
embed_template(:env_bat, Mix.Tasks.Release.Init.env_bat_text())
embed_template(:cli_bat, Mix.Tasks.Release.Init.cli_bat_text())
end
# ==== file: clients/calendar/lib/google_api/calendar/v3/model/setting.ex (repo: GoNZooo/elixir-google-api, license: Apache-2.0) ====
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Calendar.V3.Model.Setting do
@moduledoc """
## Attributes
- etag (String): ETag of the resource. Defaults to: `null`.
- id (String): The id of the user setting. Defaults to: `null`.
- kind (String): Type of the resource (\"calendar#setting\"). Defaults to: `null`.
- value (String): Value of the user setting. The format of the value depends on the ID of the setting. It must always be a UTF-8 string of length up to 1024 characters. Defaults to: `null`.
"""
defstruct [
:"etag",
:"id",
:"kind",
:"value"
]
end
defimpl Poison.Decoder, for: GoogleApi.Calendar.V3.Model.Setting do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.Calendar.V3.Model.Setting do
def encode(value, options) do
GoogleApi.Calendar.V3.Deserializer.serialize_non_nil(value, options)
end
end
| 31.711538 | 191 | 0.724075 |
9efd337db20b135d09a9c0f6ccc7d5a589013b05 | 477 | ex | Elixir | phoenix_sideque_poc/lib/phoenix_sideque_poc_web/views/error_helpers.ex | waferelectronics/rails_phoenix_sidekiq_communication | e9cee8fc6b39106e9acdf306140e840e0c286722 | [
"MIT"
] | null | null | null | phoenix_sideque_poc/lib/phoenix_sideque_poc_web/views/error_helpers.ex | waferelectronics/rails_phoenix_sidekiq_communication | e9cee8fc6b39106e9acdf306140e840e0c286722 | [
"MIT"
] | null | null | null | phoenix_sideque_poc/lib/phoenix_sideque_poc_web/views/error_helpers.ex | waferelectronics/rails_phoenix_sidekiq_communication | e9cee8fc6b39106e9acdf306140e840e0c286722 | [
"MIT"
] | 1 | 2020-06-05T12:39:23.000Z | 2020-06-05T12:39:23.000Z | defmodule PhoenixSidequePocWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
@doc """
Translates an error message.
"""
def translate_error({msg, opts}) do
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
Enum.reduce(opts, msg, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end
end
# ==== file: lib/price_tracker/repo.ex (repo: kevinbenard/price_tracker, license: MIT) ====
defmodule PriceTracker.Repo do
use Ecto.Repo, otp_app: :price_tracker
@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.
"""
def init(_, opts) do
{:ok, Keyword.put(opts, :url, System.get_env("DATABASE_URL"))}
end
end
# ==== file: test/sap/combinators/http_test.exs (repo: slogsdon/sap, license: MIT) ====
defmodule Sap.Combinators.HttpTest do
use ExUnit.Case, async: true
use Plug.Test
import Sap.Combinators.Http
test "get no match" do
conn1 = conn(:post, "/")
resp1 = get().(conn1)
assert resp1.status == :error
assert resp1.conn == conn1
conn2 = conn(:put, "/")
resp2 = get().(conn2)
assert resp2.status == :error
assert resp2.conn == conn2
conn3 = conn(:delete, "/")
resp3 = get().(conn3)
assert resp3.status == :error
assert resp3.conn == conn3
end
test "get match" do
conn = conn(:get, "/")
resp = get().(conn)
assert resp.status == :ok
assert resp.conn == conn
end
end
# ==== file: lib/plasm.ex (repo: facto/plasm, license: Apache-2.0) ====
defmodule Plasm do
import Ecto.Query
@doc """
Builds a query that finds all records at a specified date and time for a specified field name.
Puppy |> Plasm.at(:updated_at, date_time) |> Repo.all
Puppy |> Plasm.at(:updated_at, "2014-04-17T14:00:00Z") |> Repo.all
"""
@spec at(Ecto.Queryable.t(), atom, %DateTime{}) :: Ecto.Queryable.t()
def at(query, field_name, %DateTime{} = date_time) when is_atom(field_name) do
query
|> where([x], field(x, ^field_name) == ^date_time)
end
@spec at(Ecto.Queryable.t(), atom, any) :: Ecto.Queryable.t()
def at(query, field_name, castable) when is_atom(field_name) do
date_time = cast_to_date_time!(castable)
query
|> at(field_name, date_time)
end
@doc """
Builds a query that finds all records at or before a specified date and time for a specified field name.
Puppy |> Plasm.at_or_earlier_than(:updated_at, date_time) |> Repo.all
Puppy |> Plasm.at_or_earlier_than(:updated_at, "2014-04-17") |> Repo.all
"""
@spec at_or_earlier_than(Ecto.Queryable.t(), atom, %DateTime{}) :: Ecto.Queryable.t()
def at_or_earlier_than(query, field_name, %DateTime{} = date_time) when is_atom(field_name) do
query
|> where([x], field(x, ^field_name) <= ^date_time)
end
@spec at_or_earlier_than(Ecto.Queryable.t(), atom, any) :: Ecto.Queryable.t()
def at_or_earlier_than(query, field_name, castable) when is_atom(field_name) do
date_time = cast_to_date_time!(castable)
query
|> at_or_earlier_than(field_name, date_time)
end
@doc """
Builds a query that finds all records at a specified date and time for a specified field name.
Puppy |> Plasm.at_or_later_than(:updated_at, date_time) |> Repo.all
Puppy |> Plasm.at_or_later_than(:updated_at, "2014-04-17") |> Repo.all
"""
@spec at_or_later_than(Ecto.Queryable.t(), atom, %DateTime{}) :: Ecto.Queryable.t()
def at_or_later_than(query, field_name, %DateTime{} = date_time) when is_atom(field_name) do
query
|> where([x], field(x, ^field_name) >= ^date_time)
end
@spec at_or_later_than(Ecto.Queryable.t(), atom, any) :: Ecto.Queryable.t()
def at_or_later_than(query, field_name, castable) when is_atom(field_name) do
date_time = cast_to_date_time!(castable)
query
|> at_or_later_than(field_name, date_time)
end
@doc """
Builds an average query for a given field.
Puppy |> Plasm.average(:age) |> Repo.one
Puppy |> Plasm.average("age") |> Repo.one
"""
@spec average(Ecto.Queryable.t(), String.t) :: Ecto.Queryable.t()
def average(query, field_name) when is_binary(field_name) do
field_name = String.to_atom(field_name)
query
|> average(field_name)
end
@spec average(Ecto.Queryable.t(), atom) :: Ecto.Queryable.t()
def average(query, field_name) when is_atom(field_name) do
query
|> select([x], avg(field(x, ^field_name)))
end
@doc """
Builds a count query.
Puppy |> Plasm.count |> Repo.one
"""
@spec count(Ecto.Queryable.t()) :: Ecto.Queryable.t()
def count(query) do
query
|> select([x], count(x.id))
end
@doc """
Builds a distinct count query for a given field.
Puppy |> Plasm.count_distinct(:name) |> Repo.one
Puppy |> Plasm.count_distinct("age") |> Repo.one
"""
@spec count_distinct(Ecto.Queryable.t(), String.t) :: Ecto.Queryable.t()
def count_distinct(query, field_name) when is_binary(field_name) do
field_name = String.to_atom(field_name)
query
|> count_distinct(field_name)
end
@spec count_distinct(Ecto.Queryable.t(), atom) :: Ecto.Queryable.t()
def count_distinct(query, field_name) when is_atom(field_name) do
query
|> select([x], count(field(x, ^field_name), :distinct))
end
@doc """
Builds a distinct query for a given field.
Puppy |> Plasm.distinct_by(:age) |> Repo.all
Puppy |> Plasm.distinct_by("name") |> Repo.all
"""
@spec distinct_by(Ecto.Queryable.t(), String.t) :: Ecto.Queryable.t()
def distinct_by(query, field_name) when is_binary(field_name) do
field_name = String.to_atom(field_name)
query
|> distinct_by(field_name)
end
@spec distinct_by(Ecto.Queryable.t(), atom) :: Ecto.Queryable.t()
def distinct_by(query, field_name) when is_atom(field_name) do
query
|> distinct([x], field(x, ^field_name))
end
@doc """
Builds a query that finds all records before a specified date or date and time for a specified field name.
Puppy |> Plasm.earlier_than(:updated_at, date_or_date_time) |> Repo.all
Puppy |> Plasm.earlier_than(:updated_at, "2014-04-17") |> Repo.all
"""
@spec earlier_than(Ecto.Queryable.t(), atom, %Date{}) :: Ecto.Queryable.t()
def earlier_than(query, field_name, %Date{} = date) when is_atom(field_name) do
query
|> where([x], fragment("?::date", field(x, ^field_name)) < ^date)
end
@spec earlier_than(Ecto.Queryable.t(), atom, %DateTime{}) :: Ecto.Queryable.t()
def earlier_than(query, field_name, %DateTime{} = date_time) when is_atom(field_name) do
date = DateTime.to_date(date_time)
query
|> earlier_than(field_name, date)
end
@spec earlier_than(Ecto.Queryable.t(), atom, String.t | number) :: Ecto.Queryable.t()
def earlier_than(query, field_name, castable) when is_atom(field_name) and (is_binary(castable) or is_number(castable)) do
date_or_date_time = case cast_to_date_time(castable) do
{:ok, date_time} -> date_time
:error -> cast_to_date!(castable)
end
query
|> earlier_than(field_name, date_or_date_time)
end
@doc """
Builds a query that finds all records matching any of the primary key values in the provided list or value.
Puppy |> Plasm.find([1,2,3]) |> Repo.all
Puppy |> Plasm.find(10) |> Repo.one
Puppy |> Plasm.find("748192739812839") |> Repo.one
"""
@spec find(Ecto.Queryable.t(), list) :: Ecto.Queryable.t()
def find(query, primary_key_values) when is_list(primary_key_values) do
key = primary_key(query)
query
|> where_all([{key, primary_key_values}])
end
@spec find(Ecto.Queryable.t(), any) :: Ecto.Queryable.t()
def find(query, primary_key_value) do
key = primary_key(query)
query
|> where_all([{key, primary_key_value}])
end
@doc """
Builds a query that finds the first record after sorting by a specified field name ascending.
Optionally, provide an integer `n` to find only the first `n` records.
Puppy |> Plasm.earliest(:inserted_at) |> Repo.one
Puppy |> Plasm.earliest(:inserted_at, 20) |> Repo.all
"""
@spec earliest(Ecto.Queryable.t(), atom) :: Ecto.Queryable.t()
def earliest(query, field_name) when is_atom(field_name) do
query
|> earliest(field_name, 1)
end
@spec earliest(Ecto.Queryable.t(), atom, integer) :: Ecto.Queryable.t()
def earliest(query, field_name, n) when is_atom(field_name) and is_integer(n) do
query
|> order_by(asc: ^field_name)
|> limit(^n)
end
@doc """
Builds a query that finds all records after a specified field name and date or date and time.
Puppy |> Plasm.later_than(date) |> Repo.all
Puppy |> Plasm.later_than("2014-04-17") |> Repo.all
"""
@spec later_than(Ecto.Queryable.t(), atom, %Date{}) :: Ecto.Queryable.t()
def later_than(query, field_name, %Date{} = date) when is_atom(field_name) do
query
|> where([x], fragment("?::date", field(x, ^field_name)) > ^date)
end
@spec later_than(Ecto.Queryable.t(), atom, %DateTime{}) :: Ecto.Queryable.t()
def later_than(query, field_name, %DateTime{} = date_time) when is_atom(field_name) do
date = DateTime.to_date(date_time)
query
|> later_than(field_name, date)
end
@spec later_than(Ecto.Queryable.t(), atom, String.t | number) :: Ecto.Queryable.t()
def later_than(query, field_name, castable) when is_atom(field_name) and (is_binary(castable) or is_number(castable)) do
value = case cast_to_date_time(castable) do
{:ok, date_time} -> date_time
:error -> cast_to_date!(castable)
end
query
|> later_than(field_name, value)
end
@doc """
Builds a query that finds the last record after sorting by a specified field name ascending.
Optionally, provide an integer `n` to find only the last `n` records.
Puppy |> Plasm.latest(:inserted_at) |> Repo.one
Puppy |> Plasm.latest(:inserted_at, 20) |> Repo.all
"""
@spec latest(Ecto.Queryable.t(), atom) :: Ecto.Queryable.t()
def latest(query, field_name) when is_atom(field_name) do
query
|> latest(field_name, 1)
end
@spec latest(Ecto.Queryable.t(), atom, integer) :: Ecto.Queryable.t()
def latest(query, field_name, n) when is_atom(field_name) and is_integer(n) do
query
|> order_by(desc: ^field_name)
|> limit(^n)
end
@doc """
Builds a maximum query for a given field.
Puppy |> Plasm.maximum(:age) |> Repo.one
Puppy |> Plasm.maximum("age") |> Repo.one
"""
@spec maximum(Ecto.Queryable.t(), String.t) :: Ecto.Queryable.t()
def maximum(query, field_name) when is_binary(field_name) do
field_name = String.to_atom(field_name)
query
|> maximum(field_name)
end
@spec maximum(Ecto.Queryable.t(), atom) :: Ecto.Queryable.t()
def maximum(query, field_name) when is_atom(field_name) do
query
|> select([x], max(field(x, ^field_name)))
end
@doc """
Builds a minimum query for a given field.
Puppy |> Plasm.minimum(:age) |> Repo.one
Puppy |> Plasm.minimum("age") |> Repo.one
"""
@spec minimum(Ecto.Queryable.t(), String.t) :: Ecto.Queryable.t()
def minimum(query, field_name) when is_binary(field_name) do
field_name = String.to_atom(field_name)
query
|> minimum(field_name)
end
@spec minimum(Ecto.Queryable.t(), atom) :: Ecto.Queryable.t()
def minimum(query, field_name) when is_atom(field_name) do
query
|> select([x], min(field(x, ^field_name)))
end
@doc """
Builds a query that finds all records on a specified date for a specified field name.
Puppy |> Plasm.on(:inserted_at, date) |> Repo.all
Puppy |> Plasm.on(:inserted_at, "2014-04-17") |> Repo.all
"""
@spec on(Ecto.Queryable.t(), atom, %Date{}) :: Ecto.Queryable.t()
def on(query, field_name, %Date{} = date) when is_atom(field_name) do
query
|> where([x], fragment("?::date", field(x, ^field_name)) == ^date)
end
@spec on(Ecto.Queryable.t(), atom, any) :: Ecto.Queryable.t()
def on(query, field_name, castable) when is_atom(field_name) do
date = cast_to_date!(castable)
query
|> on(field_name, date)
end
@doc """
Builds a query that finds all records on or before a specified date for a specified field name.
Puppy |> Plasm.on_or_earlier_than(date) |> Repo.all
Puppy |> Plasm.on_or_earlier_than("2014-04-17") |> Repo.all
"""
@spec on_or_earlier_than(Ecto.Queryable.t(), atom, %Date{}) :: Ecto.Queryable.t()
def on_or_earlier_than(query, field_name, %Date{} = date) when is_atom(field_name) do
{:ok, next_day_date} =
date
|> Date.to_erl
|> :calendar.date_to_gregorian_days
|> Kernel.+(1)
|> :calendar.gregorian_days_to_date
|> Date.from_erl
query
|> where([x], fragment("?::date", field(x, ^field_name)) < ^next_day_date)
end
@spec on_or_earlier_than(Ecto.Queryable.t(), atom, any) :: Ecto.Queryable.t()
def on_or_earlier_than(query, field_name, castable) when is_atom(field_name) do
date = cast_to_date!(castable)
query
|> on_or_earlier_than(field_name, date)
end
@doc """
Builds a query that finds all records on or after a specified date for a specified field name.
Puppy |> Plasm.on_or_later_than(date) |> Repo.all
Puppy |> Plasm.on_or_later_than("2014-04-17") |> Repo.all
"""
@spec on_or_later_than(Ecto.Queryable.t(), atom, %Date{}) :: Ecto.Queryable.t()
def on_or_later_than(query, field_name, %Date{} = date) when is_atom(field_name) do
query
|> where([x], fragment("?::date", field(x, ^field_name)) >= ^date)
end
@spec on_or_later_than(Ecto.Queryable.t(), atom, any) :: Ecto.Queryable.t()
def on_or_later_than(query, field_name, castable) when is_atom(field_name) do
date = cast_to_date!(castable)
query
|> on_or_later_than(field_name, date)
end
@doc """
Builds a query that grabs a random record.
Optionally, provide an integer `n` to fetch `n` random records.
Puppy |> Plasm.random |> Repo.one
Puppy |> Plasm.random(20) |> Repo.all
"""
@spec random(Ecto.Queryable.t()) :: Ecto.Queryable.t()
def random(query) do
query
|> random(1)
end
@spec random(Ecto.Queryable.t(), integer) :: Ecto.Queryable.t()
def random(query, n) when is_integer(n) do
# TODO: support databases other than postgres
query
|> order_by([_], fragment("RANDOM()"))
|> limit(^n)
end
@doc """
Builds a sum query for a given field.
Puppy |> Plasm.total(:age) |> Repo.one
Puppy |> Plasm.total("age") |> Repo.one
"""
@spec total(Ecto.Queryable.t(), String.t) :: Ecto.Queryable.t()
def total(query, field_name) when is_binary(field_name) do
field_name = String.to_atom(field_name)
query
|> total(field_name)
end
@spec total(Ecto.Queryable.t(), atom) :: Ecto.Queryable.t()
def total(query, field_name) when is_atom(field_name) do
query
|> select([x], sum(field(x, ^field_name)))
end
@doc """
Builds a query that finds all records matching all specified field names and values.
Values can be lists or non-lists.
When the values are all non-lists, it simply delegates to `Ecto.Query.where`.
When there is at least one list value, it builds the query itself, using `in` for lists.
Puppy |> Plasm.where_all(name: "Fluffy", age: 3) |> Repo.all
Puppy |> Plasm.where_all(name: "Fluffy", age: [3,5,10]) |> Repo.all
"""
@spec where_all(Ecto.Queryable.t(), list) :: Ecto.Queryable.t()
def where_all(query, field_names_and_values) do
contains_at_least_one_list =
  field_names_and_values
  |> Keyword.values()
  |> Enum.any?(&is_list/1)
query
|> do_where_all(field_names_and_values, contains_at_least_one_list)
end
@doc """
Builds a query that finds all records matching none of the specified field names and values.
Values can be lists or non-lists.
Non-list expressions result in a `!=` comparison.
List expressions result in a `not in` comparison.
Puppy |> Plasm.where_none(name: "Fluffy", age: 3) |> Repo.all
Puppy |> Plasm.where_none(name: "Fluffy", age: [3,5,10]) |> Repo.all
"""
@spec where_none(Ecto.Queryable.t(), list) :: Ecto.Queryable.t()
def where_none(query, field_names_and_values) do
Enum.reduce(field_names_and_values, query, fn ({field_name, field_value}, query) ->
generate_where_clause_for_where_none(query, field_name, field_value)
end)
end
# PRIVATE ##################################################
defp primary_key(query) do
[key] = model(query).__schema__(:primary_key)
key
end
defp model(%Ecto.Query{from: %Ecto.Query.FromExpr{source: {_, model_or_query}}}) do
model(model_or_query)
end
defp model(model), do: model
defp generate_where_clause_for_where_all(query, field_name, field_value) when is_list(field_value) do
query
|> where([x], field(x, ^field_name) in ^field_value)
end
defp generate_where_clause_for_where_all(query, field_name, field_value) do
query
|> where([x], field(x, ^field_name) == ^field_value)
end
defp generate_where_clause_for_where_none(query, field_name, field_value) when is_list(field_value) do
query
|> where([x], field(x, ^field_name) not in ^field_value)
end
defp generate_where_clause_for_where_none(query, field_name, field_value) do
query
|> where([x], field(x, ^field_name) != ^field_value)
end
defp do_where_all(query, field_names_and_values, true) do
Enum.reduce(field_names_and_values, query, fn ({field_name, field_value}, query) ->
generate_where_clause_for_where_all(query, field_name, field_value)
end)
end
defp do_where_all(query, field_names_and_values, false) do
query
|> where(^field_names_and_values)
end
defp cast_to_date_time(castable) when is_binary(castable) do
case DateTime.from_iso8601(castable) do
{:ok, date_time, _} -> {:ok, date_time}
{:error, _} -> :error
end
end
defp cast_to_date_time(castable) when is_number(castable) do
case DateTime.from_unix(castable) do
{:ok, date_time} -> {:ok, date_time}
{:error, _} -> :error
end
end
defp cast_to_date_time!(castable) when is_binary(castable) do
case DateTime.from_iso8601(castable) do
{:ok, date_time, _} -> date_time
{:error, _} -> raise ArgumentError, message: "invalid argument when casting to DateTime: #{inspect(castable)}"
end
end
defp cast_to_date_time!(castable) when is_number(castable) do
case DateTime.from_unix(castable) do
{:ok, date_time} -> date_time
{:error, _} -> raise ArgumentError, message: "invalid argument when casting to DateTime: #{inspect(castable)}"
end
end
defp cast_to_date!(castable) do
case Date.from_iso8601(castable) do
{:ok, date} -> date
{:error, _} ->
raise ArgumentError, message: "invalid argument when casting to Date: #{inspect(castable)}"
end
end
end
# ==== file: apps/day23/test/day23_test.exs (repo: jwarwick/aoc_2019, license: MIT) ====
defmodule Day23Test do
use ExUnit.Case
doctest Day23
test "part1" do
assert Day23.part2 == :val
end
end
# ==== file: config/config.exs (repo: sschneider1207/gen_gossip, license: MIT) ====
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :gen_gossip, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:gen_gossip, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
# ==== file: lib/helper/sender.ex (repo: mishka-group/mishka_installer, license: Apache-2.0) ====
defmodule MishkaInstaller.Helper.Sender do
@moduledoc """
At first, we only try to get basic package information from the `hex.pm` website; after a few more
releases of MishkaInstaller, this API can also become useful for managing packages from the admin panel.
**Ref: `https://github.com/hexpm/hexpm/issues/1124`**
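
For example, fetching basic package information from Hex (the package name is illustrative):

    MishkaInstaller.Helper.Sender.package("hex", %{"app" => "decimal"})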
"""
@request_name HexClientApi
alias MishkaInstaller.Helper.Extra
@type app :: map()
@spec package(String.t(), app()) :: list | {:error, :package, :mix_file | :not_found | :not_tag | :unhandled} | {:ok, :package, any}
def package("hex", %{"app" => name} = _app) do
send_build(:get, "https://hex.pm/api/packages/#{name}")
end
def package("github", %{"url" => url, "tag" => tag} = _app) when not is_nil(url) and not is_nil(tag) do
new_url = String.replace(String.trim(url), "https://github.com/", "https://raw.githubusercontent.com/") <> "/#{String.trim(tag)}/mix.exs"
send_build(:get, new_url, :normal)
|> get_basic_information_form_github(String.trim(tag))
end
def package(_status, _app), do: {:error, :package, :not_tag}
defp send_build(:get, url, request \\ :json) do
Finch.build(:get, url)
|> Finch.request(@request_name)
|> request_handler(request)
end
defp request_handler({:ok, %Finch.Response{body: body, headers: _headers, status: 200}}, :json) do
{:ok, :package, Jason.decode!(body)}
end
defp request_handler({:ok, %Finch.Response{body: body, headers: _headers, status: 200}}, :normal) do
{:ok, :package, body}
end
defp request_handler({:ok, %Finch.Response{status: 404}}, _), do: {:error, :package, :not_found}
defp request_handler(_outputs, _), do: {:error, :package, :unhandled}
defp get_basic_information_form_github({:ok, :package, body}, tag) do
case Code.string_to_quoted(body) do
{:ok, ast} -> Extra.ast_mix_file_basic_information(ast, [:app, :version, :source_url], [{:tag, tag}])
_ -> {:error, :package, :mix_file}
end
end
defp get_basic_information_form_github(output, _tag), do: output
end
# ==== file: apps/web/lib/web_web.ex (repo: renovate-tests/ex, license: MIT) ====
defmodule WebWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use WebWeb, :controller
use WebWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: WebWeb
import Plug.Conn
import WebWeb.Router.Helpers
import WebWeb.Gettext
end
end
def view do
quote do
use Phoenix.View,
root: "lib/web_web/templates",
namespace: WebWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import WebWeb.Router.Helpers
import WebWeb.ErrorHelpers
import WebWeb.Gettext
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import WebWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
# ==== file: test/features/page/on_event_test.exs (repo: Proporus/playwright-elixir, license: MIT) ====
defmodule Test.Features.Page.OnEventTest do
use Playwright.TestCase, async: true
describe "Page.on/3" do
test "on 'close'", %{page: page} do
this = self()
guid = page.guid
Playwright.Page.on(page, "close", fn event ->
send(this, event)
end)
Playwright.Page.close(page)
assert_received({:on, :close, %Playwright.Page{guid: ^guid, initializer: %{isClosed: true}}})
end
# NOTE: this is really about *any* `on` event handling
test "on 'close' of one Page does not affect another", %{browser: browser} do
this = self()
%{guid: guid_one} = page_one = Playwright.Browser.new_page(browser)
%{guid: guid_two} = page_two = Playwright.Browser.new_page(browser)
Playwright.Page.on(page_one, "close", fn {:on, :close, page} ->
send(this, page.guid)
end)
Playwright.Page.close(page_one)
Playwright.Page.close(page_two)
assert_received(^guid_one)
refute_received(^guid_two)
end
test "on 'console'", %{page: page} do
test_pid = self()
Playwright.Page.on(page, "console", fn event ->
send(test_pid, event)
end)
Playwright.Page.evaluate(page, "function () { console.info('lala!'); }")
Playwright.Page.evaluate(page, "console.error('lulu!')")
assert_received({:on, :console, %Playwright.ConsoleMessage{initializer: %{text: "lala!", type: "info"}}})
assert_received({:on, :console, %Playwright.ConsoleMessage{initializer: %{text: "lulu!", type: "error"}}})
end
end
end
# ==== file: clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/accounts_list_response.ex (repo: matehat/elixir-google-api, license: Apache-2.0) ====
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V33.Model.AccountsListResponse do
@moduledoc """
Account List Response
## Attributes
* `accounts` (*type:* `list(GoogleApi.DFAReporting.V33.Model.Account.t)`, *default:* `nil`) - Account collection.
* `kind` (*type:* `String.t`, *default:* `dfareporting#accountsListResponse`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#accountsListResponse".
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Pagination token to be used for the next list operation.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:accounts => list(GoogleApi.DFAReporting.V33.Model.Account.t()),
:kind => String.t(),
:nextPageToken => String.t()
}
field(:accounts, as: GoogleApi.DFAReporting.V33.Model.Account, type: :list)
field(:kind)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.AccountsListResponse do
def decode(value, options) do
GoogleApi.DFAReporting.V33.Model.AccountsListResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.AccountsListResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.490566 | 186 | 0.73226 |
9efe5f42a0c4a028f5de2f3d61c9bfd5f9571982 | 26 | ex | Elixir | lib/toy_robot.ex | lowski/toy-robot | 107e5c68c1ba0813be9efb3949f6fde348aaa05b | [
"MIT"
] | null | null | null | lib/toy_robot.ex | lowski/toy-robot | 107e5c68c1ba0813be9efb3949f6fde348aaa05b | [
"MIT"
] | null | null | null | lib/toy_robot.ex | lowski/toy-robot | 107e5c68c1ba0813be9efb3949f6fde348aaa05b | [
"MIT"
] | null | null | null | defmodule ToyRobot do
end
| 8.666667 | 21 | 0.846154 |
9efe68babac15266d1a9b0194f35f0fde485335f | 2,566 | ex | Elixir | apps/ex_wire/lib/ex_wire/message.ex | atoulme/ethereum | cebb0756c7292ac266236636d2ab5705cb40a52e | [
"MIT"
] | 14 | 2017-08-21T06:14:49.000Z | 2020-05-15T12:00:52.000Z | apps/ex_wire/lib/ex_wire/message.ex | atoulme/ethereum | cebb0756c7292ac266236636d2ab5705cb40a52e | [
"MIT"
] | 7 | 2017-08-11T07:50:14.000Z | 2018-08-23T20:42:50.000Z | apps/ex_wire/lib/ex_wire/message.ex | atoulme/ethereum | cebb0756c7292ac266236636d2ab5705cb40a52e | [
"MIT"
] | 3 | 2017-08-20T17:56:41.000Z | 2018-08-21T00:36:10.000Z | defmodule ExWire.Message do
@moduledoc """
Defines a behavior for messages so that they can be
easily encoded and decoded.
"""
defmodule UnknownMessageError do
defexception [:message]
end
@type t :: module()
@type message_id :: integer()
@callback message_id() :: message_id
@callback encode(t) :: binary()
  @callback to(t) :: ExWire.Struct.Endpoint.t | nil
@message_types %{
0x01 => ExWire.Message.Ping,
0x02 => ExWire.Message.Pong,
0x03 => ExWire.Message.FindNeighbours,
0x04 => ExWire.Message.Neighbours,
}
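
  # Minimal sketch of a module adopting this behaviour (hypothetical message
  # type 0x05; the real implementations are the modules in @message_types, and
  # ExRLP is assumed as the RLP encoder used elsewhere in ExWire). Each
  # implementation must supply message_id/0, encode/1 and to/1 so that
  # ExWire.Message.encode/1 below can prepend the id byte:
  #
  #   defmodule ExWire.Message.MyMessage do
  #     @behaviour ExWire.Message
  #
  #     defstruct [:to, :timestamp]
  #
  #     @impl true
  #     def message_id, do: 0x05
  #
  #     @impl true
  #     def encode(%__MODULE__{timestamp: ts}), do: ExRLP.encode([ts])
  #
  #     @impl true
  #     def to(%__MODULE__{to: to}), do: to
  #   end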
@doc """
  Decodes a message of given `type` based on the encoded
  data. Effectively reverses the `encode/1` function.
## Examples
iex> ExWire.Message.decode(0x01, <<210, 1, 199, 132, 1, 2, 3, 4, 128, 5, 199, 132, 5, 6, 7, 8, 6, 128, 4>>)
%ExWire.Message.Ping{
version: 1,
from: %ExWire.Struct.Endpoint{ip: {1, 2, 3, 4}, tcp_port: 5, udp_port: nil},
to: %ExWire.Struct.Endpoint{ip: {5, 6, 7, 8}, tcp_port: nil, udp_port: 6},
timestamp: 4
}
iex> ExWire.Message.decode(0x02, <<202, 199, 132, 5, 6, 7, 8, 6, 128, 2, 3>>)
%ExWire.Message.Pong{
to: %ExWire.Struct.Endpoint{ip: {5, 6, 7, 8}, tcp_port: nil, udp_port: 6}, hash: <<2>>, timestamp: 3
}
iex> ExWire.Message.decode(0x99, <<>>)
** (ExWire.Message.UnknownMessageError) Unknown message type: 0x99
"""
@spec decode(integer(), binary()) :: t
def decode(type, data) do
case @message_types[type] do
nil -> raise UnknownMessageError, "Unknown message type: #{inspect type, base: :hex}"
mod -> mod.decode(data)
end
end
@doc """
Encoded a message by concatting its `message_id` to
the encoded data of the message itself.
## Examples
iex> ExWire.Message.encode(
...> %ExWire.Message.Ping{
...> version: 1,
...> from: %ExWire.Struct.Endpoint{ip: {1, 2, 3, 4}, tcp_port: 5, udp_port: nil},
...> to: %ExWire.Struct.Endpoint{ip: {5, 6, 7, 8}, tcp_port: nil, udp_port: 6},
...> timestamp: 4
...> }
...> )
<<1, 214, 1, 201, 132, 1, 2, 3, 4, 128, 130, 0, 5, 201, 132, 5, 6, 7, 8, 130, 0, 6, 128, 4>>
iex> ExWire.Message.encode(%ExWire.Message.Pong{to: %ExWire.Struct.Endpoint{ip: {5, 6, 7, 8}, tcp_port: nil, udp_port: 6}, hash: <<2>>, timestamp: 3})
<<2, 204, 201, 132, 5, 6, 7, 8, 130, 0, 6, 128, 2, 3>>
"""
@spec encode(t) :: binary()
def encode(message) do
<<message.__struct__.message_id()>> <> message.__struct__.encode(message)
end
end | 32.481013 | 156 | 0.593141 |
9efe7558037e8b9ab5ecb94cb30f79ba9a63551d | 439 | exs | Elixir | mix.exs | membraneframework/membrane_ogg_format | 8d82b1f4bce00c24bdfaa6757303b92354dc2fc2 | [
"Apache-2.0"
] | 1 | 2021-04-29T06:47:45.000Z | 2021-04-29T06:47:45.000Z | mix.exs | membraneframework/membrane_ogg_format | 8d82b1f4bce00c24bdfaa6757303b92354dc2fc2 | [
"Apache-2.0"
] | null | null | null | mix.exs | membraneframework/membrane_ogg_format | 8d82b1f4bce00c24bdfaa6757303b92354dc2fc2 | [
"Apache-2.0"
] | 1 | 2020-11-18T14:22:52.000Z | 2020-11-18T14:22:52.000Z | defmodule MembraneOggFormat.MixProject do
use Mix.Project
@version "0.1.0"
def project do
[
app: :membrane_ogg_format,
version: @version,
elixir: "~> 1.10",
deps: deps()
]
end
def application do
[
extra_applications: []
]
end
defp deps do
[
{:ex_doc, "~> 0.21.0", only: :dev, runtime: false},
{:dialyxir, "~> 1.0", only: :dev, runtime: false}
]
end
end
| 15.678571 | 57 | 0.546697 |
9efe94b607e065a247f8a556c273dbab2ebb75ee | 3,716 | ex | Elixir | lib/blockfrost/cardano/assets.ex | blockfrost/blockfrost-elixir | b1f8ea7ae47cd3a7037e1c9ed0d3691fc775bdec | [
"Apache-2.0"
] | 13 | 2021-08-31T03:54:37.000Z | 2022-01-30T17:39:40.000Z | lib/blockfrost/cardano/assets.ex | blockfrost/blockfrost-elixir | b1f8ea7ae47cd3a7037e1c9ed0d3691fc775bdec | [
"Apache-2.0"
] | 6 | 2021-08-30T04:45:52.000Z | 2021-09-23T09:15:08.000Z | lib/blockfrost/cardano/assets.ex | blockfrost/blockfrost-elixir | b1f8ea7ae47cd3a7037e1c9ed0d3691fc775bdec | [
"Apache-2.0"
] | null | null | null | defmodule Blockfrost.Cardano.Assets do
@moduledoc "Functions for to the /assets namespace in the Blockfrost API"
alias Blockfrost.HTTP
alias Blockfrost.Response
alias Blockfrost.Utils
alias Blockfrost.Response.{
AssetsResponse,
SpecificAssetResponse,
AssetHistoryResponse,
AssetTransactionsResponse,
AssetAddressesResponse,
SpecificPolicyAssetsResponse
}
@doc """
  List of assets
Supports pagination.
[API Docs](https://docs.blockfrost.io/#tag/Cardano-Assets/paths/~1assets/get)
"""
@spec assets(Blockfrost.t(), Keyword.t()) :: {:ok, AssetsResponse.t()} | HTTP.error_response()
def assets(name, opts \\ []) do
Utils.validate_cardano!(name)
opts = Utils.extract_pagination(opts)
name
|> HTTP.build_and_send(:get, "/assets", opts)
|> Response.deserialize(AssetsResponse)
end
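
  # Hedged usage sketch (assumes a Blockfrost client started elsewhere under
  # the name :my_blockfrost; the page/count option names are assumptions based
  # on the Blockfrost pagination parameters handled by extract_pagination/1):
  #
  #   {:ok, %AssetsResponse{} = response} =
  #     Blockfrost.Cardano.Assets.assets(:my_blockfrost, page: 1, count: 10)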
@doc """
Information about a specific asset
[API Docs](https://docs.blockfrost.io/#tag/Cardano-Assets/paths/~1assets~1{asset}/get)
"""
@spec specific_asset(Blockfrost.t(), String.t(), Keyword.t()) ::
{:ok, SpecificAssetResponse.t()} | HTTP.error_response()
def specific_asset(name, asset, opts \\ []) do
Utils.validate_cardano!(name)
name
|> HTTP.build_and_send(:get, "/assets/#{asset}", opts)
|> Response.deserialize(SpecificAssetResponse)
end
@doc """
History of a specific asset
Supports pagination.
[API Docs](https://docs.blockfrost.io/#tag/Cardano-Assets/paths/~1assets~1{asset}~1history/get)
"""
@spec asset_history(Blockfrost.t(), String.t(), Keyword.t()) ::
{:ok, AssetHistoryResponse.t()} | HTTP.error_response()
def asset_history(name, asset, opts \\ []) do
Utils.validate_cardano!(name)
opts = Utils.extract_pagination(opts)
name
|> HTTP.build_and_send(:get, "/assets/#{asset}/history", opts)
|> Response.deserialize(AssetHistoryResponse)
end
@doc """
  List of transactions for a specific asset
Supports pagination.
[API Docs](https://docs.blockfrost.io/#tag/Cardano-Assets/paths/~1assets~1{asset}~1transactions/get)
"""
@spec asset_transactions(Blockfrost.t(), String.t(), Keyword.t()) ::
{:ok, AssetTransactionsResponse.t()} | HTTP.error_response()
def asset_transactions(name, asset, opts \\ []) do
Utils.validate_cardano!(name)
opts = Utils.extract_pagination(opts)
name
|> HTTP.build_and_send(:get, "/assets/#{asset}/transactions", opts)
|> Response.deserialize(AssetTransactionsResponse)
end
@doc """
  List of addresses containing a specific asset
Supports pagination.
[API Docs](https://docs.blockfrost.io/#tag/Cardano-Assets/paths/~1assets~1{asset}~1addresses/get)
"""
@spec asset_addresses(Blockfrost.t(), String.t(), Keyword.t()) ::
{:ok, AssetAddressesResponse.t()} | HTTP.error_response()
def asset_addresses(name, asset, opts \\ []) do
Utils.validate_cardano!(name)
opts = Utils.extract_pagination(opts)
name
|> HTTP.build_and_send(:get, "/assets/#{asset}/addresses", opts)
|> Response.deserialize(AssetAddressesResponse)
end
@doc """
  List of assets minted under a specific policy
Supports pagination.
[API Docs](https://docs.blockfrost.io/#tag/Cardano-Assets/paths/~1assets~1policy~1{policy_id}/get)
"""
@spec specific_policy_assets(Blockfrost.t(), String.t(), Keyword.t()) ::
{:ok, SpecificPolicyAssetsResponse.t()} | HTTP.error_response()
def specific_policy_assets(name, policy_id, opts \\ []) do
Utils.validate_cardano!(name)
opts = Utils.extract_pagination(opts)
name
|> HTTP.build_and_send(:get, "/assets/policy/#{policy_id}", opts)
|> Response.deserialize(SpecificPolicyAssetsResponse)
end
end
| 30.710744 | 102 | 0.692142 |
9efe9bd38fbefe60149185a0f7b5656bd9c8cc43 | 476 | exs | Elixir | test/history/products/all_test.exs | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 20 | 2021-08-06T01:09:48.000Z | 2022-03-28T18:44:56.000Z | test/history/products/all_test.exs | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 13 | 2021-08-21T21:17:02.000Z | 2022-03-27T06:33:51.000Z | test/history/products/all_test.exs | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 2 | 2021-09-23T11:31:59.000Z | 2022-01-09T16:19:35.000Z | defmodule History.Products.AllTest do
use History.DataCase
alias History.Factories
test "returns all products" do
{:ok, spot_product} = Factories.Product.create(%{symbol: "btc/usd", type: :spot})
{:ok, swap_product} = Factories.Product.create(%{symbol: "btc-perp", type: :swap})
products = History.Products.all()
assert Enum.count(products) == 2
assert Enum.member?(products, spot_product)
assert Enum.member?(products, swap_product)
end
end
| 31.733333 | 86 | 0.707983 |
9efed825467b929df02b9a4edc334be2598d83c8 | 695 | exs | Elixir | test/ecdsa_test.exs | PinkDiamond1/ecdsa-elixir | dd1953cc48974b5bfb9461d3a2179d1d937b264d | [
"MIT"
] | null | null | null | test/ecdsa_test.exs | PinkDiamond1/ecdsa-elixir | dd1953cc48974b5bfb9461d3a2179d1d937b264d | [
"MIT"
] | null | null | null | test/ecdsa_test.exs | PinkDiamond1/ecdsa-elixir | dd1953cc48974b5bfb9461d3a2179d1d937b264d | [
"MIT"
] | null | null | null | defmodule EcdsaTest do
use ExUnit.Case
alias EllipticCurve.{PrivateKey, Ecdsa}
test "verify right message" do
privateKey = PrivateKey.generate()
publicKey = PrivateKey.getPublicKey(privateKey)
message = "This is the right message"
signature = Ecdsa.sign(message, privateKey)
assert Ecdsa.verify?(message, signature, publicKey)
end
test "verify wrong message" do
privateKey = PrivateKey.generate()
publicKey = PrivateKey.getPublicKey(privateKey)
message1 = "This is the right message"
message2 = "This is the wrong message"
signature = Ecdsa.sign(message1, privateKey)
    refute Ecdsa.verify?(message2, signature, publicKey)
end
end
| 23.965517 | 57 | 0.723741 |
9eff05a0d7666dba8ed89adbfe82e481a430ab12 | 1,244 | ex | Elixir | lib/rixile.ex | gentom/rixile | fb9e65acf4c4e38154fddc393d669a3d01dd0005 | [
"MIT"
] | null | null | null | lib/rixile.ex | gentom/rixile | fb9e65acf4c4e38154fddc393d669a3d01dd0005 | [
"MIT"
] | null | null | null | lib/rixile.ex | gentom/rixile | fb9e65acf4c4e38154fddc393d669a3d01dd0005 | [
"MIT"
] | null | null | null | defmodule Rixile do
use Rixile.Router
def route("GET", ["users", user_id], conn) do
conn |> Plug.Conn.send_resp(200, "You requested user #{user_id}")
end
def route("POST", ["users"], conn) do
IO.puts "POST"
end
def route(_method, _path, conn) do
conn |> Plug.Conn.send_resp(404, "Couldn't find that page")
end
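
  # Dispatch sketch (assumes `use Rixile.Router` parses the request and calls
  # route/3 with the method and the split path, e.g. for "GET /users/42"):
  #
  #   conn = %Plug.Conn{method: "GET", path_info: ["users", "42"]}
  #   Rixile.route(conn.method, conn.path_info, conn)
  #   # => replies 200 "You requested user 42"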
@moduledoc """
Documentation for Rixile.
"""
@doc """
Hello world.
## Examples
iex> Rixile.hello
:world
"""
'''
def hello do
:world
end
def init(default_opts) do
IO.puts "starting up Rixile..."
default_opts
end
def call(conn, _opts) do
IO.puts "Calling Rixile call"
Plug.Conn.send_resp(conn, 200, "Hello Rixile :)")
end
def call(conn, _opts) do
IO.puts "Calling Rixile call"
# `A() |> B() |> C()` means `C(B(A))`
# `conn |> Plug.Conn.put_resp_header("Server", "Plug")` is is equivalent to `Plug.Conn.put_resp_header(conn, "Server", "Plug")`
conn |> Plug.Conn.put_resp_header("Server", "Plug") |> Plug.Conn.send_resp(200, "Hello Rixile :)")
# you can rewrite the line above
# conn2 = Plug.Conn.put_resp_header(conn, "Server", "Plug")
# Plug.Conn.send_resp(conn2, 200, "Hello Rixile :)")
end
'''
end
| 21.448276 | 131 | 0.614148 |
9eff31ddf5e51017740ab7caeeaf22a512628841 | 798 | exs | Elixir | containers/chat/test/chat_web/channels/awayteam_channel_test.exs | shiftyp/awayteam | 297e21305a109af3aee9ac3dc5820f8220c1b380 | [
"MIT"
] | null | null | null | containers/chat/test/chat_web/channels/awayteam_channel_test.exs | shiftyp/awayteam | 297e21305a109af3aee9ac3dc5820f8220c1b380 | [
"MIT"
] | 6 | 2021-03-09T19:50:06.000Z | 2022-02-26T18:41:08.000Z | containers/chat/test/chat_web/channels/awayteam_channel_test.exs | shiftyp/awayteam | 297e21305a109af3aee9ac3dc5820f8220c1b380 | [
"MIT"
] | null | null | null | defmodule ChatWeb.AwayteamChannelTest do
use ChatWeb.ChannelCase
alias ChatWeb.AwayteamChannel
setup do
{:ok, _, socket} =
socket("user_id", %{some: :assign})
|> subscribe_and_join(AwayteamChannel, "awayteam:lobby")
{:ok, socket: socket}
end
test "ping replies with status ok", %{socket: socket} do
ref = push socket, "ping", %{"hello" => "there"}
assert_reply ref, :ok, %{"hello" => "there"}
end
test "shout broadcasts to awayteam:lobby", %{socket: socket} do
push socket, "shout", %{"hello" => "all"}
assert_broadcast "shout", %{"hello" => "all"}
end
test "broadcasts are pushed to the client", %{socket: socket} do
broadcast_from! socket, "broadcast", %{"some" => "data"}
assert_push "broadcast", %{"some" => "data"}
end
end
| 27.517241 | 66 | 0.635338 |
9eff7702037ea9532c6044c1a653a5d657730d04 | 479 | exs | Elixir | config/config.exs | xfumihiro/elasticsearch-elixir | bd71da03f54dd0c1b892d60472d05a4fe2be441e | [
"MIT"
] | null | null | null | config/config.exs | xfumihiro/elasticsearch-elixir | bd71da03f54dd0c1b892d60472d05a4fe2be441e | [
"MIT"
] | null | null | null | config/config.exs | xfumihiro/elasticsearch-elixir | bd71da03f54dd0c1b892d60472d05a4fe2be441e | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :elasticsearch, Elasticsearch.Test.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "elasticsearch_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox,
priv: "test/support/"
config :elasticsearch, ecto_repos: [Elasticsearch.Test.Repo]
config :logger, level: :warn
| 28.176471 | 61 | 0.759916 |
9eff949d9488991f72759db8d958b6695f47a414 | 1,115 | exs | Elixir | flight_booking/test/users/create_or_update_test.exs | joabehenrique/flight-booking | d0cb4ead0e3a14b8d15f913d545d6b4168795d2b | [
"MIT"
] | null | null | null | flight_booking/test/users/create_or_update_test.exs | joabehenrique/flight-booking | d0cb4ead0e3a14b8d15f913d545d6b4168795d2b | [
"MIT"
] | null | null | null | flight_booking/test/users/create_or_update_test.exs | joabehenrique/flight-booking | d0cb4ead0e3a14b8d15f913d545d6b4168795d2b | [
"MIT"
] | null | null | null | defmodule Flightex.Users.CreateOrUpdateTest do
use ExUnit.Case, async: true
alias Flightex.Users.{Agent, CreateOrUpdate}
describe "call/1" do
setup do
Agent.start_link(%{})
      # Agent.start_link will start the 2 agents before the test
      # It must be implemented for the tests to pass
:ok
end
test "when all params are valid, return a tuple" do
params = %{
name: "Jp",
email: "[email protected]",
cpf: "12345678900"
}
CreateOrUpdate.call(params)
{_ok, response} = Agent.get(params.cpf)
expected_response = %Flightex.Users.User{
cpf: "12345678900",
email: "[email protected]",
id: response.id,
name: "Jp"
}
assert response == expected_response
end
test "when cpf is a integer, returns an error" do
params = %{
name: "Jp",
email: "[email protected]",
cpf: 12_345_678_900
}
expected_response = {:error, "Cpf must be a String"}
response = CreateOrUpdate.call(params)
assert response == expected_response
end
end
end
| 22.3 | 65 | 0.600897 |
9effe73d995e089b74f3bbbe780b5061ee413458 | 13,917 | exs | Elixir | test/response_test.exs | kalamarski-marcin/bolt_sips | fe0e64d69c50bf41d100978628daff5a50f1d44c | [
"Apache-2.0"
] | null | null | null | test/response_test.exs | kalamarski-marcin/bolt_sips | fe0e64d69c50bf41d100978628daff5a50f1d44c | [
"Apache-2.0"
] | null | null | null | test/response_test.exs | kalamarski-marcin/bolt_sips | fe0e64d69c50bf41d100978628daff5a50f1d44c | [
"Apache-2.0"
] | null | null | null | defmodule ResponseTest do
use ExUnit.Case
alias Bolt.Sips.Response
# import ExUnit.CaptureLog
@explain [
success: %{"fields" => ["n"], "t_first" => 1},
success: %{
"bookmark" => "neo4j:bookmark:v1:tx13440",
"plan" => %{
"args" => %{
"EstimatedRows" => 1.0,
"planner" => "COST",
"planner-impl" => "IDP",
"planner-version" => "3.5",
"runtime" => "INTERPRETED",
"runtime-impl" => "INTERPRETED",
"runtime-version" => "3.5",
"version" => "CYPHER 3.5"
},
"children" => [
%{
"args" => %{"EstimatedRows" => 1.0},
"children" => [],
"identifiers" => ["n"],
"operatorType" => "Create"
}
],
"identifiers" => ["n"],
"operatorType" => "ProduceResults"
},
"t_last" => 0,
"type" => "rw"
}
]
@notifications [
success: %{"fields" => ["n", "m"], "t_first" => 0},
success: %{
"bookmark" => "neo4j:bookmark:v1:tx13440",
"notifications" => [
%{
"code" => "Neo.ClientNotification.Statement.CartesianProductWarning",
"description" => "bad juju",
"position" => %{"column" => 9, "line" => 1, "offset" => 8},
"severity" => "WARNING",
"title" => "This query builds a cartesian product between disconnected patterns."
}
],
"plan" => %{
"args" => %{
"EstimatedRows" => 36.0,
"planner" => "COST",
"planner-impl" => "IDP",
"planner-version" => "3.5",
"runtime" => "INTERPRETED",
"runtime-impl" => "INTERPRETED",
"runtime-version" => "3.5",
"version" => "CYPHER 3.5"
},
"children" => [
%{
"args" => %{"EstimatedRows" => 36.0},
"children" => [
%{
"args" => %{"EstimatedRows" => 6.0},
"children" => [],
"identifiers" => ["n"],
"operatorType" => "AllNodesScan"
},
%{
"args" => %{"EstimatedRows" => 6.0},
"children" => [],
"identifiers" => ["m"],
"operatorType" => "AllNodesScan"
}
],
"identifiers" => ["m", "n"],
"operatorType" => "CartesianProduct"
}
],
"identifiers" => ["m", "n"],
"operatorType" => "ProduceResults"
},
"t_last" => 0,
"type" => "r"
}
]
@profile_no_results [
success: %{"fields" => [], "t_first" => 20},
success: %{
"bookmark" => "neo4j:bookmark:v1:tx48642",
"profile" => %{
"args" => %{
"DbHits" => 0,
"EstimatedRows" => 1.0,
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 0,
"planner" => "COST",
"planner-impl" => "IDP",
"planner-version" => "3.5",
"runtime" => "SLOTTED",
"runtime-impl" => "SLOTTED",
"runtime-version" => "3.5",
"version" => "CYPHER 3.5"
},
"children" => [
%{
"args" => %{
"DbHits" => 0,
"EstimatedRows" => 1.0,
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 0
},
"children" => [
%{
"args" => %{
"DbHits" => 3,
"EstimatedRows" => 1.0,
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 1
},
"children" => [],
"dbHits" => 3,
"identifiers" => ["n"],
"operatorType" => "Create",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 1
}
],
"dbHits" => 0,
"identifiers" => ["n"],
"operatorType" => "EmptyResult",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 0
}
],
"dbHits" => 0,
"identifiers" => ["n"],
"operatorType" => "ProduceResults",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 0
},
"stats" => %{
"labels-added" => 1,
"nodes-created" => 1,
"properties-set" => 1
},
"t_last" => 0,
"type" => "w"
}
]
@profile_results [
success: %{"fields" => ["num"], "t_first" => 1},
record: [1],
success: %{
"bookmark" => "neo4j:bookmark:v1:tx48642",
"profile" => %{
"args" => %{
"DbHits" => 0,
"EstimatedRows" => 1.0,
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 1,
"Time" => 25980,
"planner" => "COST",
"planner-impl" => "IDP",
"planner-version" => "3.5",
"runtime" => "COMPILED",
"runtime-impl" => "COMPILED",
"runtime-version" => "3.5",
"version" => "CYPHER 3.5"
},
"children" => [
%{
"args" => %{
"DbHits" => 0,
"EstimatedRows" => 1.0,
"Expressions" => "{num : $` AUTOINT0`}",
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 1,
"Time" => 42285
},
"children" => [],
"dbHits" => 0,
"identifiers" => ["num"],
"operatorType" => "Projection",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 1
}
],
"dbHits" => 0,
"identifiers" => ["num"],
"operatorType" => "ProduceResults",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 1
},
"t_last" => 0,
"type" => "r"
}
]
describe "Response as Enumerable" do
test "a simple query" do
conn = Bolt.Sips.conn()
response = Bolt.Sips.query!(conn, "RETURN 300 AS r")
assert %Response{results: [%{"r" => 300}]} = response
assert response |> Enum.member?("r")
assert 1 = response |> Enum.count()
assert [%{"r" => 300}] = response |> Enum.take(1)
assert %{"r" => 300} = response |> Response.first()
end
@unwind %Bolt.Sips.Response{
      records: [[1], [2], [3], [4], [5], [6], [7], [8], [9], [10]],
results: [
%{"n" => 1},
%{"n" => 2},
%{"n" => 3},
%{"n" => 4},
%{"n" => 5},
%{"n" => 6},
%{"n" => 7},
%{"n" => 8},
%{"n" => 9},
%{"n" => 10}
]
}
test "reduce: UNWIND range(1, 10) AS n RETURN n" do
sum = Enum.reduce(@unwind, 0, &(&1["n"] + &2))
assert 55 == sum
end
test "slice: UNWIND range(1, 10) AS n RETURN n" do
slice = Enum.slice(@unwind, 0..2)
assert [%{"n" => 1}, %{"n" => 2}, %{"n" => 3}] == slice
end
end
describe "Success" do
test "with valid EXPLAIN" do
assert %Response{
bookmark: nil,
fields: ["n"],
notifications: [],
plan: %{
"args" => %{
"EstimatedRows" => 1.0,
"planner" => "COST",
"planner-impl" => "IDP",
"planner-version" => "3.5",
"runtime" => "INTERPRETED",
"runtime-impl" => "INTERPRETED",
"runtime-version" => "3.5",
"version" => "CYPHER 3.5"
},
"children" => [
%{
"args" => %{"EstimatedRows" => 1.0},
"children" => [],
"identifiers" => ["n"],
"operatorType" => "Create"
}
],
"identifiers" => ["n"],
"operatorType" => "ProduceResults"
},
profile: nil,
records: [],
results: [],
stats: [],
type: "rw"
} = Response.transform!(@explain)
end
test "with Notifications" do
%Response{notifications: [notifications | _rest]} = Response.transform!(@notifications)
assert %{
"code" => "Neo.ClientNotification.Statement.CartesianProductWarning",
"description" => "bad juju",
"position" => %{"column" => 9, "line" => 1, "offset" => 8},
"severity" => "WARNING",
"title" => "This query builds a cartesian product between disconnected patterns."
} = notifications
end
test "with Profile (without results)" do
%Response{plan: nil, profile: profile, stats: stats} =
Response.transform!(@profile_no_results)
assert %{
"args" => %{
"DbHits" => 0,
"EstimatedRows" => 1.0,
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 0,
"planner" => "COST",
"planner-impl" => "IDP",
"planner-version" => "3.5",
"runtime" => "SLOTTED",
"runtime-impl" => "SLOTTED",
"runtime-version" => "3.5",
"version" => "CYPHER 3.5"
},
"children" => [
%{
"args" => %{
"DbHits" => 0,
"EstimatedRows" => 1.0,
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 0
},
"children" => [
%{
"args" => %{
"DbHits" => 3,
"EstimatedRows" => 1.0,
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 1
},
"children" => [],
"dbHits" => 3,
"identifiers" => ["n"],
"operatorType" => "Create",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 1
}
],
"dbHits" => 0,
"identifiers" => ["n"],
"operatorType" => "EmptyResult",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 0
}
],
"dbHits" => 0,
"identifiers" => ["n"],
"operatorType" => "ProduceResults",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 0
} = profile
assert %{
"labels-added" => 1,
"nodes-created" => 1,
"properties-set" => 1
} = stats
end
test "with Profile (with results)" do
%Response{plan: nil, profile: profile, stats: [], records: records, results: results} =
Response.transform!(@profile_results)
assert %{
"args" => %{
"DbHits" => 0,
"EstimatedRows" => 1.0,
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 1,
"Time" => 25980,
"planner" => "COST",
"planner-impl" => "IDP",
"planner-version" => "3.5",
"runtime" => "COMPILED",
"runtime-impl" => "COMPILED",
"runtime-version" => "3.5",
"version" => "CYPHER 3.5"
},
"children" => [
%{
"args" => %{
"DbHits" => 0,
"EstimatedRows" => 1.0,
"Expressions" => "{num : $` AUTOINT0`}",
"PageCacheHitRatio" => 0.0,
"PageCacheHits" => 0,
"PageCacheMisses" => 0,
"Rows" => 1,
"Time" => 42285
},
"children" => [],
"dbHits" => 0,
"identifiers" => ["num"],
"operatorType" => "Projection",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 1
}
],
"dbHits" => 0,
"identifiers" => ["num"],
"operatorType" => "ProduceResults",
"pageCacheHitRatio" => 0.0,
"pageCacheHits" => 0,
"pageCacheMisses" => 0,
"rows" => 1
} = profile
assert [%{"num" => 1}] = results
end
end
end
| 31.41535 | 96 | 0.367752 |
9efff685bcdaca194c9388301c27e38e6365da2b | 2,907 | exs | Elixir | test/services/translations_renderer_test.exs | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | test/services/translations_renderer_test.exs | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | test/services/translations_renderer_test.exs | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | defmodule AccentTest.TranslationsRenderer do
use Accent.RepoCase
alias Accent.{
Repo,
User,
ProjectCreator,
Language,
Translation,
Document,
TranslationsRenderer
}
@user %User{email: "[email protected]"}
setup do
user = Repo.insert!(@user)
language = Repo.insert!(%Language{name: "English", slug: Ecto.UUID.generate()})
{:ok, project} = ProjectCreator.create(params: %{name: "My project", language_id: language.id}, user: user)
revision =
project
|> Repo.preload(:revisions)
|> Map.get(:revisions)
|> Enum.at(0)
|> Repo.preload(:language)
{:ok, [project: project, revision: revision]}
end
test "render json with filename", %{project: project, revision: revision} do
document = Repo.insert!(%Document{project_id: project.id, path: "my-test", format: "json"})
translation =
%Translation{
key: "a",
proposed_text: "B",
corrected_text: "A",
revision_id: revision.id,
document_id: document.id
}
|> Repo.insert!()
%{render: render} =
TranslationsRenderer.render(%{
translations: [translation],
document_format: document.format,
language: revision.language
})
expected_render = """
{
"a": "A"
}
"""
assert render == expected_render
end
test "render json with runtime error", %{project: project, revision: revision} do
document = Repo.insert!(%Document{project_id: project.id, path: "my-test", format: "json"})
translations =
[
%Translation{
key: "a.nested.foo",
proposed_text: "B",
corrected_text: "A",
revision_id: revision.id,
document_id: document.id
},
%Translation{
key: "a.nested",
proposed_text: "C",
corrected_text: "D",
revision_id: revision.id,
document_id: document.id
}
]
|> Enum.map(&Repo.insert!/1)
%{render: render} =
TranslationsRenderer.render(%{
translations: translations,
document_format: document.format,
language: revision.language
})
assert render == ""
end
test "render rails with locale", %{project: project, revision: revision} do
document = Repo.insert!(%Document{project_id: project.id, path: "my-test", format: "rails_yml"})
translation =
%Translation{
key: "a",
proposed_text: "A",
corrected_text: "A",
revision_id: revision.id,
document_id: document.id
}
|> Repo.insert!()
%{render: render} =
TranslationsRenderer.render(%{
translations: [translation],
document_format: document.format,
language: %Language{slug: "fr"}
})
expected_render = """
"fr":
"a": "A"
"""
assert render == expected_render
end
end
| 24.225 | 111 | 0.583075 |
9efff90c1ffec1e04414fc1133b40f3bd3527501 | 192 | ex | Elixir | apps/tai/lib/tai/iex/commands/disable_send_orders.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 276 | 2018-01-16T06:36:06.000Z | 2021-03-20T21:48:01.000Z | apps/tai/lib/tai/iex/commands/disable_send_orders.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/tai/lib/tai/iex/commands/disable_send_orders.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 43 | 2018-06-09T09:54:51.000Z | 2021-03-07T07:35:17.000Z | defmodule Tai.IEx.Commands.DisableSendOrders do
@spec disable :: no_return
def disable do
Tai.Commander.disable_send_orders()
|> IO.puts()
IEx.dont_display_result()
end
end
| 19.2 | 47 | 0.723958 |
9efffde4b590da32e02efda598351914434077ef | 19,174 | ex | Elixir | lib/aws/generated/fsx.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/fsx.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/fsx.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.FSx do
@moduledoc """
Amazon FSx is a fully managed service that makes it easy for storage and
application administrators to launch and use shared file storage.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2018-03-01",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "fsx",
global?: false,
protocol: "json",
service_id: "FSx",
signature_version: "v4",
signing_name: "fsx",
target_prefix: "AWSSimbaAPIService_v20180301"
}
end
@doc """
Use this action to associate one or more Domain Name Server (DNS) aliases with
an existing Amazon FSx for Windows File Server file system.
  A file system can have a maximum of 50 DNS aliases associated with it at any one
time. If you try to associate a DNS alias that is already associated with the
file system, FSx takes no action on that alias in the request. For more
information, see [Working with DNS Aliases](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/managing-dns-aliases.html)
and [Walkthrough 5: Using DNS aliases to access your file system](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/walkthrough05-file-system-custom-CNAME.html),
including additional steps you must take to be able to access your file system
using a DNS alias.
  The system response shows the DNS aliases that Amazon FSx is attempting to
  associate with the file system. Use the `DescribeFileSystemAliases` API
  operation to monitor the status of the aliases Amazon FSx is associating with
  the file system.
"""
def associate_file_system_aliases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateFileSystemAliases", input, options)
end
@doc """
Cancels an existing Amazon FSx for Lustre data repository task if that task is
in either the `PENDING` or `EXECUTING` state.
When you cancel a task, Amazon FSx does the following.
* Any files that FSx has already exported are not reverted.
* FSx continues to export any files that are "in-flight" when the
cancel operation is received.
* FSx does not export any files that have not yet been exported.
"""
def cancel_data_repository_task(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CancelDataRepositoryTask", input, options)
end
@doc """
Creates a backup of an existing Amazon FSx file system.
Creating regular backups for your file system is a best practice, enabling you
to restore a file system from a backup if an issue arises with the original file
system.
For Amazon FSx for Lustre file systems, you can create a backup only for file
systems with the following configuration:
* a Persistent deployment type
    * is *not* linked to a data repository.
For more information about backing up Amazon FSx for Lustre file systems, see
[Working with FSx for Lustre backups](https://docs.aws.amazon.com/fsx/latest/LustreGuide/using-backups-fsx.html).
For more information about backing up Amazon FSx for Windows file systems, see
[Working with FSx for Windows backups](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/using-backups.html).
If a backup with the specified client request token exists, and the parameters
match, this operation returns the description of the existing backup. If a
  backup with the specified client request token exists, and the parameters don't match,
this operation returns `IncompatibleParameterError`. If a backup with the
specified client request token doesn't exist, `CreateBackup` does the following:
* Creates a new Amazon FSx backup with an assigned ID, and an
initial lifecycle state of `CREATING`.
* Returns the description of the backup.
By using the idempotent operation, you can retry a `CreateBackup` operation
without the risk of creating an extra backup. This approach can be useful when
an initial call fails in a way that makes it unclear whether a backup was
created. If you use the same client request token and the initial call created a
backup, the operation returns a successful result because all the parameters are
the same.
The `CreateBackup` operation returns while the backup's lifecycle state is still
`CREATING`. You can check the backup creation status by calling the
`DescribeBackups` operation, which returns the backup state along with other
information.
"""
def create_backup(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateBackup", input, options)
end
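
  # Hedged usage sketch (request keys follow the FSx API reference; the client
  # construction follows the aws-elixir README — adjust to your setup; values
  # are placeholders):
  #
  #   client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
  #
  #   {:ok, result, _http_response} =
  #     AWS.FSx.create_backup(client, %{
  #       "FileSystemId" => "fs-0123456789abcdef0",
  #       "Tags" => [%{"Key" => "Name", "Value" => "nightly"}]
  #     })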
@doc """
Creates an Amazon FSx for Lustre data repository task.
You use data repository tasks to perform bulk operations between your Amazon FSx
file system and its linked data repository. An example of a data repository task
is exporting any data and metadata changes, including POSIX metadata, to files,
directories, and symbolic links (symlinks) from your FSx file system to its
linked data repository. A `CreateDataRepositoryTask` operation will fail if a
data repository is not linked to the FSx file system. To learn more about data
repository tasks, see [Data Repository Tasks](https://docs.aws.amazon.com/fsx/latest/LustreGuide/data-repository-tasks.html).
To learn more about linking a data repository to your file system, see [Linking your file system to an S3
bucket](https://docs.aws.amazon.com/fsx/latest/LustreGuide/create-fs-linked-data-repo.html).
"""
def create_data_repository_task(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDataRepositoryTask", input, options)
end
@doc """
Creates a new, empty Amazon FSx file system.
If a file system with the specified client request token exists and the
parameters match, `CreateFileSystem` returns the description of the existing
  file system. If a file system with the specified client request token exists and the
parameters don't match, this call returns `IncompatibleParameterError`. If a
file system with the specified client request token doesn't exist,
`CreateFileSystem` does the following:
* Creates a new, empty Amazon FSx file system with an assigned ID,
and an initial lifecycle state of `CREATING`.
* Returns the description of the file system.
This operation requires a client request token in the request that Amazon FSx
uses to ensure idempotent creation. This means that calling the operation
multiple times with the same client request token has no effect. By using the
idempotent operation, you can retry a `CreateFileSystem` operation without the
risk of creating an extra file system. This approach can be useful when an
initial call fails in a way that makes it unclear whether a file system was
created. Examples are if a transport level timeout occurred, or your connection
was reset. If you use the same client request token and the initial call created
a file system, the client receives success as long as the parameters are the
same.
The `CreateFileSystem` call returns while the file system's lifecycle state is
still `CREATING`. You can check the file-system creation status by calling the
`DescribeFileSystems` operation, which returns the file system state along with
other information.
"""
def create_file_system(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateFileSystem", input, options)
end
@doc """
Creates a new Amazon FSx file system from an existing Amazon FSx backup.
If a file system with the specified client request token exists and the
parameters match, this operation returns the description of the file system. If
a client request token specified by the file system exists and the parameters
don't match, this call returns `IncompatibleParameterError`. If a file system
with the specified client request token doesn't exist, this operation does the
following:
* Creates a new Amazon FSx file system from backup with an assigned
ID, and an initial lifecycle state of `CREATING`.
* Returns the description of the file system.
Parameters like Active Directory, default share name, automatic backup, and
backup settings default to the parameters of the file system that was backed up,
unless overridden. You can explicitly supply other settings.
By using the idempotent operation, you can retry a `CreateFileSystemFromBackup`
call without the risk of creating an extra file system. This approach can be
useful when an initial call fails in a way that makes it unclear whether a file
system was created. Examples are if a transport level timeout occurred, or your
connection was reset. If you use the same client request token and the initial
call created a file system, the client receives success as long as the
parameters are the same.
The `CreateFileSystemFromBackup` call returns while the file system's lifecycle
state is still `CREATING`. You can check the file-system creation status by
calling the `DescribeFileSystems` operation, which returns the file system state
along with other information.
"""
def create_file_system_from_backup(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateFileSystemFromBackup", input, options)
end
@doc """
Deletes an Amazon FSx backup, deleting its contents.
After deletion, the backup no longer exists, and its data is gone.
The `DeleteBackup` call returns instantly. The backup will not show up in later
`DescribeBackups` calls.
The data in a deleted backup is also deleted and can't be recovered by any
means.
"""
def delete_backup(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteBackup", input, options)
end
@doc """
Deletes a file system, deleting its contents.
After deletion, the file system no longer exists, and its data is gone. Any
existing automatic backups will also be deleted.
By default, when you delete an Amazon FSx for Windows File Server file system, a
final backup is created upon deletion. This final backup is not subject to the
file system's retention policy, and must be manually deleted.
The `DeleteFileSystem` action returns while the file system has the `DELETING`
status. You can check the file system deletion status by calling the
`DescribeFileSystems` action, which returns a list of file systems in your
account. If you pass the file system ID for a deleted file system, the
`DescribeFileSystems` returns a `FileSystemNotFound` error.
Deleting an Amazon FSx for Lustre file system will fail with a 400 BadRequest if
a data repository task is in a `PENDING` or `EXECUTING` state.
The data in a deleted file system is also deleted and can't be recovered by any
means.
"""
def delete_file_system(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteFileSystem", input, options)
end
@doc """
Returns the description of specific Amazon FSx backups, if a `BackupIds` value
is provided for that backup.
Otherwise, it returns all backups owned by your AWS account in the AWS Region of
the endpoint that you're calling.
When retrieving all backups, you can optionally specify the `MaxResults`
parameter to limit the number of backups in a response. If more backups remain,
Amazon FSx returns a `NextToken` value in the response. In this case, send a
later request with the `NextToken` request parameter set to the value of
`NextToken` from the last response.
This action is used in an iterative process to retrieve a list of your backups.
  `DescribeBackups` is called first without a `NextToken` value. Then the action
continues to be called with the `NextToken` parameter set to the value of the
last `NextToken` value until a response has no `NextToken`.
When using this action, keep the following in mind:
    * The implementation might return fewer than `MaxResults` backup
  descriptions while still including a `NextToken` value.
* The order of backups returned in the response of one
`DescribeBackups` call and the order of backups returned across the responses of
a multi-call iteration is unspecified.
"""
def describe_backups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeBackups", input, options)
end
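
  # Hedged pagination sketch for the NextToken flow described above (assumes a
  # client built as in the create_backup/3 example; response keys follow the
  # FSx API reference):
  #
  #   defp all_backups(client, token \\ nil, acc \\ []) do
  #     input = if token, do: %{"NextToken" => token}, else: %{}
  #
  #     {:ok, %{"Backups" => backups} = result, _} =
  #       AWS.FSx.describe_backups(client, input)
  #
  #     case result["NextToken"] do
  #       nil -> acc ++ backups
  #       next -> all_backups(client, next, acc ++ backups)
  #     end
  #   end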
@doc """
Returns the description of specific Amazon FSx for Lustre data repository tasks,
if one or more `TaskIds` values are provided in the request, or if filters are
used in the request.
You can use filters to narrow the response to include just tasks for specific
file systems, or tasks in a specific lifecycle state. Otherwise, it returns all
data repository tasks owned by your AWS account in the AWS Region of the
endpoint that you're calling.
When retrieving all tasks, you can paginate the response by using the optional
`MaxResults` parameter to limit the number of tasks returned in a response. If
more tasks remain, Amazon FSx returns a `NextToken` value in the response. In
this case, send a later request with the `NextToken` request parameter set to
the value of `NextToken` from the last response.
"""
def describe_data_repository_tasks(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDataRepositoryTasks", input, options)
end
@doc """
Returns the DNS aliases that are associated with the specified Amazon FSx for
Windows File Server file system.
A history of all DNS aliases that have been associated with and disassociated
from the file system is available in the list of `AdministrativeAction` provided
in the `DescribeFileSystems` operation response.
"""
def describe_file_system_aliases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeFileSystemAliases", input, options)
end
@doc """
Returns the description of specific Amazon FSx file systems, if a
`FileSystemIds` value is provided for that file system.
Otherwise, it returns descriptions of all file systems owned by your AWS account
in the AWS Region of the endpoint that you're calling.
When retrieving all file system descriptions, you can optionally specify the
`MaxResults` parameter to limit the number of descriptions in a response. If
more file system descriptions remain, Amazon FSx returns a `NextToken` value in
the response. In this case, send a later request with the `NextToken` request
parameter set to the value of `NextToken` from the last response.
This action is used in an iterative process to retrieve a list of your file
system descriptions. `DescribeFileSystems` is called first without a
  `NextToken` value. Then the action continues to be called with the `NextToken`
parameter set to the value of the last `NextToken` value until a response has no
`NextToken`.
When using this action, keep the following in mind:
* The implementation might return fewer than `MaxResults` file
system descriptions while still including a `NextToken` value.
* The order of file systems returned in the response of one
`DescribeFileSystems` call and the order of file systems returned across the
  responses of a multi-call iteration is unspecified.
"""
def describe_file_systems(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeFileSystems", input, options)
end
@doc """
Use this action to disassociate, or remove, one or more Domain Name Service
(DNS) aliases from an Amazon FSx for Windows File Server file system.
If you attempt to disassociate a DNS alias that is not associated with the file
system, Amazon FSx responds with a 400 Bad Request. For more information, see
[Working with DNS Aliases](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/managing-dns-aliases.html).
  The system response shows the DNS aliases that Amazon FSx is attempting to
  disassociate from the file system. Use the `DescribeFileSystemAliases` API
  operation to monitor the status of the aliases Amazon FSx is disassociating
  from the file system.
"""
def disassociate_file_system_aliases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisassociateFileSystemAliases", input, options)
end
@doc """
Lists tags for an Amazon FSx file systems and backups in the case of Amazon FSx
for Windows File Server.
When retrieving all tags, you can optionally specify the `MaxResults` parameter
to limit the number of tags in a response. If more tags remain, Amazon FSx
returns a `NextToken` value in the response. In this case, send a later request
with the `NextToken` request parameter set to the value of `NextToken` from the
last response.
This action is used in an iterative process to retrieve a list of your tags.
  `ListTagsForResource` is called first without a `NextToken` value. Then the
action continues to be called with the `NextToken` parameter set to the value of
the last `NextToken` value until a response has no `NextToken`.
When using this action, keep the following in mind:
    * The implementation might return fewer than `MaxResults` tag
  descriptions while still including a `NextToken` value.
* The order of tags returned in the response of one
`ListTagsForResource` call and the order of tags returned across the responses
of a multi-call iteration is unspecified.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Tags an Amazon FSx resource.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
This action removes a tag from an Amazon FSx resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Use this operation to update the configuration of an existing Amazon FSx file
system.
You can update multiple properties in a single request.
For Amazon FSx for Windows File Server file systems, you can update the
following properties:
* AutomaticBackupRetentionDays
* DailyAutomaticBackupStartTime
* SelfManagedActiveDirectoryConfiguration
* StorageCapacity
* ThroughputCapacity
* WeeklyMaintenanceStartTime
For Amazon FSx for Lustre file systems, you can update the following properties:
* AutoImportPolicy
* AutomaticBackupRetentionDays
* DailyAutomaticBackupStartTime
* StorageCapacity
* WeeklyMaintenanceStartTime
"""
def update_file_system(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateFileSystem", input, options)
end
end
| 44.281755 | 165 | 0.755189 |
7300540a9c93874bfff35cb6b0cf2969de4236dc | 383 | ex | Elixir | lib/basic_crud/users/user.ex | Thibault-Santonja/Phoenix_tests | 429100499849837d1449495db354fc1006ceb86c | [
"MIT"
] | null | null | null | lib/basic_crud/users/user.ex | Thibault-Santonja/Phoenix_tests | 429100499849837d1449495db354fc1006ceb86c | [
"MIT"
] | null | null | null | lib/basic_crud/users/user.ex | Thibault-Santonja/Phoenix_tests | 429100499849837d1449495db354fc1006ceb86c | [
"MIT"
] | null | null | null | defmodule BasicCrud.Users.User do
use Ecto.Schema
import Ecto.Changeset
schema "users" do
field :admin, :boolean, default: false
field :name, :string
field :password, :string
timestamps()
end
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [:name, :password, :admin])
|> validate_required([:name, :password, :admin])
end
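
  # Hedged usage sketch (assumes the default BasicCrud.Repo). Note the password
  # is cast and stored as plain text by this changeset; a real application
  # would hash it (e.g. with a library such as Bcrypt) before persisting:
  #
  #   %BasicCrud.Users.User{}
  #   |> BasicCrud.Users.User.changeset(%{name: "ada", password: "secret", admin: false})
  #   |> BasicCrud.Repo.insert()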
end
| 19.15 | 52 | 0.655352 |
7300885b049c4b007009ccd7df9b15b6fc1f71b9 | 7,969 | ex | Elixir | lib/ex_json_schema/schema.ex | hzamani/ex_json_schema | ad86d4482a8bda66ba9e6dc954b69261051047bf | [
"MIT"
] | null | null | null | lib/ex_json_schema/schema.ex | hzamani/ex_json_schema | ad86d4482a8bda66ba9e6dc954b69261051047bf | [
"MIT"
] | null | null | null | lib/ex_json_schema/schema.ex | hzamani/ex_json_schema | ad86d4482a8bda66ba9e6dc954b69261051047bf | [
"MIT"
] | null | null | null | defmodule ExJsonSchema.Schema do
defmodule UnsupportedSchemaVersionError do
defexception message: "unsupported schema version, only draft 4 is supported"
end
defmodule InvalidSchemaError do
defexception message: "invalid schema"
end
defmodule UndefinedRemoteSchemaResolverError do
defexception message: "trying to resolve a remote schema but no remote schema resolver function is defined"
end
alias ExJsonSchema.Schema.Draft4
alias ExJsonSchema.Schema.Root
@type resolved :: %{String.t => ExJsonSchema.json_value | (Root.t -> {Root.t, resolved})}
@current_draft_schema_url "http://json-schema.org/schema"
@draft4_schema_url "http://json-schema.org/draft-04/schema"
@spec resolve(Root.t) :: Root.t | no_return
def resolve(root = %Root{}), do: resolve_root(root)
@spec resolve(ExJsonSchema.json) :: Root.t | no_return
def resolve(schema = %{}), do: resolve_root(%Root{schema: schema})
@spec get_ref_schema(Root.t, [:root | String.t]) :: ExJsonSchema.json
def get_ref_schema(root = %Root{}, [:root | path] = ref) do
get_ref_schema_with_schema(root.schema, path, ref)
end
def get_ref_schema(root = %Root{}, [url | path] = ref) when is_binary(url) do
get_ref_schema_with_schema(root.refs[url], path, ref)
end
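
  # Hedged usage sketch: after resolve/1, "$ref" values are rewritten into path
  # lists such as [:root, "definitions", "name"], which this function follows
  # back to the raw schema fragment:
  #
  #   root = ExJsonSchema.Schema.resolve(%{"definitions" => %{"name" => %{"type" => "string"}}})
  #   ExJsonSchema.Schema.get_ref_schema(root, [:root, "definitions", "name"])
  #   # => %{"type" => "string"}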
defp resolve_root(root) do
assert_supported_schema_version(Map.get(root.schema, "$schema", @current_draft_schema_url <> "#"))
assert_valid_schema(root.schema)
{root, schema} = resolve_with_root(root, root.schema)
%{root | schema: schema}
end
defp assert_supported_schema_version(version) do
unless supported_schema_version?(version), do: raise UnsupportedSchemaVersionError
end
defp assert_valid_schema(schema) do
unless meta?(schema) do
case ExJsonSchema.Validator.validate(resolve(Draft4.schema), schema) do
{:error, errors} ->
raise InvalidSchemaError, message: "schema did not pass validation against its meta-schema: #{inspect(errors)}"
_ -> nil
end
end
end
defp supported_schema_version?(version) do
case version do
@current_draft_schema_url <> _ -> true
@draft4_schema_url <> _ -> true
_ -> false
end
end
defp resolve_with_root(root, schema, scope \\ "")
defp resolve_with_root(root, schema = %{"id" => id}, scope) when is_binary(id), do: do_resolve(root, schema, scope <> id)
defp resolve_with_root(root, schema = %{}, scope), do: do_resolve(root, schema, scope)
defp resolve_with_root(root, non_schema, _scope), do: {root, non_schema}
defp do_resolve(root, schema, scope) do
{root, schema} = Enum.reduce schema, {root, %{}}, fn (property, {root, schema}) ->
{root, {k, v}} = resolve_property(root, property, scope)
{root, Map.put(schema, k, v)}
end
{root, schema |> sanitize_properties_attribute |> sanitize_additional_items_attribute}
end
defp resolve_property(root, {key, value}, scope) when is_map(value) do
{root, resolved} = resolve_with_root(root, value, scope)
{root, {key, resolved}}
end
defp resolve_property(root, {key, values}, scope) when is_list(values) do
{root, values} = Enum.reduce values, {root, []}, fn (value, {root, values}) ->
{root, resolved} = resolve_with_root(root, value, scope)
{root, [resolved | values]}
end
{root, {key, Enum.reverse(values)}}
end
defp resolve_property(root, {"$ref", ref}, scope) do
scoped_ref = case ref do
"http://" <> _ -> ref
"https://" <> _ -> ref
_else -> scope <> ref |> String.replace("##", "#")
end
{root, path} = resolve_ref(root, scoped_ref)
{root, {"$ref", path}}
end
defp resolve_property(root, tuple, _), do: {root, tuple}
defp resolve_ref(root, "#") do
{root, [root.location]}
end
defp resolve_ref(root, ref) do
[url | fragments] = String.split(ref, "#")
fragment = get_fragment(fragments, ref)
{root, path} = root_and_path_for_url(root, fragment, url)
assert_reference_valid(path, root, ref)
{root, path}
end
defp get_fragment([], _), do: nil
defp get_fragment([""], _), do: nil
defp get_fragment([fragment = "/" <> _], _), do: fragment
defp get_fragment(_, ref), do: raise InvalidSchemaError, message: "invalid reference #{ref}"
defp root_and_path_for_url(root, fragment, "") do
{root, [root.location | relative_path(fragment)]}
end
defp root_and_path_for_url(root, fragment, url) do
root = resolve_and_cache_remote_schema(root, url)
{root, [url | relative_path(fragment)]}
end
defp relative_path(nil), do: []
defp relative_path(fragment), do: relative_ref_path(fragment)
defp relative_ref_path(ref) do
["" | keys] = unescaped_ref_segments(ref)
Enum.map keys, fn key ->
case key =~ ~r/^\d+$/ do
true ->
String.to_integer(key)
false -> key
end
end
end
defp resolve_and_cache_remote_schema(root, url) do
if root.refs[url], do: root, else: fetch_and_resolve_remote_schema(root, url)
end
defp fetch_and_resolve_remote_schema(root, url)
when url == @current_draft_schema_url or url == @draft4_schema_url do
resolve_remote_schema(root, url, Draft4.schema)
end
defp fetch_and_resolve_remote_schema(root, url) do
resolve_remote_schema(root, url, fetch_remote_schema(url))
end
defp resolve_remote_schema(root, url, remote_schema) do
root = root_with_ref(root, url, remote_schema)
resolved_root = resolve_root(%{root | schema: remote_schema, location: url})
root = %{root | refs: resolved_root.refs}
root_with_ref(root, url, resolved_root.schema)
end
defp root_with_ref(root, url, ref) do
%{root | refs: Map.put(root.refs, url, ref)}
end
defp fetch_remote_schema(url) do
case remote_schema_resolver() do
fun when is_function(fun) -> fun.(url)
{mod, fun_name} -> apply(mod, fun_name, [url])
end
end
defp remote_schema_resolver do
Application.get_env(:ex_json_schema, :remote_schema_resolver) || fn _url -> raise UndefinedRemoteSchemaResolverError end
end
defp assert_reference_valid(path, root, _ref) do
get_ref_schema(root, path)
end
defp sanitize_properties_attribute(schema) do
if needs_properties_attribute?(schema), do: Map.put(schema, "properties", %{}), else: schema
end
defp needs_properties_attribute?(schema) do
Enum.any?(~w(patternProperties additionalProperties), &Map.has_key?(schema, &1))
and not Map.has_key?(schema, "properties")
end
defp sanitize_additional_items_attribute(schema) do
if needs_additional_items_attribute?(schema), do: Map.put(schema, "additionalItems", true), else: schema
end
defp needs_additional_items_attribute?(schema) do
Map.has_key?(schema, "items") and not Map.has_key?(schema, "additionalItems")
end
defp unescaped_ref_segments(ref) do
ref
|> String.split("/")
|> Enum.map(fn segment ->
segment
|> String.replace("~0", "~")
|> String.replace("~1", "/")
|> URI.decode
end)
end
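  # Example: the fragment "/definitions/foo~1bar~0baz" splits into
  # ["", "definitions", "foo/bar~baz"]; "~1" decodes to "/" and "~0" to "~",
  # following the JSON Pointer escaping rules (RFC 6901).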
defp meta?(schema) do
String.starts_with?(Map.get(schema, "id", ""), @draft4_schema_url)
end
defp get_ref_schema_with_schema(nil, _, ref) do
raise InvalidSchemaError, message: "reference #{ref_to_string(ref)} could not be resolved"
end
defp get_ref_schema_with_schema(schema, [], _) do
schema
end
defp get_ref_schema_with_schema(schema, [key | path], ref) when is_binary(key) do
get_ref_schema_with_schema(Map.get(schema, key), path, ref)
end
defp get_ref_schema_with_schema(schema, [idx | path], ref) when is_integer(idx) do
try do
get_ref_schema_with_schema(:lists.nth(idx + 1, schema), path, ref)
catch
:error, :function_clause ->
raise InvalidSchemaError, message: "reference #{ref_to_string(ref)} could not be resolved"
end
end
defp ref_to_string([:root | path]), do: ["#" | path] |> Enum.join("/")
defp ref_to_string([url | path]), do: [url <> "#" | path] |> Enum.join("/")
end
| 33.343096 | 124 | 0.68641 |
73009365cc3c98b943da7f108be3e9d31b56673c | 3,396 | ex | Elixir | clients/cloud_search/lib/google_api/cloud_search/v1/model/timestamp_operator_options.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_search/lib/google_api/cloud_search/v1/model/timestamp_operator_options.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_search/lib/google_api/cloud_search/v1/model/timestamp_operator_options.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudSearch.V1.Model.TimestampOperatorOptions do
@moduledoc """
Used to provide a search operator for timestamp properties. This is optional. Search operators let users restrict the query to specific fields relevant to the type of item being searched.
## Attributes
* `greaterThanOperatorName` (*type:* `String.t`, *default:* `nil`) - Indicates the operator name required in the query in order to isolate the timestamp property using the greater-than operator. For example, if greaterThanOperatorName is *closedafter* and the property's name is *closeDate*, then queries like *closedafter:<value>* show results only where the value of the property named *closeDate* is later than *<value>*. The operator name can only contain lowercase letters (a-z). The maximum length is 32 characters.
* `lessThanOperatorName` (*type:* `String.t`, *default:* `nil`) - Indicates the operator name required in the query in order to isolate the timestamp property using the less-than operator. For example, if lessThanOperatorName is *closedbefore* and the property's name is *closeDate*, then queries like *closedbefore:<value>* show results only where the value of the property named *closeDate* is earlier than *<value>*. The operator name can only contain lowercase letters (a-z). The maximum length is 32 characters.
* `operatorName` (*type:* `String.t`, *default:* `nil`) - Indicates the operator name required in the query in order to isolate the timestamp property. For example, if operatorName is *closedon* and the property's name is *closeDate*, then queries like *closedon:<value>* show results only where the value of the property named *closeDate* matches *<value>*. By contrast, a search that uses the same *<value>* without an operator returns all items where *<value>* matches the value of any String properties or text within the content field for the item. The operator name can only contain lowercase letters (a-z). The maximum length is 32 characters.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:greaterThanOperatorName => String.t() | nil,
:lessThanOperatorName => String.t() | nil,
:operatorName => String.t() | nil
}
field(:greaterThanOperatorName)
field(:lessThanOperatorName)
field(:operatorName)
end
defimpl Poison.Decoder, for: GoogleApi.CloudSearch.V1.Model.TimestampOperatorOptions do
def decode(value, options) do
GoogleApi.CloudSearch.V1.Model.TimestampOperatorOptions.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudSearch.V1.Model.TimestampOperatorOptions do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 64.075472 | 654 | 0.759128 |
7300d0a2ea58d87211b7f7e19bf9a106f0e4a2f7 | 9,356 | ex | Elixir | lib/elixir/lib/protocol/consolidation.ex | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | ["Apache-2.0"] | null | null | null | lib/elixir/lib/protocol/consolidation.ex | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | ["Apache-2.0"] | null | null | null | lib/elixir/lib/protocol/consolidation.ex | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | ["Apache-2.0"] | null | null | null
defmodule Protocol.Consolidation do
@moduledoc """
Module responsible for consolidating protocols and helpers for
extracting protocols and implementations from code paths for
consolidation.
"""
@doc """
Extract all protocols from the given paths.
The paths can be either a char list or a string. Internally
they are worked on as char lists, so passing them as lists
  avoids extra conversion.
## Examples
# Get Elixir's ebin and retrieve all protocols
iex> path = :code.lib_dir(:elixir, :ebin)
iex> mods = Protocol.Consolidation.extract_protocols([path])
iex> Enumerable in mods
true
"""
@spec extract_protocols([char_list | String.t]) :: [atom]
def extract_protocols(paths) do
extract_matching_by_attribute paths, 'Elixir.',
fn module, attributes ->
case attributes[:protocol] do
[fallback_to_any: _, consolidated: _] -> module
_ -> nil
end
end
end
@doc """
Extract all types implemented for the given protocol from
the given paths.
The paths can be either a char list or a string. Internally
they are worked on as char lists, so passing them as lists
  avoids extra conversion.
## Examples
      # Get Elixir's ebin and retrieve all implementations of Enumerable
iex> path = :code.lib_dir(:elixir, :ebin)
iex> mods = Protocol.Consolidation.extract_impls(Enumerable, [path])
iex> List in mods
true
"""
@spec extract_impls(module, [char_list | String.t]) :: [atom]
def extract_impls(protocol, paths) when is_atom(protocol) do
prefix = atom_to_list(protocol) ++ '.'
extract_matching_by_attribute paths, prefix, fn
_mod, attributes ->
case attributes[:impl] do
[protocol: ^protocol, for: for] -> for
_ -> nil
end
end
end
defp extract_matching_by_attribute(paths, prefix, callback) do
lc path inlist paths,
file inlist list_dir(path),
mod = extract_from_file(path, file, prefix, callback),
do: mod
end
defp list_dir(path) when is_list(path) do
case :file.list_dir(path) do
{ :ok, files } -> files
_ -> []
end
end
defp list_dir(path), do: list_dir(to_char_list(path))
defp extract_from_file(path, file, prefix, callback) do
if :lists.prefix(prefix, file) and Path.extname(file) == '.beam' do
extract_from_beam(Path.join(path, file), callback)
end
end
defp extract_from_beam(file, callback) do
case :beam_lib.chunks(file, [:attributes]) do
{:ok, { module, [attributes: attributes] } } ->
callback.(module, attributes)
_ ->
nil
end
end
defmacrop if_ok(expr, call) do
quote do
case unquote(expr) do
{ :ok, var } -> unquote(Macro.pipe(quote(do: var), call))
other -> other
end
end
end
@doc """
Receives a protocol and a list of implementations and
consolidates the given protocol. Consolidation happens
by changing the protocol `impl_for` in the abstract
format to have fast lookup rules.
It returns the updated version of the protocol bytecode.
A given bytecode or protocol implementation can be checked
to be consolidated or not by analyzing the protocol
attribute:
Enumerable.__info__(:attributes)[:protocol]
If the first element of the tuple is true, it means
the protocol was consolidated.
This function does not load the protocol at any point
nor loads the new bytecode for the compiled module.
"""
@spec apply_to(module, [module]) ::
{ :ok, binary } |
{ :error, :not_a_protocol } |
{ :error, :no_beam_info }
def apply_to(protocol, types) when is_atom(protocol) do
ensure_protocol(protocol)
|> if_ok(change_debug_info types)
|> if_ok(compile)
end
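  # A minimal end-to-end sketch (paths are illustrative):
  #
  #     paths = [:code.lib_dir(:elixir, :ebin)]
  #     impls = Protocol.Consolidation.extract_impls(Enumerable, paths)
  #     {:ok, binary} = Protocol.Consolidation.apply_to(Enumerable, impls)
  #     # `binary` holds the consolidated bytecode, ready to be written to
  #     # disk or loaded with :code.load_binary/3.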
  # Read the module's BEAM chunks (without loading it) and check that it is a protocol.
defp ensure_protocol(protocol) do
case :beam_lib.chunks(beam_file(protocol), [:abstract_code, :attributes]) do
{ :ok, { ^protocol, [abstract_code: { _raw, abstract_code },
attributes: attributes] } } ->
case attributes[:protocol] do
[fallback_to_any: any, consolidated: _] ->
{ :ok, { protocol, any, abstract_code } }
_ ->
{ :error, :not_a_protocol }
end
_ ->
{ :error, :no_beam_info }
end
end
defp beam_file(module) when is_atom(module) do
case :code.which(module) do
:non_existing -> module
file -> file
end
end
# Change the debug information to the optimized
# impl_for/1 dispatch version.
defp change_debug_info({ protocol, any, code }, types) do
types = if any, do: types, else: List.delete(types, Any)
records = types -- Protocol.builtin
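    # `builtin -- (builtin -- types)` below is list intersection: it keeps
    # the built-in types that actually appear in `types`, preserving the
    # order of `Protocol.builtin`.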
builtin = Protocol.builtin -- (Protocol.builtin -- types)
builtin = if records != [], do: [Record|builtin], else: builtin
change_impl_for(code, protocol, builtin, records, false, [])
end
defp change_impl_for([{ :attribute, line, :protocol, _ }|t], protocol, builtin, records, _, acc) do
attr = [fallback_to_any: Any in builtin, consolidated: true]
change_impl_for(t, protocol, builtin, records, true,
[{ :attribute, line, :protocol, attr }|acc])
end
defp change_impl_for([{ :function, line, :impl_for, 1, _ }|t], protocol, builtin, records, is_protocol, acc) do
clauses = lc type inlist builtin, do: clause_for(type, protocol, line)
unless Any in builtin do
clauses = clauses ++ [fallback_clause_for(nil, protocol, line)]
end
change_impl_for(t, protocol, builtin, records, is_protocol,
[{ :function, line, :impl_for, 1, clauses }|acc])
end
defp change_impl_for([{ :function, line, :rec_impl_for, 1, _ }|t], protocol, builtin, records, is_protocol, acc) do
fallback = if Tuple in builtin, do: Module.concat(protocol, Tuple)
clauses = lc type inlist records, do: record_clause_for(type, protocol, line)
clauses = clauses ++ [fallback_clause_for(fallback, protocol, line)]
change_impl_for(t, protocol, builtin, records, is_protocol,
[{ :function, line, :rec_impl_for, 1, clauses }|acc])
end
defp change_impl_for([h|t], protocol, info, types, is_protocol, acc) do
change_impl_for(t, protocol, info, types, is_protocol, [h|acc])
end
defp change_impl_for([], protocol, _info, _types, is_protocol, acc) do
if is_protocol do
{ :ok, { protocol, Enum.reverse(acc) } }
else
{ :error, :not_a_protocol }
end
end
defp clause_for(Tuple, protocol, line), do: builtin_clause_for(Tuple, :is_tuple, protocol, line)
defp clause_for(Atom, protocol, line), do: builtin_clause_for(Atom, :is_atom, protocol, line)
defp clause_for(List, protocol, line), do: builtin_clause_for(List, :is_list, protocol, line)
defp clause_for(BitString, protocol, line), do: builtin_clause_for(BitString, :is_bitstring, protocol, line)
defp clause_for(Integer, protocol, line), do: builtin_clause_for(Integer, :is_integer, protocol, line)
defp clause_for(Float, protocol, line), do: builtin_clause_for(Float, :is_float, protocol, line)
defp clause_for(Function, protocol, line), do: builtin_clause_for(Function, :is_function, protocol, line)
defp clause_for(PID, protocol, line), do: builtin_clause_for(PID, :is_pid, protocol, line)
defp clause_for(Port, protocol, line), do: builtin_clause_for(Port, :is_port, protocol, line)
defp clause_for(Reference, protocol, line), do: builtin_clause_for(Reference, :is_reference, protocol, line)
defp clause_for(Any, protocol, line) do
{:clause, line, [{:var, line, :_}], [],
[{ :atom, line, Module.concat(protocol, Any) }]}
end
defp clause_for(Record, _protocol, line) do
{:clause, line, [{:var, line, :x}],
[[{:op, line, :andalso,
{:call, line,
{:remote, line, {:atom, line, :erlang}, {:atom, line, :is_tuple}},
[{:var, line, :x}]},
{:call, line,
{:remote, line, {:atom, line, :erlang}, {:atom, line, :is_atom}},
[{:call, line,
{:remote, line, {:atom, line, :erlang}, {:atom, line, :element}},
[{:integer, line, 1}, {:var, line, :x}]
}]},
}]],
[{:call, line,
{:atom, line, :rec_impl_for},
[{:call, line,
{:remote, line, {:atom, line, :erlang}, {:atom, line, :element}},
[{:integer, line, 1}, {:var, line, :x}]}]}]}
end
defp builtin_clause_for(mod, guard, protocol, line) do
{:clause, line,
[{:var, line, :x}],
[[{:call, line,
{:remote, line, {:atom, line, :erlang}, {:atom, line, guard}},
[{:var, line, :x}],
}]],
[{:atom, line, Module.concat(protocol, mod)}]}
end
defp record_clause_for(other, protocol, line) do
{:clause, line, [{:atom, line, other}], [],
[{:atom, line, Module.concat(protocol, other)}]}
end
defp fallback_clause_for(value, _protocol, line) do
{:clause, line, [{:var, line, :_}], [],
[{ :atom, line, value }]}
end
# Finally compile the module and emit its bytecode.
defp compile({ protocol, code }) do
opts = if Code.compiler_options[:debug_info], do: [:debug_info], else: []
{ :ok, ^protocol, binary, _warnings } = :compile.forms(code, [:return|opts])
{ :ok, binary }
end
end
| 35.041199 | 117 | 0.640124 |
73011197701780c6d714082c090b46f42a441e39 | 1,895 | ex | Elixir | clients/gmail/lib/google_api/gmail/v1/model/auto_forwarding.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/gmail/lib/google_api/gmail/v1/model/auto_forwarding.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | clients/gmail/lib/google_api/gmail/v1/model/auto_forwarding.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Gmail.V1.Model.AutoForwarding do
@moduledoc """
Auto-forwarding settings for an account.
## Attributes
* `disposition` (*type:* `String.t`, *default:* `nil`) - The state that a message should be left in after it has been forwarded.
* `emailAddress` (*type:* `String.t`, *default:* `nil`) - Email address to which all incoming messages are forwarded. This email address must be a verified member of the forwarding addresses.
* `enabled` (*type:* `boolean()`, *default:* `nil`) - Whether all incoming mail is automatically forwarded to another address.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:disposition => String.t(),
:emailAddress => String.t(),
:enabled => boolean()
}
field(:disposition)
field(:emailAddress)
field(:enabled)
end
defimpl Poison.Decoder, for: GoogleApi.Gmail.V1.Model.AutoForwarding do
def decode(value, options) do
GoogleApi.Gmail.V1.Model.AutoForwarding.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Gmail.V1.Model.AutoForwarding do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.754717 | 195 | 0.7219 |
730136c2fc34ce99ba5fdccc93ae179118e34781 | 5,756 | exs | Elixir | mix.exs | the-shank/extika | 1e5b58caad8976194b5b63470124f8ab7b95e363 | ["MIT"] | null | null | null | mix.exs | the-shank/extika | 1e5b58caad8976194b5b63470124f8ab7b95e363 | ["MIT"] | null | null | null | mix.exs | the-shank/extika | 1e5b58caad8976194b5b63470124f8ab7b95e363 | ["MIT"] | null | null | null
defmodule ExTika.Mixfile do
use Mix.Project
def project do
[
app: :extika,
description: "Wrapper around Apache Tika",
version: "0.0.3",
package: package(),
elixir: "~> 1.1",
compilers: [:tika | Mix.compilers],
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps(),
]
end
def application do
[applications: [:logger]]
end
defp aliases do
[
clean: ["clean", "clean.tika"],
]
end
defp package do
[
name: :extika,
files: ["lib", "mix.exs", "README*", "LICENSE*", ".tika-version"],
maintainers: ["Andrew Dunham"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/andrew-d/extika",
"Docs" => "https://andrew-d.github.io/extika/"},
]
end
defp deps do
[
{:poison, "~> 2.0"},
# Development / testing dependencies
{:dialyxir, "~> 0.3.5", only: :test},
{:ex_doc, "~> 0.12", only: :dev},
]
end
def trim(s) do
if :erlang.function_exported(String, :trim, 1) do
String.trim(s)
else
String.strip(s)
end
end
end
defmodule Mix.Tasks.Compile.Tika do
@shortdoc "Downloads the Apache Tika JAR file(s)"
def run(_) do
version = File.read!(".tika-version")
|> ExTika.Mixfile.trim
fetch_one(
"tika-#{version}.jar",
"https://archive.apache.org/dist/tika/tika-app-#{version}.jar",
"4f377b42e122f92c3f1f3b4702029cf0642c7d6f3ce872a0dfb1472eac65be44"
)
Mix.shell.info("Done!")
end
# Fetches a single file and verifies the checksum.
defp fetch_one(fname, url, sum) do
dest = Path.join("priv", fname)
# If the file doesn't exist, download it.
if !File.exists?(dest) do
Mix.shell.info("Fetching: #{fname}")
:ok = fetch_url(url, dest)
end
Mix.shell.info("Verifying checksum of: #{fname}")
case verify_checksum(dest, sum) do
:ok ->
nil
{:error, msg} ->
Mix.shell.error(msg)
File.rm(dest)
exit(:checksum_mismatch)
end
:ok
end
# Streams the contents of a given URL to a file on disk
defp fetch_url(url, dest) do
# Ensure the directory exists
File.mkdir_p!(Path.dirname(dest))
{:ok, _} = Application.ensure_all_started(:ssl)
{:ok, _} = Application.ensure_all_started(:inets)
# Starting an HTTP client profile allows us to scope
# the effects of using an HTTP proxy to this function
{:ok, _pid} = :inets.start(:httpc, [{:profile, :extika}])
# Set proxy config.
proxy_config()
headers = [{'user-agent', 'ExTika/#{System.version}'}]
request = {:binary.bin_to_list(url), headers}
http_options = [relaxed: true] ++ proxy_auth(url)
options = [stream: :binary.bin_to_list(dest)]
case :httpc.request(:get, request, http_options, options, :extika) do
{:ok, :saved_to_file} ->
:ok
{:ok, {{_, status, _}, _, _}} ->
{:remote, "httpc request failed with: {:bad_status_code, #{status}}"}
{:error, reason} ->
{:remote, "httpc request failed with: #{inspect reason}"}
end
after
:inets.stop(:httpc, :extika)
end
# Sets any options necessary to configure HTTP proxies
defp proxy_config() do
http_proxy = System.get_env("HTTP_PROXY") || System.get_env("http_proxy")
https_proxy = System.get_env("HTTPS_PROXY") || System.get_env("https_proxy")
if http_proxy, do: proxy(http_proxy)
if https_proxy, do: proxy(https_proxy)
end
defp proxy(proxy) do
uri = URI.parse(proxy)
if uri.host && uri.port do
host = String.to_char_list(uri.host)
scheme = case uri.scheme do
"http" -> :proxy
"https" -> :https_proxy
end
:httpc.set_options([{scheme, {{host, uri.port}, []}}], :extika)
end
end
defp proxy_auth(url) do
    # Pick the proxy settings that match the URL's scheme and extract any credentials.
url
|> get_scheme
|> get_proxy_url
|> get_proxy_auth_from_proxy_url()
end
defp get_scheme(url) do
cond do
String.starts_with?(url, "http://") -> :http
String.starts_with?(url, "https://") -> :https
end
end
defp get_proxy_url(:http) do
System.get_env("HTTP_PROXY") || System.get_env("http_proxy")
end
defp get_proxy_url(:https) do
System.get_env("HTTPS_PROXY") || System.get_env("https_proxy")
end
defp get_proxy_auth_from_proxy_url(nil), do: []
defp get_proxy_auth_from_proxy_url(proxy_url) do
parsed = URI.parse(proxy_url)
if parsed.userinfo do
[username, password] = String.split(parsed.userinfo, ":", parts: 2)
[proxy_auth: {to_charlist(username), to_charlist(password)}]
else
[]
end
end
# Verifies that the hash of a file matches what's expected
defp verify_checksum(path, expected) do
actual = hash_file(path)
if actual == expected do
:ok
else
{:error, """
Data does not match the given SHA-256 checksum.
Expected: #{expected}
Actual: #{actual}
"""}
end
end
# Hashes an input file in chunks
defp hash_file(path) do
File.stream!(path, [], 4 * 1024 * 1024)
|> Enum.reduce(:crypto.hash_init(:sha256), fn(chunk, acc) ->
:crypto.hash_update(acc, chunk)
end)
|> :crypto.hash_final
|> Base.encode16(case: :lower)
end
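  # For a small file this yields the same digest as the one-shot version
  # (shown here only as a sketch of the equivalence; streaming in 4 MiB
  # chunks just keeps memory use bounded for large JARs):
  #
  #     :crypto.hash(:sha256, File.read!(path)) |> Base.encode16(case: :lower)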
end
defmodule Mix.Tasks.Clean.Tika do
@shortdoc "Cleans any downloaded JAR files"
def run(_) do
version = File.read!(".tika-version")
|> ExTika.Mixfile.trim
names = [
"tika-#{version}.jar",
]
Enum.each(names, fn(name) ->
fpath = Path.join("priv", name)
if File.exists?(fpath) do
Mix.shell.info("Removing file: #{fpath}")
File.rm!(fpath)
end
end)
end
end
| 24.083682 | 80 | 0.605629 |
73014570480b1ba0721616164c843e533267556e | 2,271 | exs | Elixir | 20_prefix_compression.exs | Meyhem/hackerrank-fp-elixir | b1b3343d5f05152d37ba92f11955003602e4699b | ["MIT"] | null | null | null | 20_prefix_compression.exs | Meyhem/hackerrank-fp-elixir | b1b3343d5f05152d37ba92f11955003602e4699b | ["MIT"] | null | null | null | 20_prefix_compression.exs | Meyhem/hackerrank-fp-elixir | b1b3343d5f05152d37ba92f11955003602e4699b | ["MIT"] | null | null | null
# echo -e "pizseagtsgevlvwpbshxfwbjijtmslkufgofretehjgpcdedstydokejsnrccnkbruawgjmemfqhwmhjsykrejmmexgrsdlwtaybtnhfcapsylopkthboqkvicgwrvycphqnulnjgfgjlqnhdyfwweyifpxyxrvpnshjrrkmkwrvjjssublanosljhqhppmewuidyqblfcjzkwbhqlljvvcjqthbkpcoquepqzcvbkdlhufbkpiyaqbfblexry\npizseagtsgevlvwpbshxfwbjijtmslkufgofretehjgpcdedstydokejsnrccnkbruawgjmemfqhwmhjsykrejmmexgrsdlwtaybtnhfcapsylopkthboqkvicgcjkyxxubpbqcycioryppydvmwubupxrjlwrwujsiimvpbgkdugjbpqtodgbzzpsduewwovwlfrarfiuaztmmohagkjzzdcnjytamlzpwpfmhlgsdlwfbluxqllcjxbltbrlhgngvtxyksllhfhhzknesqggfazixcotpwwbjenbglhcbnhgjgyimfvfbcezatumthzrqtvdyirxulfhwylbdhkibmiagdgeerriszbnfxmoxgabaxkgkydrnrrbatjbavwdxjqirqivqweqdxeiiotbfidiemakmzbwszgrgekrlalsmbplumxeqmfrulddmbactyuhdljlhhjvxumpkyyzouybtwxtoyiffrderzmztkcdsbhcij" | elixir 20_prefix_compression.exs
defmodule Solution do
def read_float() do
case IO.gets "" do
line when is_binary(line) -> { :ok, String.to_float String.trim line }
:eof -> { :error }
{ :error, _} -> { :error }
end
end
def read_int() do
case IO.gets "" do
line when is_binary(line) -> { :ok, String.to_integer String.trim line }
:eof -> { :error }
{ :error, _} -> { :error }
end
end
def read_int_list() do
case IO.gets "" do
line when is_binary(line) -> {
:ok,
String.trim(line, "\n") |> String.split |> Enum.map(&String.to_integer/1)
}
:eof -> { :error }
{ :error, _} -> { :error }
end
end
def read_binary_line() do
case IO.gets "" do
line when is_binary(line) -> {
:ok,
String.trim(line, "\n")
}
:eof -> { :error }
{ :error, _} -> { :error }
end
end
def main() do
{ _, s1 } = read_binary_line()
{ _, s2 } = read_binary_line()
lcp = String.graphemes(s1)
|> Stream.zip(String.graphemes(s2))
|> Stream.take_while(fn { a, b } -> a == b end)
|> Enum.count()
IO.puts("#{lcp} " <> String.slice(s1, 0, lcp))
IO.puts("#{String.length(s1) - lcp} " <> String.slice(s1, lcp, String.length(s1)))
IO.puts("#{String.length(s2) - lcp} " <> String.slice(s2, lcp, String.length(s2)))
end
end
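# Sample run (illustrative): for the two input lines "abcde" and "abxyz",
# the longest common prefix has length 2, so the program prints:
#   2 ab
#   3 cde
#   3 xyz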
Solution.main
| 39.155172 | 805 | 0.667547 |
73017c666c1979d003eb1515b7c75c3137d25b01 | 1,012 | exs | Elixir | mix.exs | timjp87/elixir-libp2p | 19d1174c5614e6b3b0a1bbab21fee846f3bad11b | ["MIT"] | 7 | 2019-06-13T20:39:09.000Z | 2020-08-14T13:11:17.000Z | mix.exs | timjp87/elixir-libp2p | 19d1174c5614e6b3b0a1bbab21fee846f3bad11b | ["MIT"] | null | null | null | mix.exs | timjp87/elixir-libp2p | 19d1174c5614e6b3b0a1bbab21fee846f3bad11b | ["MIT"] | 1 | 2019-07-17T14:10:54.000Z | 2019-07-17T14:10:54.000Z
defmodule Libp2p.MixProject do
use Mix.Project
def project do
[
app: :libp2p,
version: "0.1.1",
elixir: "~> 1.8",
description: description(),
package: package(),
start_permanent: Mix.env() == :prod,
source_url: "https://github.com/timjp87/elixir-libp2p",
deps: deps()
]
end
defp description() do
"This package provides bindings for the go-libp2p-daemon, which needs to be installed and launched seperately."
end
defp package() do
[
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/timjp87/elixir-libp2p"}
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:protobuf, "~> 0.5.3"},
{:poolboy, "~> 1.5"},
{:basefiftyeight, "~> 0.1.0"},
{:multiaddr, "~> 1.1"},
{:ex_doc, ">= 0.0.0", only: :dev}
]
end
end
| 22 | 115 | 0.574111 |
730185396b04d82d8da13cf16ca41b29d8ddc24b | 7,150 | exs | Elixir | test/four_lucha/game_test.exs | Thomas-Jean/four_lucha | 591627059c02edc3315b5cac2c35eacb821108ff | ["Apache-2.0"] | 1 | 2021-02-21T19:15:27.000Z | 2021-02-21T19:15:27.000Z | test/four_lucha/game_test.exs | Thomas-Jean/four_lucha | 591627059c02edc3315b5cac2c35eacb821108ff | ["Apache-2.0"] | null | null | null | test/four_lucha/game_test.exs | Thomas-Jean/four_lucha | 591627059c02edc3315b5cac2c35eacb821108ff | ["Apache-2.0"] | null | null | null
defmodule FourLucha.GameTest do
use ExUnit.Case, async: true
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney, options: [clear_mock: true]
ExVCR.Config.filter_url_params(true)
setup_all do
HTTPoison.start()
end
setup do
{status, _cleared} = Cachex.clear(:gb_cache)
status
end
test "responses with a Game and Response struct" do
use_cassette "gb_get_game" do
response = FourLucha.Game.get(1)
assert elem(response, 0) == :ok
assert elem(response, 1).__struct__ == FourLucha.Resource.Game
assert elem(response, 2).__struct__ == FourLucha.Resource.Response
end
end
test "gets the game with the id that matches our request" do
use_cassette "gb_get_game" do
{status, game, req} = FourLucha.Game.get(1)
assert status == :ok
assert game.__struct__ == FourLucha.Resource.Game
assert game.id == 1
assert req.error == "OK"
assert req.status_code == 1
end
end
test "responses with an error if the game does not exist" do
use_cassette "gb_get_invalid_game" do
{status, game, req} = FourLucha.Game.get(10_000_000)
assert status == :error
assert game.guid == nil
assert req.error == "Object Not Found"
assert req.status_code == 101
end
end
test "responses with a list of Games when given query parameters as a map" do
use_cassette "gb_get_zelda_games" do
{status, games, req} = FourLucha.Game.get(%{filter: %{name: 'zelda'}})
assert status == :ok
assert is_list(games)
assert length(games) == 34
assert req.error == "OK"
assert req.status_code == 1
end
end
test "responses with a list of Games when given query parameters as a keyword list" do
use_cassette "gb_get_super_mario_games" do
{status, games, req} = FourLucha.Game.get(%{filter: [name: 'super', name: 'mario']})
assert status == :ok
assert is_list(games)
assert length(games) == 59
assert req.error == "OK"
assert req.status_code == 1
end
end
test "get games sorted in ascending order" do
use_cassette "gb_get_kingdom_hearts_sorted_asc" do
{status, games, req} =
FourLucha.Game.get(%{
filter: [name: "Kingdom Hearts"],
sort: [field: "date_last_updated", direction: "asc"]
})
dates = Enum.map(games, & &1.date_last_updated)
sorted_dates = Enum.sort(dates, &(&1 < &2))
same_order =
Enum.zip(dates, sorted_dates)
|> Enum.map(fn x -> elem(x, 0) == elem(x, 1) end)
|> Enum.reduce(true, fn x, acc ->
if acc == false do
false
else
acc == x
end
end)
assert same_order == true
assert status == :ok
assert is_list(games)
assert req.error == "OK"
assert req.status_code == 1
end
end
test "get games with sorted in descending order" do
use_cassette "gb_get_kingdom_hearts_sorted_desc" do
{status, games, req} =
FourLucha.Game.get(%{
filter: [name: "Kingdom Hearts"],
sort: [field: "date_last_updated", direction: "desc"]
})
dates = Enum.map(games, & &1.date_last_updated)
sorted_dates = Enum.sort(dates, &(&1 > &2))
same_order =
Enum.zip(dates, sorted_dates)
|> Enum.map(fn x -> elem(x, 0) == elem(x, 1) end)
|> Enum.reduce(true, fn x, acc ->
if acc == false do
false
else
acc == x
end
end)
assert same_order == true
assert status == :ok
assert is_list(games)
assert req.error == "OK"
assert req.status_code == 1
end
end
test "get games with a limit param" do
use_cassette "gb_get_50_games" do
{status, games, req} = FourLucha.Game.get(%{limit: "50"})
assert status == :ok
assert length(games) == 50
assert is_list(games)
assert req.error == "OK"
assert req.status_code == 1
end
end
test "get games using offset to move past a limit" do
use_cassette "gb_get_1_game_without_offset" do
{status, games, req} = FourLucha.Game.get(%{limit: 1})
assert status == :ok
assert length(games) == 1
assert hd(games).name == "Desert Strike: Return to the Gulf"
assert req.error == "OK"
assert req.status_code == 1
end
use_cassette "gb_get_1_game_with_offset" do
{status_offset, games_offset, req_offset} = FourLucha.Game.get(%{limit: 1, offset: 1})
assert status_offset == :ok
assert length(games_offset) == 1
assert hd(games_offset).name == "Breakfree"
assert req_offset.error == "OK"
assert req_offset.status_code == 1
end
end
test "get games by date range" do
use_cassette "gb_get_baffle_ball_by_date" do
{status, games, req} =
FourLucha.Game.get(%{
filter: %{original_release_date: %{start: "1900-01-01", end: "1931-12-31"}}
})
assert status == :ok
assert is_list(games)
assert length(games) == 1
assert hd(games).name == "Baffle Ball"
assert req.error == "OK"
assert req.status_code == 1
end
Cachex.clear(:gb_cache)
use_cassette "gb_get_baffle_ball_by_date" do
{status, games, req} =
FourLucha.Game.get(%{
filter: [original_release_date: %{start: "1900-01-01", end: "1931-12-31"}]
})
assert status == :ok
assert is_list(games)
assert length(games) == 1
assert hd(games).name == "Baffle Ball"
assert req.error == "OK"
assert req.status_code == 1
end
end
test "get games by start to now" do
use_cassette "gb_get_with_start" do
{status, games, req} =
FourLucha.Game.get(%{filter: %{original_release_date: %{start: "2019-08-31"}}})
assert status == :ok
assert is_list(games)
assert req.error == "OK"
assert req.status_code == 1
end
Cachex.clear(:gb_cache)
use_cassette "gb_get_with_start" do
{status, games, req} =
FourLucha.Game.get(%{filter: [original_release_date: %{start: "2019-08-31"}]})
assert status == :ok
assert is_list(games)
assert req.error == "OK"
assert req.status_code == 1
end
end
test "get games from cache on repeat" do
use_cassette "gb_get_zelda_games" do
{status, games, req} = FourLucha.Game.get(%{filter: %{name: 'zelda'}})
assert status == :ok
assert is_list(games)
assert length(games) == 34
assert req.error == "OK"
assert req.status_code == 1
end
use_cassette "gb_get_zelda_games" do
{status, games, req} = FourLucha.Game.get(%{filter: %{name: 'zelda'}})
assert status == :ok
assert is_list(games)
assert length(games) == 34
assert req == nil
end
end
test "get game from cache on repeat" do
use_cassette "gb_get_game" do
FourLucha.Game.get(1)
{status, game, req} = FourLucha.Game.get(1)
assert status == :ok
assert game.__struct__ == FourLucha.Resource.Game
assert game.id == 1
assert req == nil
end
end
end
| 28.373016 | 92 | 0.608811 |
7301cef289d088218915f4913ee43b13dbd136a1 | 2,725 | exs | Elixir | test/protocol/unpack_test.exs | desoulter/smppex | 1c8dbd9673291431b2d329a2cb20134c91857af2 | ["MIT"] | 49 | 2019-11-27T00:20:59.000Z | 2022-02-18T17:07:14.000Z | test/protocol/unpack_test.exs | desoulter/smppex | 1c8dbd9673291431b2d329a2cb20134c91857af2 | ["MIT"] | 62 | 2016-05-19T17:11:02.000Z | 2019-11-21T16:47:15.000Z | test/protocol/unpack_test.exs | desoulter/smppex | 1c8dbd9673291431b2d329a2cb20134c91857af2 | ["MIT"] | 25 | 2016-05-20T14:44:43.000Z | 2019-11-15T14:50:24.000Z
defmodule SMPPEX.Protocol.UnpackTest do
use ExUnit.Case
alias SMPPEX.Protocol.Unpack
test "integer" do
assert {:error, _} = Unpack.integer(<<>>, 1)
assert {:error, _} = Unpack.integer(<<1>>, 2)
assert {:error, _} = Unpack.integer(<<1, 2, 3>>, 4)
assert {:ok, 1, <<2, 3>>} == Unpack.integer(<<1, 2, 3>>, 1)
assert {:ok, 1, <<3>>} == Unpack.integer(<<0, 1, 3>>, 2)
assert {:ok, 1, <<5>>} == Unpack.integer(<<0, 0, 0, 1, 5>>, 4)
end
test "c_octet_string: fixed, ascii" do
assert {:ok, "", "rest"} == Unpack.c_octet_string(<<0, "rest">>, {:fixed, 1}, :ascii)
assert {:error, _} = Unpack.c_octet_string(<<"ab", 0>>, {:fixed, 4})
assert {:ok, "ab", "c"} == Unpack.c_octet_string(<<"ab", 0, "c">>, {:fixed, 3}, :ascii)
end
test "c_octet_string: fixed, hex" do
assert {:ok, "0123456789abcdefABCDEF", "c"} ==
Unpack.c_octet_string(<<"0123456789abcdefABCDEF", 0, "c">>, {:fixed, 23}, :hex)
assert {:error, _} =
Unpack.c_octet_string(<<"0123456789abXdefABCDEF", 0, "c">>, {:fixed, 23}, :hex)
end
test "c_octet_string: fixed, dec" do
assert {:ok, "0123456789", "c"} ==
Unpack.c_octet_string(<<"0123456789", 0, "c">>, {:fixed, 11}, :dec)
assert {:error, _} = Unpack.c_octet_string(<<"01234X6789", 0, "c">>, {:fixed, 11}, :dec)
end
test "c_octet_string: var max, ascii" do
assert {:error, _} = Unpack.c_octet_string(<<"hi">>, {:max, 1}, :ascii)
assert {:ok, "", "rest"} == Unpack.c_octet_string(<<0, "rest">>, {:max, 3}, :ascii)
assert {:ok, "ab", "c"} == Unpack.c_octet_string(<<"ab", 0, "c">>, {:max, 3}, :ascii)
assert {:ok, "ab", "c"} == Unpack.c_octet_string(<<"ab", 0, "c">>, {:max, 4}, :ascii)
end
test "c_octet_string: var max, hex" do
assert {:ok, "0123456789abcdefABCDEF", "c"} ==
Unpack.c_octet_string(<<"0123456789abcdefABCDEF", 0, "c">>, {:max, 23}, :hex)
assert {:error, _} =
Unpack.c_octet_string(<<"0123456789abXdefABCDEF", 0, "c">>, {:max, 23}, :hex)
end
test "c_octet_string: var max, dec" do
assert {:ok, "0123456789", "c"} ==
Unpack.c_octet_string(<<"0123456789", 0, "c">>, {:max, 11}, :dec)
assert {:error, _} = Unpack.c_octet_string(<<"01234F6789", 0, "c">>, {:max, 11}, :dec)
end
test "octet_string" do
assert {:ok, "", "123"} == Unpack.octet_string("123", 0)
assert {:ok, "12", "3"} == Unpack.octet_string("123", 2)
assert {:error, _} = Unpack.octet_string("123", 4)
end
test "tlv" do
assert {:error, _} = Unpack.tlv(<<0, 1, 0, 2, 0>>)
assert {:error, _} = Unpack.tlv(<<0, 1, 0, -1, 0>>)
assert {:ok, {1, <<3, 4>>}, <<5>>} == Unpack.tlv(<<0, 1, 0, 2, 3, 4, 5>>)
end
end
| 36.333333 | 92 | 0.550459 |
7301fac158253891bb00810599299b6b5a230ce2 | 966 | ex | Elixir | lib/spender_web/schema/wish_list_types.ex | LittleKidogo/Budgetinization | eae6dd62208ec7fb43c8c212f40611f8635205d5 | ["MIT"] | 2 | 2018-02-27T06:29:59.000Z | 2018-06-09T16:53:49.000Z | lib/spender_web/schema/wish_list_types.ex | LittleKidogo/Budgetinization | eae6dd62208ec7fb43c8c212f40611f8635205d5 | ["MIT"] | 94 | 2018-02-22T06:03:19.000Z | 2018-06-28T14:30:31.000Z | lib/spender_web/schema/wish_list_types.ex | LittleKidogo/Budgetinization | eae6dd62208ec7fb43c8c212f40611f8635205d5 | ["MIT"] | 1 | 2020-03-04T19:41:06.000Z | 2020-03-04T19:41:06.000Z
defmodule SpenderWeb.Schema.WishListTypes do
use Absinthe.Schema.Notation
@desc "A wishlist item"
object :wish_list_item do
field :location, :string
field :name, :string
field :price, :float
field :qpm, :integer
field :type, :string
field :id, :id
end
@desc "input object for wishlist items"
input_object :wish_list_items_input do
field :moneylog_id, non_null(:id)
end
@desc "inputs object to create a wishlist item"
input_object :wish_list_item_input do
field :moneylog_id, non_null(:id)
field :location, :string
field :name, non_null(:string)
field :price, non_null(:float)
field :qpm, :integer
field :type, :string
end
@desc "input object for updating an wishlist item"
input_object :wish_list_item_update_input do
field :location, :string
field :name, :string
field :price, :float
field :qpm, :integer
field :type, :string
field :id, non_null(:id)
end
end
| 24.769231 | 52 | 0.691511 |
7301fd0d9b1918bd093fe1993fb1068fd11a2db9 | 1,357 | exs | Elixir | config/test.exs | MNDL-27/midarr-server | b749707a1777205cea2d93349cde2ef922e527ec | ["MIT"] | null | null | null | config/test.exs | MNDL-27/midarr-server | b749707a1777205cea2d93349cde2ef922e527ec | ["MIT"] | null | null | null | config/test.exs | MNDL-27/midarr-server | b749707a1777205cea2d93349cde2ef922e527ec | ["MIT"] | null | null | null
import Config
# Only in tests, remove the complexity from the password hashing algorithm
config :bcrypt_elixir, :log_rounds, 1
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :media_server, MediaServer.Repo,
username: System.get_env("DB_USERNAME"),
password: System.get_env("DB_PASSWORD"),
database: System.get_env("DB_DATABASE"),
hostname: System.get_env("TEST_DB_HOSTNAME"),
pool_size: 10,
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :media_server, MediaServerWeb.Endpoint,
http: [ip: {127, 0, 0, 1}, port: 4002],
secret_key_base: "62B5xZrG1FF9/9KoKGBTwSiiuq4aGoO/m2ZBORy8I1D/k4DXyi/khr1NBKmoFl0p",
server: false
# In test we don't send emails.
config :media_server, MediaServer.Mailer, adapter: Swoosh.Adapters.Test
# Print only warnings and errors during test
config :logger, level: :warn
# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime
config :media_server,
movies_base_url: "radarr:7878",
movies_api_key: "d031e8c9b9df4b2fab311d1c3b3fa2c5",
series_base_url: "sonarr:8989",
series_api_key: "1accda4476394bfcaddefe8c4fd77d4a" | 34.794872 | 86 | 0.77745 |
730216823d729b7a7db942b6944b064083ed2f01 | 2,771 | ex | Elixir | api/lib/scrumpoker_web/channels/presence.ex | makao95/scrumpoker | de235a049a360612cde23ae5992375cb01a7feba | ["MIT"] | 2 | 2020-05-06T14:51:56.000Z | 2020-05-06T14:52:21.000Z | api/lib/scrumpoker_web/channels/presence.ex | makao95/scrumpoker | de235a049a360612cde23ae5992375cb01a7feba | ["MIT"] | 4 | 2020-11-18T13:52:06.000Z | 2021-06-17T00:36:52.000Z | api/lib/scrumpoker_web/channels/presence.ex | makao95/scrumpoker | de235a049a360612cde23ae5992375cb01a7feba | ["MIT"] | 1 | 2020-11-17T00:36:19.000Z | 2020-11-17T00:36:19.000Z
defmodule ScrumPokerWeb.Presence do
@moduledoc """
Provides presence tracking to channels and processes.
See the [`Phoenix.Presence`](http://hexdocs.pm/phoenix/Phoenix.Presence.html)
docs for more details.
## Usage
Presences can be tracked in your channel after joining:
defmodule ScrumPoker.MyChannel do
use ScrumPokerWeb, :channel
alias ScrumPokerWeb.Presence
def join("some:topic", _params, socket) do
send(self(), :after_join)
{:ok, assign(socket, :user_id, ...)}
end
def handle_info(:after_join, socket) do
push(socket, "presence_state", Presence.list(socket))
{:ok, _} = Presence.track(socket, socket.assigns.user_id, %{
online_at: inspect(System.system_time(:second))
})
{:noreply, socket}
end
end
In the example above, `Presence.track` is used to register this
channel's process as a presence for the socket's user ID, with
a map of metadata. Next, the current presence list for
the socket's topic is pushed to the client as a `"presence_state"` event.
Finally, a diff of presence join and leave events will be sent to the
client as they happen in real-time with the "presence_diff" event.
See `Phoenix.Presence.list/2` for details on the presence data structure.
## Fetching Presence Information
The `fetch/2` callback is triggered when using `list/1`
and serves as a mechanism to fetch presence information a single time,
before broadcasting the information to all channel subscribers.
This prevents N query problems and gives you a single place to group
isolated data fetching to extend presence metadata.
The function receives a topic and map of presences and must return a
map of data matching the Presence data structure:
%{"123" => %{metas: [%{status: "away", phx_ref: ...}],
"456" => %{metas: [%{status: "online", phx_ref: ...}]}
The `:metas` key must be kept, but you can extend the map of information
to include any additional information. For example:
def fetch(_topic, entries) do
users = entries |> Map.keys() |> Accounts.get_users_map(entries)
# => %{"123" => %{name: "User 123"}, "456" => %{name: nil}}
for {key, %{metas: metas}} <- entries, into: %{} do
{key, %{metas: metas, user: users[key]}}
end
end
The function above fetches all users from the database who
have registered presences for the given topic. The fetched
information is then extended with a `:user` key of the user's
information, while maintaining the required `:metas` field from the
original presence data.
"""
use Phoenix.Presence,
otp_app: :scrumpoker,
pubsub_server: ScrumPokerWeb.PubSub
end
| 36.946667 | 79 | 0.678455 |
7302342162539a8d33daa5ada71e6ab6164ca85c | 112 | exs | Elixir | test/durak_test.exs | mlensment/durak | d28296a230c12c5eb8c21bf8bd31eb384b2d4ddc | ["MIT"] | null | null | null | test/durak_test.exs | mlensment/durak | d28296a230c12c5eb8c21bf8bd31eb384b2d4ddc | ["MIT"] | null | null | null | test/durak_test.exs | mlensment/durak | d28296a230c12c5eb8c21bf8bd31eb384b2d4ddc | ["MIT"] | null | null | null
defmodule DurakTest do
use ExUnit.Case
doctest Durak
test "the truth" do
assert 1 + 1 == 2
end
end
| 12.444444 | 22 | 0.660714 |
730256a57f8d9a7a45eb943ea7729344b2165174 | 2,061 | exs | Elixir | test/tiptap/learnables_test.exs | EasterPeanut/tiptap-editor | cce7766925308fbebf8fc486e4f7609397c0d1fb | ["MIT"] | 3 | 2021-08-29T03:06:05.000Z | 2022-01-13T09:29:38.000Z | test/tiptap/learnables_test.exs | EasterPeanut/tiptap-editor | cce7766925308fbebf8fc486e4f7609397c0d1fb | ["MIT"] | 15 | 2021-08-23T18:02:30.000Z | 2021-08-30T07:32:38.000Z | test/tiptap/learnables_test.exs | EasterPeanut/tiptap-editor | cce7766925308fbebf8fc486e4f7609397c0d1fb | ["MIT"] | null | null | null
defmodule Tiptap.LearnablesTest do
use Tiptap.DataCase
alias Tiptap.Learnables
describe "articles" do
alias Tiptap.Learnables.Article
import Tiptap.LearnablesFixtures
@invalid_attrs %{content: nil, title: nil}
test "list_articles/0 returns all articles" do
article = article_fixture()
assert Learnables.list_articles() == [article]
end
test "get_article!/1 returns the article with given id" do
article = article_fixture()
assert Learnables.get_article!(article.id) == article
end
test "create_article/1 with valid data creates an article" do
valid_attrs = %{content: %{}, title: "some title"}
assert {:ok, %Article{} = article} = Learnables.create_article(valid_attrs)
assert article.content == %{}
assert article.title == "some title"
end
test "create_article/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Learnables.create_article(@invalid_attrs)
end
test "update_article/2 with valid data updates the article" do
article = article_fixture()
update_attrs = %{content: %{}, title: "some updated title"}
assert {:ok, %Article{} = article} = Learnables.update_article(article, update_attrs)
assert article.content == %{}
assert article.title == "some updated title"
end
test "update_article/2 with invalid data returns error changeset" do
article = article_fixture()
assert {:error, %Ecto.Changeset{}} = Learnables.update_article(article, @invalid_attrs)
assert article == Learnables.get_article!(article.id)
end
test "delete_article/1 deletes the article" do
article = article_fixture()
assert {:ok, %Article{}} = Learnables.delete_article(article)
assert_raise Ecto.NoResultsError, fn -> Learnables.get_article!(article.id) end
end
test "change_article/1 returns an article changeset" do
article = article_fixture()
assert %Ecto.Changeset{} = Learnables.change_article(article)
end
end
end
| 33.241935 | 93 | 0.692868 |
730257f07767cd97137d82a4c50d5fded347adf0 | 5,472 | ex | Elixir | apps/game_server/lib/game_server/pong/pong_game_state.ex | powelli13/dara-dots | 728a622e761867ff114c9dbdbf57ff76ab0fb992 | ["MIT"] | 6 | 2021-03-25T21:02:28.000Z | 2022-02-10T08:11:28.000Z | apps/game_server/lib/game_server/pong/pong_game_state.ex | powelli13/dara-dots | 728a622e761867ff114c9dbdbf57ff76ab0fb992 | ["MIT"] | 20 | 2020-09-02T23:17:29.000Z | 2022-03-27T22:54:20.000Z | apps/game_server/lib/game_server/pong/pong_game_state.ex | powelli13/dara-dots | 728a622e761867ff114c9dbdbf57ff76ab0fb992 | ["MIT"] | 2 | 2021-03-25T21:08:15.000Z | 2021-09-25T19:11:36.000Z
defmodule GameServer.PongGameState do
alias __MODULE__
@paddle_right_limit 0.9
@paddle_left_limit 0.0
@paddle_move_step 0.03
# The paddle width is ten percent
# the front end rendering must match this
@paddle_width 0.1
@starting_ball_x 0.5
@starting_ball_y 0.5
@starting_ball_speed 0.02
@starting_ball_x_step 0.05
@starting_ball_y_step 0.05
defstruct ball_x: @starting_ball_x,
ball_y: @starting_ball_y,
ball_speed: @starting_ball_speed,
# Theta here is in degrees and is converted when used
ball_theta: 90,
ball_x_step: @starting_ball_x_step,
ball_y_step: @starting_ball_y_step,
ball_moving: false,
top_paddle_x: 0.4,
bot_paddle_x: 0.4,
top_player_score: 0,
bot_player_score: 0
def move_top_paddle(state = %PongGameState{}, direction)
when is_atom(direction) do
new_paddle_x = adjust_paddle_x(state.top_paddle_x, direction)
%PongGameState{state | top_paddle_x: new_paddle_x}
end
def move_bottom_paddle(state = %PongGameState{}, direction)
when is_atom(direction) do
new_paddle_x = adjust_paddle_x(state.bot_paddle_x, direction)
%PongGameState{state | bot_paddle_x: new_paddle_x}
end
defp adjust_paddle_x(paddle_x, direction) do
case direction do
:left ->
if paddle_x >= @paddle_left_limit do
paddle_x - @paddle_move_step
else
paddle_x
end
:right ->
if paddle_x <= @paddle_right_limit do
paddle_x + @paddle_move_step
else
paddle_x
end
end
end
defp degrees_to_radians(degrees) do
degrees * :math.pi() / 180
end
# Returns a tuple of game state struct and bool which indicates if the ball
# should be reset.
# This is because the message mailbox and timers are in the GenServer which
# uses this struct.
def move_ball(state = %PongGameState{}) do
# Only calculate and update if the ball is set to move
if not state.ball_moving do
{state, false}
else
calculate_and_move_ball(state)
end
end
defp calculate_and_move_ball(state = %PongGameState{}) do
# check collisions
{new_theta, player_scored} = check_collisions_and_calculate_theta(state)
if new_theta == :reset do
{state
|> score_goal(player_scored)
|> reset_ball_position_and_speed, true}
else
# recalculate x and y step
radians = degrees_to_radians(new_theta)
new_ball_x_step = state.ball_speed * :math.cos(radians)
new_ball_y_step = state.ball_speed * :math.sin(radians)
# move ball
new_ball_x = state.ball_x + new_ball_x_step
new_ball_y = state.ball_y + new_ball_y_step
{%PongGameState{
state
| ball_x: new_ball_x,
ball_y: new_ball_y,
ball_x_step: new_ball_x_step,
ball_y_step: new_ball_y_step,
ball_theta: new_theta
}, false}
end
end
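  # Worked example of the step math (illustrative numbers): with ball_speed
  # 0.02 and theta 60 degrees, x_step = 0.02 * cos(60) = 0.01 and
  # y_step = 0.02 * sin(60) ~ 0.0173, so the ball drifts right and toward
  # the bottom paddle (y grows downward on this board).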
def start_ball_moving(state = %PongGameState{}) do
%{state | ball_moving: true}
end
defp check_collisions_and_calculate_theta(state = %PongGameState{}) do
cond do
collide_left?(state.ball_x) ->
{reflect_left_wall(state.ball_theta), :no_score}
collide_right?(state.ball_x) ->
{reflect_right_wall(state.ball_theta), :no_score}
collide_bottom_paddle?(state) ->
{reflect_paddle(state.ball_theta), :no_score}
collide_top_paddle?(state) ->
{reflect_paddle(state.ball_theta), :no_score}
collide_top_goal?(state) ->
{:reset, :bot_scored}
collide_bottom_goal?(state) ->
{:reset, :top_scored}
true ->
{state.ball_theta, :no_score}
end
end
defp collide_left?(ball_x), do: ball_x <= 0.00
defp collide_right?(ball_x), do: ball_x >= 1.00
defp collide_bottom_paddle?(state) do
state.ball_y >= 0.95 &&
state.ball_x >= state.bot_paddle_x &&
state.ball_x <= state.bot_paddle_x + @paddle_width
end
defp collide_top_paddle?(state) do
state.ball_y <= 0.05 &&
state.ball_x >= state.top_paddle_x &&
state.ball_x <= state.top_paddle_x + @paddle_width
end
# Top is lower numbers since it's near the top of the screen
# in the Phaser framework used
defp collide_top_goal?(state) do
state.ball_y <= 0.05
end
defp collide_bottom_goal?(state) do
state.ball_y >= 0.95
end
defp reflect_left_wall(theta) do
180 - theta
end
defp reflect_right_wall(theta) do
180 - theta
end
defp reflect_paddle(theta) do
360 - theta
end
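  # Serving angles are limited to 45..135 and 225..315 degrees so the ball
  # always heads toward a paddle instead of skimming nearly parallel to the
  # side walls.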
defp get_random_starting_theta() do
Enum.concat(45..135, 225..315) |> Enum.random()
end
defp score_goal(state = %PongGameState{}, player_scored) do
case player_scored do
:top_scored ->
%PongGameState{
state
| top_player_score: state.top_player_score + 1
}
:bot_scored ->
%PongGameState{
state
| bot_player_score: state.bot_player_score + 1
}
end
end
def reset_ball_position_and_speed(state = %PongGameState{}) do
%PongGameState{
state
| ball_x: @starting_ball_x,
ball_y: @starting_ball_y,
ball_speed: @starting_ball_speed,
ball_theta: get_random_starting_theta(),
ball_x_step: @starting_ball_x_step,
ball_y_step: @starting_ball_y_step,
ball_moving: false
}
end
end
| 26.181818 | 77 | 0.656067 |
730260477009c626f67da51a8d5beb75d4626be4 | 2,554 | ex | Elixir | lib/huffman/packer.ex | Tmw/huffman | 837a6d4e13f88d7a5e23998edff4d172523c33f3 | ["MIT"] | 5 | 2020-09-06T18:29:51.000Z | 2021-03-13T13:38:34.000Z | lib/huffman/packer.ex | Tmw/huffman | 837a6d4e13f88d7a5e23998edff4d172523c33f3 | ["MIT"] | null | null | null | lib/huffman/packer.ex | Tmw/huffman | 837a6d4e13f88d7a5e23998edff4d172523c33f3 | ["MIT"] | 1 | 2020-09-06T18:29:54.000Z | 2020-09-06T18:29:54.000Z
defmodule Huffman.Packer do
alias Huffman.{Codebook, Tree}
@moduledoc """
This module will take the encoded tree and data and pack it into a single
binary that has the following format:
+---------+----------+----------+----------+
| header | tree | data | padding |
+---------+----------+----------+----------+
| 35 bits | variable | variable | < 8 bits |
+---------+----------+----------+----------+
The first 35 bits is reserved for the header. The header describes the layout
of the rest of the packet. The header itself is made up of two parts:
- The first 32 bits describe the length of the tree. This is the number of bits
following the header that contain the tree.
- The last 3 bits describe the padding at the end of the binary and can be
safely ignored when reading the blob. Its use is purely to round the number
of bits to the nearest binary (multiple of 8). Required to write to disk for example.
"""
@header_size 35
@type packed_data :: binary()
@doc """
pack/2 takes the serialized tree and encoded data and returns padded bitstring
to form binary.
"""
@spec pack(Tree.serialized_tree(), Codebook.encoded_data()) :: packed_data()
def pack(tree, encoded) do
tree_size = bit_size(tree)
data_size = bit_size(encoded)
# from the total we need to calculate the padding we'd need to add
# at the end of the bitstream to land on a valid binary (multiple of 8).
total_size = tree_size + data_size + @header_size
    # The outer rem/2 maps an already byte-aligned total to 0 padding bits;
    # a padding of 8 would overflow the 3-bit padding field in the header.
    padding_size = rem(8 - rem(total_size, 8), 8)
<<
make_header(tree_size, padding_size)::bitstring(),
tree::bitstring(),
encoded::bitstring(),
0::size(padding_size)
>>
end
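  # A minimal round trip (a sketch; any bitstrings work as tree and data):
  #
  #     packed = Huffman.Packer.pack(<<0b101::3>>, <<0b11001::5>>)
  #     {:ok, <<0b101::3>>, <<0b11001::5>>} = Huffman.Packer.unpack(packed)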
@spec unpack(packed_data()) ::
{:ok, Tree.serialized_tree(), Codebook.encoded_data()}
| {:error, :invalid_binary}
def unpack(packed_data) when is_binary(packed_data) do
with {:ok, tree, data} <- deconstruct_binary(packed_data) do
{:ok, tree, data}
else
_ ->
{:error, :invalid_binary}
end
end
defp deconstruct_binary(packed_data) do
with <<tree_size::size(32), padding::size(3), rest::bitstring()>> <- packed_data,
<<tree::bitstring-size(tree_size), rest::bitstring()>> <- rest,
data_length <- bit_size(rest) - padding,
<<data::bitstring-size(data_length), _rest::bitstring>> <- rest do
{:ok, tree, data}
end
end
defp make_header(tree_size, padding_size) do
<<
tree_size::size(32),
padding_size::size(3)
>>
end
end
| 31.925 | 87 | 0.628426 |
73028d4fd6ae4ddfa77138f7a3d451f42c57fb37 | 11,927 | exs | Elixir | test/mix/tasks/phx.gen.json_test.exs | nickurban/phoenix | 116a0d4660248a09886e80da5e36dc6e395723d5 | ["MIT"] | 7 | 2021-01-31T04:51:08.000Z | 2022-01-09T06:59:28.000Z | test/mix/tasks/phx.gen.json_test.exs | nickurban/phoenix | 116a0d4660248a09886e80da5e36dc6e395723d5 | ["MIT"] | 2 | 2022-02-19T07:30:25.000Z | 2022-02-27T14:12:26.000Z | test/mix/tasks/phx.gen.json_test.exs | nickurban/phoenix | 116a0d4660248a09886e80da5e36dc6e395723d5 | ["MIT"] | 2 | 2021-02-06T08:40:23.000Z | 2021-03-20T16:35:47.000Z
Code.require_file "../../../installer/test/mix_helper.exs", __DIR__
defmodule Mix.Tasks.Phx.Gen.JsonTest do
use ExUnit.Case
import MixHelper
alias Mix.Tasks.Phx.Gen
setup do
Mix.Task.clear()
:ok
end
test "invalid mix arguments", config do
in_tmp_project config.test, fn ->
assert_raise Mix.Error, ~r/Expected the context, "blog", to be a valid module name/, fn ->
Gen.Json.run(~w(blog Post posts title:string))
end
assert_raise Mix.Error, ~r/Expected the schema, "posts", to be a valid module name/, fn ->
Gen.Json.run(~w(Post posts title:string))
end
assert_raise Mix.Error, ~r/The context and schema should have different names/, fn ->
Gen.Json.run(~w(Blog Blog blogs))
end
assert_raise Mix.Error, ~r/Invalid arguments/, fn ->
Gen.Json.run(~w(Blog.Post posts))
end
assert_raise Mix.Error, ~r/Invalid arguments/, fn ->
Gen.Json.run(~w(Blog Post))
end
end
end
test "generates json resource", config do
one_day_in_seconds = 24 * 3600
naive_datetime =
%{NaiveDateTime.utc_now() | second: 0, microsecond: {0, 6}}
|> NaiveDateTime.add(-one_day_in_seconds)
datetime =
%{DateTime.utc_now() | second: 0, microsecond: {0, 6}}
|> DateTime.add(-one_day_in_seconds)
in_tmp_project config.test, fn ->
Gen.Json.run(~w(Blog Post posts title slug:unique votes:integer cost:decimal
tags:array:text popular:boolean drafted_at:datetime
params:map
published_at:utc_datetime
published_at_usec:utc_datetime_usec
deleted_at:naive_datetime
deleted_at_usec:naive_datetime_usec
alarm:time
alarm_usec:time_usec
secret:uuid:redact announcement_date:date
weight:float user_id:references:users))
assert_file "lib/phoenix/blog/post.ex"
assert_file "lib/phoenix/blog.ex"
assert_file "test/phoenix/blog_test.exs", fn file ->
assert file =~ "use Phoenix.DataCase"
end
assert_file "test/phoenix_web/controllers/post_controller_test.exs", fn file ->
assert file =~ "defmodule PhoenixWeb.PostControllerTest"
assert file =~ """
assert %{
"id" => ^id,
"alarm" => "14:00:00",
"alarm_usec" => "14:00:00.000000",
"announcement_date" => "#{Date.add(Date.utc_today(), -1)}",
"cost" => "120.5",
"deleted_at" => "#{naive_datetime |> NaiveDateTime.truncate(:second) |> NaiveDateTime.to_iso8601()}",
"deleted_at_usec" => "#{NaiveDateTime.to_iso8601(naive_datetime)}",
"drafted_at" => "#{datetime |> NaiveDateTime.truncate(:second) |> NaiveDateTime.to_iso8601()}",
"params" => %{},
"popular" => true,
"published_at" => "#{datetime |> DateTime.truncate(:second) |> DateTime.to_iso8601()}",
"published_at_usec" => "#{DateTime.to_iso8601(datetime)}",
"secret" => "7488a646-e31f-11e4-aace-600308960662",
"slug" => "some slug",
"tags" => [],
"title" => "some title",
"votes" => 42,
"weight" => 120.5
} = json_response(conn, 200)["data"]
"""
end
assert [_] = Path.wildcard("priv/repo/migrations/*_create_posts.exs")
assert_file "lib/phoenix_web/controllers/fallback_controller.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.FallbackController"
end
assert_file "lib/phoenix_web/controllers/post_controller.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.PostController"
assert file =~ "use PhoenixWeb, :controller"
assert file =~ "Blog.get_post!"
assert file =~ "Blog.list_posts"
assert file =~ "Blog.create_post"
assert file =~ "Blog.update_post"
assert file =~ "Blog.delete_post"
assert file =~ " Routes.post_path(conn"
end
assert_receive {:mix_shell, :info, ["""
Add the resource to your :api scope in lib/phoenix_web/router.ex:
resources "/posts", PostController, except: [:new, :edit]
"""]}
end
end
test "generates into existing context without prompt with --merge-with-existing-context", config do
in_tmp_project config.test, fn ->
Gen.Json.run(~w(Blog Post posts title))
assert_file "lib/phoenix/blog.ex", fn file ->
assert file =~ "def get_post!"
assert file =~ "def list_posts"
assert file =~ "def create_post"
assert file =~ "def update_post"
assert file =~ "def delete_post"
assert file =~ "def change_post"
end
Gen.Json.run(~w(Blog Comment comments message:string --merge-with-existing-context))
refute_received {:mix_shell, :info, ["You are generating into an existing context" <> _notice]}
assert_file "lib/phoenix/blog.ex", fn file ->
assert file =~ "def get_comment!"
assert file =~ "def list_comments"
assert file =~ "def create_comment"
assert file =~ "def update_comment"
assert file =~ "def delete_comment"
assert file =~ "def change_comment"
end
end
end
test "when more than 50 arguments are given", config do
in_tmp_project config.test, fn ->
long_attribute_list = 0..55 |> Enum.map(&("attribute#{&1}:string")) |> Enum.join(" ")
Gen.Json.run(~w(Blog Post posts #{long_attribute_list}))
assert_file "test/phoenix_web/controllers/post_controller_test.exs", fn file ->
refute file =~ "...}"
end
end
end
test "with json --web namespace generates namedspaced web modules and directories", config do
in_tmp_project config.test, fn ->
Gen.Json.run(~w(Blog Post posts title:string --web Blog))
assert_file "test/phoenix_web/controllers/blog/post_controller_test.exs", fn file ->
assert file =~ "defmodule PhoenixWeb.Blog.PostControllerTest"
assert file =~ " Routes.blog_post_path(conn"
end
assert_file "lib/phoenix_web/controllers/blog/post_controller.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.Blog.PostController"
assert file =~ "use PhoenixWeb, :controller"
assert file =~ " Routes.blog_post_path(conn"
end
assert_file "lib/phoenix_web/views/blog/post_view.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.Blog.PostView"
end
assert_receive {:mix_shell, :info, ["""
Add the resource to your Blog :api scope in lib/phoenix_web/router.ex:
scope "/blog", PhoenixWeb.Blog, as: :blog do
pipe_through :api
...
resources "/posts", PostController
end
"""]}
end
end
test "with --no-context skips context and schema file generation", config do
in_tmp_project config.test, fn ->
Gen.Json.run(~w(Blog Comment comments title:string --no-context))
refute_file "lib/phoenix/blog.ex"
refute_file "lib/phoenix/blog/comment.ex"
assert Path.wildcard("priv/repo/migrations/*.exs") == []
assert_file "test/phoenix_web/controllers/comment_controller_test.exs", fn file ->
assert file =~ "defmodule PhoenixWeb.CommentControllerTest"
end
assert_file "lib/phoenix_web/controllers/comment_controller.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.CommentController"
assert file =~ "use PhoenixWeb, :controller"
end
assert_file "lib/phoenix_web/views/comment_view.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.CommentView"
end
end
end
test "with --no-context no warning is emitted when context exists", config do
in_tmp_project config.test, fn ->
Gen.Json.run(~w(Blog Post posts title:string))
assert_file "lib/phoenix/blog.ex"
assert_file "lib/phoenix/blog/post.ex"
Gen.Json.run(~w(Blog Comment comments title:string --no-context))
refute_received {:mix_shell, :info, ["You are generating into an existing context" <> _]}
assert_file "test/phoenix_web/controllers/comment_controller_test.exs", fn file ->
assert file =~ "defmodule PhoenixWeb.CommentControllerTest"
end
assert_file "lib/phoenix_web/controllers/comment_controller.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.CommentController"
assert file =~ "use PhoenixWeb, :controller"
end
assert_file "lib/phoenix_web/views/comment_view.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.CommentView"
end
end
end
test "with --no-schema skips schema file generation", config do
in_tmp_project config.test, fn ->
Gen.Json.run(~w(Blog Comment comments title:string --no-schema))
assert_file "lib/phoenix/blog.ex"
refute_file "lib/phoenix/blog/comment.ex"
assert Path.wildcard("priv/repo/migrations/*.exs") == []
assert_file "test/phoenix_web/controllers/comment_controller_test.exs", fn file ->
assert file =~ "defmodule PhoenixWeb.CommentControllerTest"
end
assert_file "lib/phoenix_web/controllers/comment_controller.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.CommentController"
assert file =~ "use PhoenixWeb, :controller"
end
assert_file "lib/phoenix_web/views/comment_view.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.CommentView"
end
end
end
describe "inside umbrella" do
test "without context_app generators config uses web dir", config do
in_tmp_umbrella_project config.test, fn ->
Gen.Json.run(~w(Accounts User users name:string))
assert_file "lib/phoenix/accounts.ex"
assert_file "lib/phoenix/accounts/user.ex"
assert_file "lib/phoenix_web/controllers/user_controller.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.UserController"
assert file =~ "use PhoenixWeb, :controller"
end
assert_file "lib/phoenix_web/views/user_view.ex", fn file ->
assert file =~ "defmodule PhoenixWeb.UserView"
end
assert_file "test/phoenix_web/controllers/user_controller_test.exs", fn file ->
assert file =~ "defmodule PhoenixWeb.UserControllerTest"
end
end
end
test "raises with false context_app", config do
in_tmp_umbrella_project config.test, fn ->
Application.put_env(:phoenix, :generators, context_app: false)
assert_raise Mix.Error, ~r/no context_app configured/, fn ->
Gen.Json.run(~w(Accounts User users name:string))
end
end
end
test "with context_app generators config does not use web dir", config do
in_tmp_umbrella_project config.test, fn ->
File.mkdir!("another_app")
Application.put_env(:phoenix, :generators, context_app: {:another_app, "another_app"})
Gen.Json.run(~w(Accounts User users name:string))
assert_file "another_app/lib/another_app/accounts.ex"
assert_file "another_app/lib/another_app/accounts/user.ex"
assert_file "lib/phoenix/controllers/user_controller.ex", fn file ->
assert file =~ "defmodule Phoenix.UserController"
assert file =~ "use Phoenix, :controller"
end
assert_file "lib/phoenix/views/user_view.ex", fn file ->
assert file =~ "defmodule Phoenix.UserView"
end
assert_file "test/phoenix/controllers/user_controller_test.exs", fn file ->
assert file =~ "defmodule Phoenix.UserControllerTest"
end
end
end
end
end
| 37.271875 | 124 | 0.627148 |
73029c56f6150908bda34d4c2918efde3e760ab6 | 409 | ex | Elixir | test/support/case.ex | dhirajbajaj/link_preview | b500c27b0244ec64563cae80a078c94adb12dec8 | [
"Apache-2.0"
] | null | null | null | test/support/case.ex | dhirajbajaj/link_preview | b500c27b0244ec64563cae80a078c94adb12dec8 | [
"Apache-2.0"
] | null | null | null | test/support/case.ex | dhirajbajaj/link_preview | b500c27b0244ec64563cae80a078c94adb12dec8 | [
"Apache-2.0"
] | null | null | null | defmodule LinkPreview.Case do
use ExUnit.CaseTemplate
using do
quote do
@httparrot "http://localhost:#{Application.get_env(:httparrot, :http_port)}"
@opengraph File.read!("test/support/fixtures/opengraph_example.html")
@html File.read!("test/support/fixtures/html_example.html")
@image_spam File.read!("test/support/fixtures/html_image_spam_example.html")
end
end
end
| 27.266667 | 82 | 0.723716 |
7302dce54528bf689b8565739656277314f4aa6c | 16,797 | exs | Elixir | test/redix_test.exs | procore/redix | 322209c6bb176953e76d190c42889687780badaf | [
"MIT"
] | null | null | null | test/redix_test.exs | procore/redix | 322209c6bb176953e76d190c42889687780badaf | [
"MIT"
] | null | null | null | test/redix_test.exs | procore/redix | 322209c6bb176953e76d190c42889687780badaf | [
"MIT"
] | null | null | null | defmodule RedixTest do
use ExUnit.Case, async: true
import ExUnit.CaptureLog
alias Redix.{
ConnectionError,
Error
}
setup_all do
{:ok, conn} = Redix.start_link()
Redix.command!(conn, ["FLUSHALL"])
Redix.stop(conn)
:ok
end
describe "start_link/2" do
test "specifying a database" do
{:ok, c} = Redix.start_link(database: 1)
assert Redix.command(c, ~w(SET my_key my_value)) == {:ok, "OK"}
# Let's check we didn't write to the default database (which is 0).
{:ok, c} = Redix.start_link()
assert Redix.command(c, ~w(GET my_key)) == {:ok, nil}
end
test "specifying a non existing database" do
capture_log(fn ->
Process.flag(:trap_exit, true)
{:ok, pid} = Redix.start_link(database: 1000)
assert_receive {:EXIT, ^pid, %Error{message: message}}, 500
assert message in ["ERR invalid DB index", "ERR DB index is out of range"]
end)
end
test "specifying a password when no password is set" do
capture_log(fn ->
Process.flag(:trap_exit, true)
{:ok, pid} = Redix.start_link(password: "foo")
error = %Error{message: "ERR Client sent AUTH, but no password is set"}
assert_receive {:EXIT, ^pid, ^error}, 500
end)
end
test "specifying a password when a password is set" do
{:ok, pid} = Redix.start_link(port: 16379, password: "some-password")
assert Redix.command(pid, ["PING"]) == {:ok, "PONG"}
end
test "when unable to connect to Redis with sync_connect: true" do
capture_log(fn ->
Process.flag(:trap_exit, true)
error = %Redix.ConnectionError{reason: :nxdomain}
assert Redix.start_link(host: "nonexistent", sync_connect: true) == {:error, error}
assert_receive {:EXIT, _pid, ^error}, 1000
end)
end
test "when unable to connect to Redis with sync_connect: false" do
capture_log(fn ->
Process.flag(:trap_exit, true)
{:ok, pid} = Redix.start_link(host: "nonexistent", sync_connect: false)
refute_receive {:EXIT, ^pid, :nxdomain}, 200
end)
end
test "using a redis:// url" do
{:ok, pid} = Redix.start_link("redis://localhost:6379/3")
assert Redix.command(pid, ["PING"]) == {:ok, "PONG"}
end
test "using a rediss:// url" do
{:ok, pid} = Redix.start_link("rediss://localhost:6384/3")
assert Redix.command(pid, ["PING"]) == {:ok, "PONG"}
end
test "name registration" do
{:ok, pid} = Redix.start_link(name: :redix_server)
assert Process.whereis(:redix_server) == pid
assert Redix.command(:redix_server, ["PING"]) == {:ok, "PONG"}
end
test "passing options along with a Redis URI" do
{:ok, pid} = Redix.start_link("redis://localhost", name: :redix_uri)
assert Process.whereis(:redix_uri) == pid
end
end
test "child_spec/1" do
default_spec = %{
id: Redix,
start: {Redix, :start_link, []},
type: :worker
}
args_path = [:start, Access.elem(2)]
assert Redix.child_spec("redis://localhost") ==
put_in(default_spec, args_path, ["redis://localhost"])
assert Redix.child_spec([]) == put_in(default_spec, args_path, [[]])
assert Redix.child_spec(name: :redix) == put_in(default_spec, args_path, [[name: :redix]])
assert Redix.child_spec({"redis://localhost", name: :redix}) ==
put_in(default_spec, args_path, ["redis://localhost", [name: :redix]])
end
describe "stop/1" do
test "stops the connection" do
{:ok, pid} = Redix.start_link()
ref = Process.monitor(pid)
assert Redix.stop(pid) == :ok
assert_receive {:DOWN, ^ref, _, _, :normal}, 500
end
test "closes the socket as well" do
{:ok, pid} = Redix.start_link(sync_connect: true)
      # This is a hack to get the socket. If a better way comes up, we can switch to it.
{_, data} = :sys.get_state(pid)
assert Port.info(data.socket) != nil
assert Redix.stop(pid) == :ok
assert Port.info(data.socket) == nil
end
end
describe "command/2" do
setup :connect
test "PING", %{conn: c} do
assert Redix.command(c, ["PING"]) == {:ok, "PONG"}
end
test "transactions - MULTI/EXEC", %{conn: c} do
assert Redix.command(c, ["MULTI"]) == {:ok, "OK"}
assert Redix.command(c, ["INCR", "multifoo"]) == {:ok, "QUEUED"}
assert Redix.command(c, ["INCR", "multibar"]) == {:ok, "QUEUED"}
assert Redix.command(c, ["INCRBY", "multifoo", 4]) == {:ok, "QUEUED"}
assert Redix.command(c, ["EXEC"]) == {:ok, [1, 1, 5]}
end
test "transactions - MULTI/DISCARD", %{conn: c} do
Redix.command!(c, ["SET", "discarding", "foo"])
assert Redix.command(c, ["MULTI"]) == {:ok, "OK"}
assert Redix.command(c, ["SET", "discarding", "bar"]) == {:ok, "QUEUED"}
# Discarding
assert Redix.command(c, ["DISCARD"]) == {:ok, "OK"}
assert Redix.command(c, ["GET", "discarding"]) == {:ok, "foo"}
end
test "Lua scripting - EVAL", %{conn: c} do
script = """
redis.call("SET", "evalling", "yes")
return {KEYS[1],ARGV[1],ARGV[2]}
"""
cmds = ["eval", script, "1", "key", "first", "second"]
assert Redix.command(c, cmds) == {:ok, ["key", "first", "second"]}
assert Redix.command(c, ["GET", "evalling"]) == {:ok, "yes"}
end
test "command/2 - Lua scripting: SCRIPT LOAD, SCRIPT EXISTS, EVALSHA", %{conn: c} do
script = """
return 'hello world'
"""
{:ok, sha} = Redix.command(c, ["SCRIPT", "LOAD", script])
assert is_binary(sha)
assert Redix.command(c, ["SCRIPT", "EXISTS", sha, "foo"]) == {:ok, [1, 0]}
# Eval'ing the script
assert Redix.command(c, ["EVALSHA", sha, 0]) == {:ok, "hello world"}
end
test "Redis errors", %{conn: c} do
{:ok, _} = Redix.command(c, ~w(SET errs foo))
message = "ERR value is not an integer or out of range"
assert Redix.command(c, ~w(INCR errs)) == {:error, %Redix.Error{message: message}}
end
test "passing an empty list returns an error", %{conn: c} do
message = "got an empty command ([]), which is not a valid Redis command"
assert_raise ArgumentError, message, fn -> Redix.command(c, []) end
end
test "timeout", %{conn: c} do
assert {:error, %ConnectionError{reason: :timeout}} = Redix.command(c, ~W(PING), timeout: 0)
end
test "Redix process crashes while waiting", %{conn: conn} do
Process.flag(:trap_exit, true)
pid =
spawn_link(fn ->
Redix.command(conn, ~w(BLPOP mid_command_disconnection 0))
end)
      # We sleep to allow the spawned process to issue the command to Redix.
Process.sleep(100)
Process.exit(conn, :kill)
assert_receive {:EXIT, ^conn, :killed}
assert_receive {:EXIT, ^pid, :killed}
end
test "passing a non-list as the command", %{conn: c} do
message = "expected a list of binaries as each Redis command, got: \"PING\""
assert_raise ArgumentError, message, fn ->
Redix.command(c, "PING")
end
end
end
describe "pipeline/2" do
setup :connect
test "basic interaction", %{conn: c} do
commands = [
["SET", "pipe", "10"],
["INCR", "pipe"],
["GET", "pipe"]
]
assert Redix.pipeline(c, commands) == {:ok, ["OK", 11, "11"]}
end
test "a lot of commands so that TCP gets stressed", %{conn: c} do
assert {:ok, "OK"} = Redix.command(c, ~w(SET stress_pipeline foo))
ncommands = 10000
commands = List.duplicate(~w(GET stress_pipeline), ncommands)
# Let's do it twice to be sure the server can handle the data.
{:ok, results} = Redix.pipeline(c, commands)
assert length(results) == ncommands
{:ok, results} = Redix.pipeline(c, commands)
assert length(results) == ncommands
end
test "a single command should still return a list of results", %{conn: c} do
assert Redix.pipeline(c, [["PING"]]) == {:ok, ["PONG"]}
end
test "Redis errors in the response", %{conn: c} do
msg = "ERR value is not an integer or out of range"
assert {:ok, resp} = Redix.pipeline(c, [~w(SET pipeline_errs foo), ~w(INCR pipeline_errs)])
assert resp == ["OK", %Error{message: msg}]
end
test "passing an empty list of commands raises an error", %{conn: c} do
msg = "no commands passed to the pipeline"
assert_raise ArgumentError, msg, fn -> Redix.pipeline(c, []) end
end
test "passing one or more empty commands returns an error", %{conn: c} do
message = "got an empty command ([]), which is not a valid Redis command"
assert_raise ArgumentError, message, fn ->
Redix.pipeline(c, [[]])
end
assert_raise ArgumentError, message, fn ->
Redix.pipeline(c, [["PING"], [], ["PING"]])
end
end
test "passing a PubSub command causes an error", %{conn: c} do
assert_raise ArgumentError, ~r{Redix doesn't support Pub/Sub}, fn ->
Redix.pipeline(c, [["PING"], ["SUBSCRIBE", "foo"]])
end
end
test "timeout", %{conn: c} do
assert {:error, %ConnectionError{reason: :timeout}} =
Redix.pipeline(c, [~w(PING), ~w(PING)], timeout: 0)
end
test "commands must be lists of binaries", %{conn: c} do
message = "expected a list of Redis commands, got: \"PING\""
assert_raise ArgumentError, message, fn ->
Redix.pipeline(c, "PING")
end
message = "expected a list of binaries as each Redis command, got: \"PING\""
assert_raise ArgumentError, message, fn ->
Redix.pipeline(c, ["PING"])
end
end
test "emits Telemetry events on successful pipelines", %{conn: c} do
{test_name, _arity} = __ENV__.function
parent = self()
ref = make_ref()
handler = fn event, measurements, meta, _config ->
if meta.connection == c do
assert event == [:redix, :pipeline]
assert is_integer(measurements.elapsed_time) and measurements.elapsed_time > 0
assert meta.commands == [["PING"]]
assert is_integer(meta.start_time)
end
send(parent, ref)
end
:telemetry.attach(to_string(test_name), [:redix, :pipeline], handler, :no_config)
assert {:ok, ["PONG"]} = Redix.pipeline(c, [["PING"]])
assert_receive ^ref
:telemetry.detach(to_string(test_name))
end
test "emits Telemetry events on error pipelines", %{conn: c} do
{test_name, _arity} = __ENV__.function
parent = self()
ref = make_ref()
handler = fn event, measurements, meta, _config ->
if meta.connection == c do
assert event == [:redix, :pipeline, :error]
assert measurements == %{}
assert meta.commands == [["PING"], ["PING"]]
assert is_integer(meta.start_time)
assert meta.reason == %ConnectionError{reason: :timeout}
end
send(parent, ref)
end
:telemetry.attach(to_string(test_name), [:redix, :pipeline, :error], handler, :no_config)
assert {:error, %ConnectionError{reason: :timeout}} =
Redix.pipeline(c, [~w(PING), ~w(PING)], timeout: 0)
assert_receive ^ref
:telemetry.detach(to_string(test_name))
end
end
describe "command!/2" do
setup :connect
test "simple commands", %{conn: c} do
assert Redix.command!(c, ["PING"]) == "PONG"
assert Redix.command!(c, ["SET", "bang", "foo"]) == "OK"
assert Redix.command!(c, ["GET", "bang"]) == "foo"
end
test "Redis errors", %{conn: c} do
assert_raise Redix.Error, ~r/ERR unknown command .NONEXISTENT./, fn ->
Redix.command!(c, ["NONEXISTENT"])
end
"OK" = Redix.command!(c, ["SET", "bang_errors", "foo"])
assert_raise Redix.Error, "ERR value is not an integer or out of range", fn ->
Redix.command!(c, ["INCR", "bang_errors"])
end
end
test "connection errors", %{conn: c} do
assert_raise Redix.ConnectionError, ":timeout", fn ->
Redix.command!(c, ["PING"], timeout: 0)
end
end
end
describe "pipeline!/2" do
setup :connect
test "simple commands", %{conn: c} do
assert Redix.pipeline!(c, [~w(SET ppbang foo), ~w(GET ppbang)]) == ~w(OK foo)
end
test "Redis errors in the list of results", %{conn: c} do
commands = [~w(SET ppbang_errors foo), ~w(INCR ppbang_errors)]
msg = "ERR value is not an integer or out of range"
assert Redix.pipeline!(c, commands) == ["OK", %Redix.Error{message: msg}]
end
test "connection errors", %{conn: c} do
assert_raise Redix.ConnectionError, ":timeout", fn ->
Redix.pipeline!(c, [["PING"]], timeout: 0)
end
end
end
describe "transaction_pipeline/3" do
setup :connect
test "non-bang version", %{conn: conn} do
commands = [~w(SET transaction_pipeline_key 1), ~w(GET transaction_pipeline_key)]
assert Redix.transaction_pipeline(conn, commands) == {:ok, ["OK", "1"]}
end
test "bang version", %{conn: conn} do
commands = [~w(SET transaction_pipeline_key 1), ~w(GET transaction_pipeline_key)]
assert Redix.transaction_pipeline!(conn, commands) == ["OK", "1"]
end
end
describe "noreply_* functions" do
setup :connect
test "noreply_pipeline/3", %{conn: conn} do
commands = [~w(INCR noreply_pl_mykey), ~w(INCR noreply_pl_mykey)]
assert Redix.noreply_pipeline(conn, commands) == :ok
assert Redix.command!(conn, ~w(GET noreply_pl_mykey)) == "2"
end
test "noreply_command/3", %{conn: conn} do
assert Redix.noreply_command(conn, ["SET", "noreply_cmd_mykey", "myvalue"]) == :ok
assert Redix.command!(conn, ["GET", "noreply_cmd_mykey"]) == "myvalue"
end
end
describe "timeouts and network errors" do
setup :connect
test "client suicide and reconnections", %{conn: c} do
capture_log(fn ->
assert {:ok, _} = Redix.command(c, ~w(QUIT))
        # When the socket is closed, we reply with {:error, :closed}. We sleep so
        # we're sure that the socket is closed (and we don't get {:error,
        # :disconnected} before the socket actually closes after we sent the PING
        # command to Redix).
:timer.sleep(100)
assert Redix.command(c, ~w(PING)) == {:error, %ConnectionError{reason: :closed}}
# Redix retries the first reconnection after 500ms, and we waited 100 already.
:timer.sleep(500)
assert {:ok, "PONG"} = Redix.command(c, ~w(PING))
end)
end
test "timeouts", %{conn: c} do
assert {:error, %ConnectionError{reason: :timeout}} = Redix.command(c, ~w(PING), timeout: 0)
# Let's check that the Redix connection doesn't reply anyways, even if the
# timeout happened.
refute_receive {_ref, _message}
end
test "mid-command disconnections", %{conn: conn} do
{:ok, kill_conn} = Redix.start_link()
capture_log(fn ->
task = Task.async(fn -> Redix.command(conn, ~w(BLPOP mid_command_disconnection 0)) end)
# Give the task the time to issue the command to Redis, then kill the connection.
Process.sleep(50)
Redix.command!(kill_conn, ~w(CLIENT KILL TYPE normal SKIPME yes))
assert Task.await(task, 100) == {:error, %ConnectionError{reason: :disconnected}}
end)
end
test "no leaking messages when timeout happens at the same time as disconnections", %{
conn: conn
} do
{:ok, kill_conn} = Redix.start_link()
capture_log(fn ->
{_pid, ref} =
Process.spawn(
fn ->
error = %ConnectionError{reason: :timeout}
assert Redix.command(conn, ~w(BLPOP my_list 0), timeout: 0) == {:error, error}
# The fact that we timed out should be respected here, even if the
# connection is killed (no {:error, :disconnected} message should
# arrive).
refute_receive {_ref, _message}
end,
[:link, :monitor]
)
# Give the process time to issue the command to Redis, then kill the connection.
Process.sleep(50)
Redix.command!(kill_conn, ~w(CLIENT KILL TYPE normal SKIPME yes))
assert_receive {:DOWN, ^ref, _, _, _}, 200
end)
end
end
test ":exit_on_disconnection option" do
{:ok, c} = Redix.start_link(exit_on_disconnection: true)
Process.flag(:trap_exit, true)
capture_log(fn ->
Redix.command!(c, ~w(QUIT))
assert_receive {:EXIT, ^c, %ConnectionError{reason: :tcp_closed}}
end)
end
defp connect(_context) do
{:ok, conn} = Redix.start_link()
{:ok, %{conn: conn}}
end
end
| 32.239923 | 98 | 0.598976 |
7302e9cf608f31b522ab129f23252bd8c05df922 | 3,288 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/creative_native_ad.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/creative_native_ad.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/creative_native_ad.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Model.CreativeNativeAd do
@moduledoc """
If nativeAd is set, HTMLSnippet and the videoURL outside of nativeAd should not be set. (The videoURL inside nativeAd can be set.)
## Attributes
- advertiser (String): Defaults to: `null`.
- appIcon (CreativeNativeAdAppIcon): Defaults to: `null`.
- body (String): A long description of the ad. Defaults to: `null`.
- callToAction (String): A label for the button that the user is supposed to click. Defaults to: `null`.
- clickLinkUrl (String): The URL that the browser/SDK will load when the user clicks the ad. Defaults to: `null`.
- clickTrackingUrl (String): The URL to use for click tracking. Defaults to: `null`.
- headline (String): A short title for the ad. Defaults to: `null`.
- image (CreativeNativeAdImage): Defaults to: `null`.
- impressionTrackingUrl (List[String]): The URLs are called when the impression is rendered. Defaults to: `null`.
- logo (CreativeNativeAdLogo): Defaults to: `null`.
- price (String): The price of the promoted app including the currency info. Defaults to: `null`.
- starRating (Float): The app rating in the app store. Must be in the range [0-5]. Defaults to: `null`.
- store (String): The URL to the app store to purchase/download the promoted app. Defaults to: `null`.
- videoURL (String): The URL of the XML VAST for a native ad. Note this is a separate field from resource.video_url. Defaults to: `null`.
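
  Example (a sketch; all field values below are illustrative):

      %GoogleApi.AdExchangeBuyer.V14.Model.CreativeNativeAd{
        headline: "A short title",
        body: "A long description of the ad",
        callToAction: "Install",
        clickLinkUrl: "https://example.com/landing",
        starRating: 4.5
      }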
"""
defstruct [
:advertiser,
:appIcon,
:body,
:callToAction,
:clickLinkUrl,
:clickTrackingUrl,
:headline,
:image,
:impressionTrackingUrl,
:logo,
:price,
:starRating,
:store,
:videoURL
]
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V14.Model.CreativeNativeAd do
import GoogleApi.AdExchangeBuyer.V14.Deserializer
def decode(value, options) do
value
|> deserialize(
:appIcon,
:struct,
GoogleApi.AdExchangeBuyer.V14.Model.CreativeNativeAdAppIcon,
options
)
|> deserialize(
:image,
:struct,
GoogleApi.AdExchangeBuyer.V14.Model.CreativeNativeAdImage,
options
)
|> deserialize(
:logo,
:struct,
GoogleApi.AdExchangeBuyer.V14.Model.CreativeNativeAdLogo,
options
)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V14.Model.CreativeNativeAd do
def encode(value, options) do
GoogleApi.AdExchangeBuyer.V14.Deserializer.serialize_non_nil(value, options)
end
end
| 36.533333 | 139 | 0.716241 |
73032d8cecd66a00e0e42658b8493fb1e721533e | 3,197 | ex | Elixir | lib/koans/03_numbers.ex | zups/elixir-koans | ac036085be4403421d4aff713f5c4328a5624662 | [
"MIT"
] | null | null | null | lib/koans/03_numbers.ex | zups/elixir-koans | ac036085be4403421d4aff713f5c4328a5624662 | [
"MIT"
] | null | null | null | lib/koans/03_numbers.ex | zups/elixir-koans | ac036085be4403421d4aff713f5c4328a5624662 | [
"MIT"
] | null | null | null | defmodule Numbers do
require Integer
use Koans
@intro "Why is the number six so scared? Because seven eight nine!\nWe should get to know numbers a bit more!"
koan "Is an integer equal to its float equivalent?" do
assert 1 == 1.0 == true
end
koan "Is an integer threequal to its float equivalent?" do
assert 1 === 1.0 == false
end
koan "Revisit division with threequal" do
assert 2 / 2 === 1.0
end
koan "Another way to divide" do
assert div(5, 2) == 2
end
koan "What remains or: The Case of the Missing Modulo Operator (%)" do
assert rem(5, 2) == 1
end
koan "Other math operators may produce this" do
assert 2 * 2 === 4
end
koan "Or other math operators may produce this" do
assert 2 * 2.0 === 4.0
end
koan "Two ways to round, are they exactly the same?" do
assert Float.round(1.2) === round(1.2) == false
end
koan "Release the decimals into the void" do
assert trunc(5.6) === 5
end
koan "Are you odd?" do
assert Integer.is_odd(3) == true
end
koan "Actually you might be even" do
assert Integer.is_even(4) == true
end
koan "Let's grab the individual digits in a list" do
individual_digits = Integer.digits(58127)
assert individual_digits == [5,8,1,2,7]
end
koan "Oh no! I need it back together" do
number = Integer.undigits([1, 2, 3, 4])
assert number == 1234
end
koan "Actually I want my number as a string" do
string_digit = Integer.to_string(1234)
assert string_digit == "1234"
end
koan "The meaning of life in hexadecimal is 2A!" do
assert Integer.parse("2A", 16) == {42, ""}
end
koan "The remaining unparsable part is also returned" do
assert Integer.parse("5 years") == {5, " years"}
end
koan "What if you parse a floating point value as an integer?" do
assert Integer.parse("1.2") == {1, ".2"}
end
koan "Just want to parse to a float" do
assert Float.parse("34.5") == {34.5, ""}
end
koan "Hmm, I want to parse this but it has some strings" do
assert Float.parse("1.5 million dollars") == {1.5, " million dollars"}
end
koan "I don't want this decimal point, let's round up" do
assert Float.ceil(34.25) == 35.0
end
koan "OK, I only want it to 1 decimal place" do
assert Float.ceil(34.25, 1) == 34.3
end
koan "Rounding down is what I need" do
assert Float.floor(99.99) == 99.0
end
koan "Rounding down to 2 decimal places" do
assert Float.floor(12.345, 2) == 12.34
end
koan "Round the number up or down for me" do
assert Float.round(5.5) == 6.0
assert Float.round(5.4) == 5.0
assert Float.round(8.94, 1) == 8.9
assert Float.round(-5.5674, 3) == -5.567
end
koan "I want the first and last in the range" do
first..last = Range.new(1, 10)
assert first == 1
assert last == 10
end
koan "Does my number exist in the range?" do
range = Range.new(1, 10)
assert 4 in range == true
assert 10 in range == true
assert 0 in range == false
end
def is_range?(%Range{}), do: true
def is_range?(_), do: false
koan "Is this a range?" do
assert is_range?(1..10) == true
assert is_range?(0) == false
end
end
| 23.858209 | 112 | 0.635909 |
73035f768f798856f36a2143d026554962133956 | 2,527 | exs | Elixir | mix.exs | tcitworld/cldr_dates_times | 4b2cff026b135caf840c471c42a0d27c4953e2e4 | [
"Apache-2.0"
] | null | null | null | mix.exs | tcitworld/cldr_dates_times | 4b2cff026b135caf840c471c42a0d27c4953e2e4 | [
"Apache-2.0"
] | null | null | null | mix.exs | tcitworld/cldr_dates_times | 4b2cff026b135caf840c471c42a0d27c4953e2e4 | [
"Apache-2.0"
] | null | null | null | defmodule CldrDatesTimes.Mixfile do
use Mix.Project
@version "2.4.0-rc.0"
def project do
[
app: :ex_cldr_dates_times,
version: @version,
name: "Cldr Dates & Times",
source_url: "https://github.com/elixir-cldr/cldr_dates_times",
docs: docs(),
elixir: "~> 1.8",
description: description(),
package: package(),
start_permanent: Mix.env() == :prod,
deps: deps(),
compilers: Mix.compilers(),
elixirc_paths: elixirc_paths(Mix.env()),
dialyzer: [
ignore_warnings: ".dialyzer_ignore_warnings"
]
]
end
defp description do
"""
Date, Time and DateTime localization, internationalization and formatting
functions using the Common Locale Data Repository (CLDR).
"""
end
def application do
[
extra_applications: [:logger]
]
end
def docs do
[
source_ref: "v#{@version}",
main: "readme",
extras: ["README.md", "CHANGELOG.md", "LICENSE.md"],
logo: "logo.png",
groups_for_modules: groups_for_modules(),
skip_undefined_reference_warnings_on: ["changelog", "readme"]
]
end
defp groups_for_modules do
[
Helpers: [
Cldr.DateTime.Compiler,
Cldr.DateTime.Format,
Cldr.DateTime.Formatter,
Cldr.DateTime.Timezone
]
]
end
defp deps do
[
{:ex_cldr_numbers, "~> 2.13-rc"},
{:ex_cldr_calendars, "~> 1.8-rc"},
{:ex_doc, "~> 0.18", optional: true, only: [:dev, :release], runtime: false},
{:jason, "~> 1.0", optional: true},
{:benchee, "~> 1.0", optional: true, only: :dev, runtime: false},
{:dialyxir, "~> 1.0", only: [:dev], runtime: false}
]
end
defp package do
[
maintainers: ["Kip Cole"],
licenses: ["Apache 2.0"],
links: links(),
files: [
"lib",
"src/datetime_format_lexer.xrl",
"config",
"mix.exs",
"README*",
"CHANGELOG*",
"LICENSE*"
]
]
end
def links do
%{
"GitHub" => "https://github.com/elixir-cldr/cldr_dates_times",
"Changelog" =>
"https://github.com/elixir-cldr/cldr_dates_times/blob/v#{@version}/CHANGELOG.md",
"Readme" => "https://github.com/elixir-cldr/cldr_dates_times/blob/v#{@version}/README.md"
}
end
defp elixirc_paths(:test), do: ["lib", "mix", "test"]
defp elixirc_paths(:dev), do: ["lib", "mix"]
defp elixirc_paths(:docs), do: ["lib", "mix"]
defp elixirc_paths(_), do: ["lib"]
end
| 24.298077 | 95 | 0.574594 |
73037788c0ee469b0de723c30cdd701466b3d84d | 2,139 | ex | Elixir | src/lib/changelog/data/news/news_ad.ex | thenets/docker-chocoquest | 856fa5ff41a5831ed67b1ef865cd8951df5af023 | [
"MIT"
] | 1 | 2018-01-22T20:07:10.000Z | 2018-01-22T20:07:10.000Z | lib/changelog/data/news/news_ad.ex | joebew42/changelog.com | da4ec68d15f3a2b4b6c29033443d7e7afe814d18 | [
"MIT"
] | null | null | null | lib/changelog/data/news/news_ad.ex | joebew42/changelog.com | da4ec68d15f3a2b4b6c29033443d7e7afe814d18 | [
"MIT"
] | null | null | null | defmodule Changelog.NewsAd do
use Changelog.Data
alias Changelog.{Files, NewsIssueAd, NewsSponsorship, Regexp}
schema "news_ads" do
field :url, :string
field :headline, :string
field :story, :string
field :image, Files.Image.Type
field :active, :boolean, default: true
field :newsletter, :boolean, default: false
field :impression_count, :integer, default: 0
field :click_count, :integer, default: 0
field :delete, :boolean, virtual: true
belongs_to :sponsorship, NewsSponsorship
has_many :news_issue_ads, NewsIssueAd, foreign_key: :ad_id, on_delete: :delete_all
has_many :issues, through: [:news_issue_ads, :issue]
timestamps()
end
def changeset(ad, attrs \\ %{}) do
ad
|> cast(attrs, ~w(url headline story active newsletter delete))
|> cast_attachments(attrs, ~w(image))
|> validate_required([:url, :headline])
|> validate_format(:url, Regexp.http, message: Regexp.http_message)
|> foreign_key_constraint(:sponsorship_id)
|> mark_for_deletion()
end
def preload_all(ad) do
ad
|> preload_issues
|> preload_sponsorship
end
def preload_issues(ad) do
ad
|> Repo.preload(news_issue_ads: {NewsIssueAd.by_position, :issue})
|> Repo.preload(:issues)
end
def preload_sponsorship(query = %Ecto.Query{}), do: Ecto.Query.preload(query, sponsorship: :sponsor)
def preload_sponsorship(ad), do: Repo.preload(ad, sponsorship: :sponsor)
def has_no_issues(ad), do: preload_issues(ad).issues |> Enum.empty?
def track_click(ad) do
ad
|> change(%{click_count: ad.click_count + 1})
|> Repo.update!
ad.sponsorship
|> change(%{click_count: ad.sponsorship.click_count + 1})
|> Repo.update!
end
def track_impression(ad) do
ad
|> change(%{impression_count: ad.impression_count + 1})
|> Repo.update!
ad.sponsorship
|> change(%{impression_count: ad.sponsorship.impression_count + 1})
|> Repo.update!
end
defp mark_for_deletion(changeset) do
if get_change(changeset, :delete) do
%{changeset | action: :delete}
else
changeset
end
end
end
| 26.085366 | 102 | 0.680692 |
73038f3402c20e9aa8c233e0b45373d633b4c4a2 | 74 | exs | Elixir | apps/crm_web/test/crm_web/views/layout_view_test.exs | jdambron/phoenix_crm | 6d17280e3a6655990840f4714a338f58b7d0c756 | [
"MIT"
] | null | null | null | apps/crm_web/test/crm_web/views/layout_view_test.exs | jdambron/phoenix_crm | 6d17280e3a6655990840f4714a338f58b7d0c756 | [
"MIT"
] | 1 | 2020-04-06T07:42:36.000Z | 2020-04-06T07:42:36.000Z | apps/crm_web/test/crm_web/views/layout_view_test.exs | jdambron/phoenix_crm | 6d17280e3a6655990840f4714a338f58b7d0c756 | [
"MIT"
] | null | null | null | defmodule CrmWeb.LayoutViewTest do
use CrmWeb.ConnCase, async: true
end
| 18.5 | 34 | 0.810811 |
730399d051ef85e6d1f01c40d9ecce858efb19fb | 357 | exs | Elixir | elixir/emit_log.exs | Diffblue-benchmarks/Rabbitmq-rabbitmq-tutorials | b887f2313a8698bd7b2eb212ea7987eb7d99b6fb | [
"Apache-2.0"
] | 5,584 | 2015-01-01T18:08:07.000Z | 2022-03-30T19:22:09.000Z | elixir/emit_log.exs | jeff9571/rabbitmq-tutorials | 180705662b16063d1983a4a82ebca3c2ec83d8d3 | [
"Apache-2.0"
] | 210 | 2015-01-01T04:34:10.000Z | 2022-03-15T14:18:21.000Z | elixir/emit_log.exs | jeff9571/rabbitmq-tutorials | 180705662b16063d1983a4a82ebca3c2ec83d8d3 | [
"Apache-2.0"
] | 3,682 | 2015-01-02T06:49:40.000Z | 2022-03-30T06:55:06.000Z | {:ok, connection} = AMQP.Connection.open
{:ok, channel} = AMQP.Channel.open(connection)
message =
case System.argv do
[] -> "Hello World!"
words -> Enum.join(words, " ")
end
AMQP.Exchange.declare(channel, "logs", :fanout)
AMQP.Basic.publish(channel, "logs", "", message)
IO.puts " [x] Sent '#{message}'"
AMQP.Connection.close(connection)
| 23.8 | 48 | 0.658263 |
7303aa8c1319c668e6941538105226e4b30d0905 | 5,167 | exs | Elixir | test/crawlie/stage/url_manager_test.exs | kianmeng/crawlie | 19883f17a208107927ba14d15312f5a908d5e8ea | [
"MIT"
] | 91 | 2016-12-29T12:31:14.000Z | 2021-09-25T23:09:34.000Z | test/crawlie/stage/url_manager_test.exs | kianmeng/crawlie | 19883f17a208107927ba14d15312f5a908d5e8ea | [
"MIT"
] | 40 | 2016-12-14T00:55:52.000Z | 2022-01-29T08:46:03.000Z | test/crawlie/stage/url_manager_test.exs | kianmeng/crawlie | 19883f17a208107927ba14d15312f5a908d5e8ea | [
"MIT"
] | 10 | 2017-04-06T11:18:10.000Z | 2021-10-30T00:04:09.000Z | defmodule Crawlie.Stage.UrlManagerTest do
use ExUnit.Case
import ExUnit.CaptureLog
alias Crawlie.Page
alias Crawlie.Stage.UrlManager
alias Crawlie.PqueueWrapper, as: PW
alias UrlManager.State
doctest UrlManager
@foo URI.parse("foo")
@pfoo Page.new(@foo)
@bar URI.parse("bar")
@pbar Page.new(@bar)
@baz URI.parse("baz")
@pbaz Page.new(@baz)
@pages [@pfoo, @pbar, @pbaz]
@pq_module :pqueue3
@options [foo: :bar, pqueue_module: @pq_module]
#---------------------------------------------------------------------------
# testing State
#---------------------------------------------------------------------------
test "constructor" do
empty = State.new([], @options)
assert State.finished_crawling?(empty) == true
state = State.new(@pages, @options)
assert length(@pages) == PW.len(state.discovered)
assert Enum.sort(@pages) == Enum.sort(PW.all(state.discovered))
assert state.options == @options
assert Enum.count(state.visited) == 3
assert State.finished_crawling?(state) == false
end
test "take_pages takes the pages from the priority queue" do
state = State.new(@pages, @options)
assert State.finished_crawling?(state) == false
{new_state, pages} = State.take_pages(state, 2)
# those could actually be any other two, but this is easier to test ;)
assert pages == [@pbar, @pfoo]
assert new_state.options == state.options
assert PW.len(new_state.discovered) == 1
assert {_, @pbaz} = PW.take(new_state.discovered)
assert State.finished_crawling?(new_state) == false
end
test "take_pages handles the case where everything gets empty" do
state = State.new(@pages, @options)
{new_state, pages} = State.take_pages(state, 66)
assert Enum.sort(pages) == Enum.sort(@pages)
assert new_state.options == state.options
refute State.finished_crawling?(new_state)
state = new_state
[a, b, c] = @pages
state = State.finished_processing(state, a.uri)
refute State.finished_crawling?(state)
state = State.finished_processing(state, b.uri)
refute State.finished_crawling?(state)
state = State.finished_processing(state, c.uri)
assert State.finished_crawling?(state)
end
test "add_pages/2" do
state = State.new([], [max_depth: 5, max_retries: 3] ++ @options)
p1 = %Page{uri: uri(@foo), depth: 5, retries: 3}
p2 = %Page{uri: uri(@bar), depth: 6, retries: 0} # too deep
p3 = %Page{uri: uri(@bar), depth: 1, retries: 4} # too many retries
assert capture_log(fn ->
new_state = State.add_pages(state, [p1, p2, p3])
assert PW.len(new_state.discovered) == 1
assert {_, ^p1} = PW.take(new_state.discovered)
end) =~ "[error] Trying to add a page \"bar\" with 'depth' > max_depth:"
end
test "add_pages/2 doesn't add duplicate uris" do
state = State.new([], [max_depth: 5] ++ @options)
p1 = %Page{uri: uri(@foo), depth: 0, retries: 0}
p2 = %Page{uri: uri(@foo), depth: 1, retries: 0}
new_state = State.add_pages(state, [p1, p2])
assert PW.len(new_state.discovered) == 1
assert {_, ^p1} = PW.take(new_state.discovered)
end
test "adding a page that was retrieved before doesn't make it fetched again" do
pages = [@foo, @bar] |> Enum.map(&Page.new/1)
state = State.new(pages, [max_depth: 5, max_retries: 3] ++ @options)
{state, _} = State.take_pages(state, 10)
retried = %Page{uri: uri(@foo), depth: 1, retries: 0}
state = State.add_pages(state, [retried])
assert {_, pages} = State.take_pages(state, 10)
assert pages == []
end
test "even if the input urls contain duplicates, the output ones don't" do
pages = [@foo, @foo, @bar, @baz] |> Enum.map(&Page.new/1)
pages2 = [@bar, "ban"] |> Enum.map(&Page.new/1)
state = State.new(pages, [max_depth: 5, max_retries: 3] ++ @options)
state = State.add_pages(state, pages2)
{_state, pages} = State.take_pages(state, 10)
assert Enum.sort(pages) == Enum.sort(Enum.map([@foo, @bar, @baz, "ban"], &Page.new/1))
end
test "tracks items in-flight" do
empty = State.new([], @options)
assert State.finished_crawling?(empty)
state = State.started_processing(empty, @foo)
refute State.finished_crawling?(state)
state = State.started_processing(state, @bar)
refute State.finished_crawling?(state)
state = State.finished_processing(state, @bar)
refute State.finished_crawling?(state)
state = State.finished_processing(state, @foo)
assert State.finished_crawling?(state)
end
#---------------------------------------------------------------------------
# Testing Manager
#---------------------------------------------------------------------------
test "init/1" do
args = %{pages: @pages, crawlie_options: @options}
assert {:producer, state} = UrlManager.init(args)
assert PW.len(state.discovered) == 3
assert state.options == @options
end
#===========================================================================
# Helper Functions
#===========================================================================
defp uri(url), do: URI.parse(url)
end
| 31.895062 | 90 | 0.602671 |
7303cf5fcc98b37e578d101c32c1bb88e260dd7e | 1,229 | exs | Elixir | mix.exs | enjolras1205/exrm | b34bdde13ad65e3d3cd9e6f42cf9f378dcca188d | [
"MIT"
] | null | null | null | mix.exs | enjolras1205/exrm | b34bdde13ad65e3d3cd9e6f42cf9f378dcca188d | [
"MIT"
] | null | null | null | mix.exs | enjolras1205/exrm | b34bdde13ad65e3d3cd9e6f42cf9f378dcca188d | [
"MIT"
] | null | null | null | defmodule ReleaseManager.Mixfile do
use Mix.Project
def project do
[ app: :exrm,
version: "1.0.9",
elixir: "~> 1.0",
description: description(),
package: package(),
deps: deps(),
docs: docs(),
test_coverage: [tool: Coverex.Task, coveralls: true]]
end
def application, do: [
applications: [:logger, :relx]
]
def deps do
[{:relx, "~> 3.5" },
{:earmark, "~> 1.0", only: :dev},
{:ex_doc, "~> 0.13", only: :dev},
{:coverex, "~> 1.4", only: :test}]
end
defp description do
"""
Exrm, or Elixir Release Manager, provides mix tasks for building,
upgrading, and controlling release packages for your application.
"""
end
defp package do
[ files: ["lib", "priv", "mix.exs", "README.md", "LICENSE"],
maintainers: ["Paul Schoenfelder"],
licenses: ["MIT"],
links: %{ "GitHub": "https://github.com/bitwalker/exrm" } ]
end
defp docs do
[main: "getting-started",
extras: [
"docs/Getting Started.md",
"docs/Release Configuration.md",
"docs/Deployment.md",
"docs/Upgrades and Downgrades.md",
"docs/Common Issues.md",
"docs/Examples.md"
]]
end
end
| 23.188679 | 69 | 0.567941 |
7303f917ef0ad9632333155807a2091fa688efa3 | 1,725 | exs | Elixir | test/integration/prefetch_count_2_test.exs | mariosant/ex-tackle | 4b9e47f6a6b02acd275596da1f8aa389888785bc | [
"MIT"
] | null | null | null | test/integration/prefetch_count_2_test.exs | mariosant/ex-tackle | 4b9e47f6a6b02acd275596da1f8aa389888785bc | [
"MIT"
] | null | null | null | test/integration/prefetch_count_2_test.exs | mariosant/ex-tackle | 4b9e47f6a6b02acd275596da1f8aa389888785bc | [
"MIT"
] | null | null | null | defmodule Tackle.ParallelMessageHandling_2_Test do
use ExSpec
alias Support
use Tackle.Consumer,
url: "amqp://localhost",
exchange: "test-prefetch-2-exchange",
routing_key: "prefetch",
service: "prefetch-count-service",
prefetch_count: 2
def handle_message(message) do
"#PID" <> spid = message
      sup = spid |> String.to_charlist() |> :erlang.list_to_pid()
      # Occupy a prefetch slot indefinitely; the task only dies when the test
      # kills it, and its :DOWN message is what unblocks the receive below.
      Task.Supervisor.async_nolink(sup, fn -> :timer.sleep(:infinity) end)
      receive do
        _msg -> nil
      end
end
end
@publish_options %{
url: "amqp://localhost",
exchange: "test-prefetch-2-exchange",
routing_key: "prefetch",
}
setup do
{:ok, _} = TestConsumer.start_link
    {:ok, sup} = Task.Supervisor.start_link()
:timer.sleep(1000)
{:ok, [sup: sup]}
end
describe "parallel message handling" do
it "handles messages in pairs", context do
sup = context[:sup]
Tackle.publish(sup |> inspect, @publish_options)
Tackle.publish(sup |> inspect, @publish_options)
Tackle.publish(sup |> inspect, @publish_options)
Tackle.publish(sup |> inspect, @publish_options)
:timer.sleep(1000)
assert Task.Supervisor.children(sup) |> Enum.count == 2
Task.Supervisor.children(sup)
|> Enum.each(fn pid -> Task.Supervisor.terminate_child(sup, pid) end)
:timer.sleep(1000)
assert Task.Supervisor.children(sup) |> Enum.count == 2
Task.Supervisor.children(sup)
|> Enum.each(fn pid -> Task.Supervisor.terminate_child(sup, pid) end)
:timer.sleep(1000)
assert Task.Supervisor.children(sup) |> Enum.count == 0
end
end
end
| 26.136364 | 75 | 0.653333 |
73044c5b7ba8af76609712b910ce32c0ac5552fe | 1,391 | ex | Elixir | lib/utils.ex | rramsden/scribe | 4ef506714140da032a76d62b39109cf7a79c9d91 | [
"MIT"
] | 1 | 2016-01-24T11:43:59.000Z | 2016-01-24T11:43:59.000Z | lib/utils.ex | rramsden/scribe | 4ef506714140da032a76d62b39109cf7a79c9d91 | [
"MIT"
] | null | null | null | lib/utils.ex | rramsden/scribe | 4ef506714140da032a76d62b39109cf7a79c9d91 | [
"MIT"
] | null | null | null | defmodule Scribe.Utils do
  alias Scribe.Config
@moduledoc """
This module contains utility functions used in Scribe
"""
@doc false
defmacro time(name, block) do
quote do
start_ms = timestamp_ms
puts "== Task: #{unquote(name)} started ==================================="
unquote(block)
puts "== Task: #{unquote(name)} (#{(timestamp_ms - start_ms) / 1000}ms) finished =================================="
end
end
@doc """
Load Scribe configuration file located in db/config.exs
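
  For example (a sketch, assuming the conventional path mentioned above):

      config = Scribe.Utils.load_config("db/config.exs")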
"""
def load_config(config_path) do
{:ok, config} = File.read(config_path)
{config, _} = Code.eval_string(config)
camelized = Mix.Utils.camelize(config[:adapter])
    config = Enum.map(config, fn {key, value} -> {key, String.to_charlist(value)} end)
{adapter, _} = Code.eval_string("Scribe.Adapters.#{camelized}")
config = Config.new(config)
config.adapter(adapter)
end
@doc """
Generate a timestamp using system command date +%s
"""
def timestamp do
System.cmd("date +%s") |> String.strip
end
@doc """
  Get a timestamp from epoch in microseconds (`time/2` divides the difference
  by 1000 to report milliseconds)
  """
  def timestamp_ms do
    {mega, second, micro} = :os.timestamp()
    (mega * 1_000_000 + second) * 1_000_000 + micro
end
def puts(output) do
    # Stay silent under test. IO.ANSI.format/1 stands in for the long-removed
    # IO.ANSI.escape/1; plain binaries pass through unchanged.
    unless Mix.env() == :test do
      IO.puts(IO.ANSI.format(output))
    end
end
end
| 25.290909 | 122 | 0.608914 |
73045a23f1e54aee321191f99c59796fdf414abf | 900 | ex | Elixir | web/models/user.ex | chillicoder/MyRumbl | 79b5c23c8df2b82da1be22fc35ba20891031df84 | [
"MIT"
] | null | null | null | web/models/user.ex | chillicoder/MyRumbl | 79b5c23c8df2b82da1be22fc35ba20891031df84 | [
"MIT"
] | null | null | null | web/models/user.ex | chillicoder/MyRumbl | 79b5c23c8df2b82da1be22fc35ba20891031df84 | [
"MIT"
] | null | null | null | defmodule Rumbl.User do
use Rumbl.Web, :model
schema "users" do
field :name, :string
field :username, :string
field :password, :string, virtual: true
field :password_hash, :string
has_many :videos, Rumbl.Video
timestamps
end
def changeset(model, params \\ :empty) do
model
|> cast(params, ~w(name username), [])
|> validate_length(:username, min: 1, max: 20)
|> unique_constraint(:username)
end
def registration_changeset(model, params) do
model
|> changeset(params)
|> cast(params, ~w(password), [])
|> validate_length(:password, min: 6, max: 100)
|> put_pass_hash()
end
defp put_pass_hash(changeset) do
case changeset do
      %Ecto.Changeset{valid?: true, changes: %{password: pass}} ->
put_change(changeset,:password_hash, Comeonin.Bcrypt.hashpwsalt(pass))
_ ->
changeset
end
end
end
| 23.684211 | 78 | 0.645556 |
7304b97335d3de337b324eb6029924abde280511 | 1,311 | ex | Elixir | lib/hierbautberlin_web/views/map_route_helpers.ex | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 13 | 2021-03-06T12:16:34.000Z | 2022-03-31T09:46:35.000Z | lib/hierbautberlin_web/views/map_route_helpers.ex | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 148 | 2021-03-05T12:44:55.000Z | 2022-03-11T12:09:06.000Z | lib/hierbautberlin_web/views/map_route_helpers.ex | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 2 | 2021-06-02T14:31:21.000Z | 2022-02-14T08:36:51.000Z | defmodule HierbautberlinWeb.MapRouteHelpers do
alias HierbautberlinWeb.Router.Helpers, as: Routes
alias Hierbautberlin.GeoData
alias Hierbautberlin.GeoData.{GeoItem, NewsItem}
def route_to_map(
conn_or_endpoint,
map_position,
map_zoom,
detail_item \\ nil,
detail_item_type \\ nil
) do
route_params = [
lat: to_string(map_position.lat),
lng: to_string(map_position.lng),
zoom: to_string(map_zoom)
]
route_params =
if detail_item do
route_params ++
[
details: to_string(detail_item.id),
detailsType: to_string(detail_item_type)
]
else
route_params
end
Routes.map_path(conn_or_endpoint, :index, route_params)
end
def link_to_details(endpoint, item) do
%{lat: lat, lng: lng} = GeoData.get_point(item)
if lat && lng do
Routes.map_url(endpoint, :index,
lat: Float.to_string(lat),
lng: Float.to_string(lng),
details: item.id,
detailsType: type_of_item(item)
)
else
Routes.map_url(endpoint, :index, details: item.id, detailsType: type_of_item(item))
end
end
def type_of_item(%GeoItem{}) do
"geo_item"
end
def type_of_item(%NewsItem{}) do
"news_item"
end
end
| 23.410714 | 89 | 0.633105 |
7304f4c7a4f7723ad4b80a88ae0e57711d825488 | 158 | ex | Elixir | priv/templates/potion.gen.gql_for_model/model_mock.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | 31 | 2021-02-16T20:50:46.000Z | 2022-02-03T10:38:07.000Z | priv/templates/potion.gen.gql_for_model/model_mock.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | 6 | 2021-04-07T21:50:20.000Z | 2022-02-06T21:54:04.000Z | priv/templates/potion.gen.gql_for_model/model_mock.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | 4 | 2021-03-25T17:59:44.000Z | 2021-04-25T16:28:22.000Z | defmodule <%= module_name_data %>.<%= context_name %>.<%= model_name %>Mock do
def run do
<%= mock %>
end
def run_patch do
<%= mock_patch %>
end
end
| 15.8 | 78 | 0.620253 |
7304f8233075ca47ea5c47598594191ba90261ab | 920 | ex | Elixir | backend/lib/getaways_web/resolvers/accounts.ex | abeyonalaja/pragstudio-unpacked-graphql-code | 1b0a79d62b624208ffc85f94c1d59ca3d3dab044 | [
"Unlicense"
] | null | null | null | backend/lib/getaways_web/resolvers/accounts.ex | abeyonalaja/pragstudio-unpacked-graphql-code | 1b0a79d62b624208ffc85f94c1d59ca3d3dab044 | [
"Unlicense"
] | 2 | 2020-07-17T17:34:46.000Z | 2021-03-09T11:31:50.000Z | backend/lib/getaways_web/resolvers/accounts.ex | abeyonalaja/pragstudio-unpacked-graphql-code | 1b0a79d62b624208ffc85f94c1d59ca3d3dab044 | [
"Unlicense"
] | null | null | null | defmodule GetawaysWeb.Resolvers.Accounts do
alias Getaways.Accounts
alias GetawaysWeb.Schema.ChangesetErrors
def signin(_, %{username: username, password: password}, _) do
case Accounts.authenticate(username, password) do
:error ->
{:error, "Whoops, invalid credentials!"}
{:ok, user} ->
token = GetawaysWeb.AuthToken.sign(user)
{:ok, %{token: token, user: user}}
end
end
def signup(_, args, _) do
case Accounts.create_user(args) do
{:error, changeset} ->
{
:error,
message: "Could not create account",
details: ChangesetErrors.error_details(changeset)
}
{:ok, user} ->
token = GetawaysWeb.AuthToken.sign(user)
{:ok, %{token: token, user: user}}
end
end
def me(_, _, %{context: %{current_user: user}}) do
{:ok, user}
end
def me(_, _, _) do
{:ok, nil}
end
end
| 23.589744 | 64 | 0.594565 |
7304ff0c38be9f967b07b0839d7482badaa285ef | 984 | ex | Elixir | lib/topo/distance.ex | codabrink/topo | f1ca4b7fe337a67285ee4c65a34fb521b119342c | [
"MIT"
] | null | null | null | lib/topo/distance.ex | codabrink/topo | f1ca4b7fe337a67285ee4c65a34fb521b119342c | [
"MIT"
] | null | null | null | lib/topo/distance.ex | codabrink/topo | f1ca4b7fe337a67285ee4c65a34fb521b119342c | [
"MIT"
] | null | null | null | defmodule Topo.Distance do
@moduledoc false
alias Topo.PointLine
alias Topo.Intersects
@type geo_struct ::
%Geo.Point{}
| %Geo.MultiPoint{}
| %Geo.LineString{}
| %Geo.MultiLineString{}
| %Geo.Polygon{}
| %Geo.MultiPolygon{}
@spec distance(geo_struct, geo_struct) :: float
def distance(%Geo.LineString{} = a, %Geo.Point{} = b),
do: PointLine.distance(a.coordinates, b.coordinates)
def distance(%Geo.Point{} = a, %Geo.LineString{} = b), do: distance(b, a)
def distance(%Geo.LineString{} = a, %Geo.LineString{} = b) do
cond do
Intersects.intersects?(a, b) ->
0.0
true ->
min(
Enum.reduce(a.coordinates, fn p, closest ->
min(closest, PointLine.distance(b.coordinates, p))
end),
Enum.reduce(b.coordinates, fn p, closest ->
min(closest, PointLine.distance(a.coordinates, p))
end)
)
end
end
end
| 25.894737 | 75 | 0.571138 |
73051e13d45de36e54fb065e4a8073e05d1c7327 | 4,005 | ex | Elixir | lib/gen_websocket.ex | ejscunha/gen_websocket | e8d88e00cce7e07079898868e6a10edfb0e3973a | [
"MIT"
] | null | null | null | lib/gen_websocket.ex | ejscunha/gen_websocket | e8d88e00cce7e07079898868e6a10edfb0e3973a | [
"MIT"
] | null | null | null | lib/gen_websocket.ex | ejscunha/gen_websocket | e8d88e00cce7e07079898868e6a10edfb0e3973a | [
"MIT"
] | null | null | null | defmodule GenWebsocket do
@moduledoc ~S"""
A Websocket client for Elixir with a API similar to `:gen_tcp`.
Example of usage where a client is started and connected, then it is used to send and receive
data:
iex> {:ok, client} = GenWebsocket.connect('example.com', 80, [])
iex> :ok = GenWebsocket.send(client, "data")
iex> {:ok, data} = GenWebsocket.recv(client, 100)
  When the client has the option `:active` set to `true` or `:once`, it will send messages with
  the format `{:websocket, pid, data}` to the process that started the client or to the
  controlling process defined with the `controlling_process/2` function, where `pid` is the PID
  of the client and `data` is a binary with the data received from the websocket server. If the
  `:active` option was set to `:once`, the client will set it to `false` after sending data once.
  When the option is set to `false`, the `recv/3` function must be used to retrieve data, or the
  option needs to be set back to `true` or `:once` using the `set_active/2` function.
When the connection is lost unexpectedly a message is sent with the format
`{:websocket_closed, pid}`.
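
  For example, a controlling process could consume these messages like this (a minimal
  sketch; `client` is assumed to be a connected client in active mode):

      receive do
        # Data frame pushed by the client in active mode.
        {:websocket, ^client, data} ->
          IO.puts("received: " <> data)

        # Sent when the connection is lost unexpectedly.
        {:websocket_closed, ^client} ->
          IO.puts("connection closed")
      end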
  To close the client connection to the websocket server, the `close/1` function can be used;
  the connection to the websocket will be closed and the client process will be stopped.
"""
alias GenWebsocket.Client
@type reason() :: any()
@type opts() :: Keyword.t()
@doc """
Starts and connects a client to the websocket server.
  The `opts` parameter is a Keyword list that accepts the following optional entries:

  * `:transport` - An atom with the transport protocol, either `:tcp` or `:tls`, defaults to `:tcp`
  * `:path` - A string with the websocket server path, defaults to `"/"`
  * `:headers` - A list of tuples with the HTTP headers to send to the server, defaults to `[]`
  * `:active` - A boolean or `:once` indicating whether the client should send received data
    back as messages; it can be `true`, `false` or `:once`, defaults to `false`
* `:compress` - A boolean to indicate if the data should be compressed, defaults to `true`
* `:protocols` - A list of strings indicating the protocols used by the server, defaults to `[]`
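
  A call with options might look like this (a sketch; host, port and option values
  are illustrative):

      {:ok, client} =
        GenWebsocket.connect('example.com', 443,
          transport: :tls,
          path: "/ws",
          active: :once
        )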
"""
@spec connect(charlist(), :inet.port_number(), opts(), timeout()) ::
{:ok, pid()} | {:error, reason()}
defdelegate connect(host, port, opts \\ [], timeout \\ 5_000), to: Client
@doc """
Sends data to the websocket server.
Data must be a binary or a list of binaries.
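
  For example (the payload is illustrative):

      :ok = GenWebsocket.send(client, ["hello", " ", "world"])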
"""
@spec send(pid(), iodata()) :: :ok | {:error, reason()}
defdelegate send(client, data), to: Client
@doc """
When the client has the option `:active` set to `false`, the `recv/3` function can be used to
retrieve any data sent by the server.
If the provided length is `0`, it returns immediately with all data present in the client, even if
there is none. If the timeout expires it returns `{:error, :timeout}`.
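
  For example, to take whatever data the client has buffered (a sketch; `client` is
  assumed to have `:active` set to `false`):

      {:ok, data} = GenWebsocket.recv(client, 0)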
"""
@spec recv(pid(), non_neg_integer(), timeout()) :: {:ok, String.t()} | {:error, reason()}
defdelegate recv(client, length, timeout \\ 5_000), to: Client
@doc """
  Defines the process to which the data received by the client is sent, when the client option
`:active` is set to `true` or `:once`.
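
  For example, to route incoming messages to the calling process (illustrative):

      :ok = GenWebsocket.controlling_process(client, self())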
"""
@spec controlling_process(pid(), pid()) :: :ok | {:error, reason()}
defdelegate controlling_process(client, pid), to: Client
@doc """
Closes the client connection to the websocket server and stops the client. Any data retained by
the client will be lost.
"""
@spec close(pid()) :: :ok
defdelegate close(client), to: Client
@doc """
It defines the client `:active` option.
  The possible values for the `:active` option are `true`, `false` or `:once`. When the `:active`
option is set to `:once`, the client will send back the first received frame of data and set the
`:active` option to `false`.
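
  For example, to receive exactly one frame as a message (illustrative):

      :ok = GenWebsocket.set_active(client, :once)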
"""
@spec set_active(pid(), boolean() | :once) :: :ok | {:error, reason()}
defdelegate set_active(client, active), to: Client
end
| 44.5 | 100 | 0.684644 |
73052766ad69f2d71806d297ae40401536e98e26 | 31,762 | ex | Elixir | lib/phoenix/router.ex | jwarwick/phoenix | 46dccaec5c7b79ab3fa863b2138d5a9eb6b4a34b | [
"MIT"
] | 1 | 2021-06-26T03:57:48.000Z | 2021-06-26T03:57:48.000Z | lib/phoenix/router.ex | jwarwick/phoenix | 46dccaec5c7b79ab3fa863b2138d5a9eb6b4a34b | [
"MIT"
] | 2 | 2022-02-19T07:30:25.000Z | 2022-02-27T14:12:26.000Z | lib/phoenix/router.ex | jwarwick/phoenix | 46dccaec5c7b79ab3fa863b2138d5a9eb6b4a34b | [
"MIT"
] | 1 | 2021-06-22T08:06:06.000Z | 2021-06-22T08:06:06.000Z | defmodule Phoenix.Router do
defmodule NoRouteError do
@moduledoc """
Exception raised when no route is found.
"""
defexception plug_status: 404, message: "no route found", conn: nil, router: nil
def exception(opts) do
conn = Keyword.fetch!(opts, :conn)
router = Keyword.fetch!(opts, :router)
path = "/" <> Enum.join(conn.path_info, "/")
%NoRouteError{message: "no route found for #{conn.method} #{path} (#{inspect router})",
conn: conn, router: router}
end
end
defmodule MalformedURIError do
@moduledoc """
Exception raised when the URI is malformed on matching.
"""
defexception [:message, plug_status: 400]
end
@moduledoc """
Defines a Phoenix router.
The router provides a set of macros for generating routes
that dispatch to specific controllers and actions. Those
macros are named after HTTP verbs. For example:
defmodule MyAppWeb.Router do
use Phoenix.Router
get "/pages/:page", PageController, :show
end
The `get/3` macro above accepts a request to `/pages/hello` and dispatches
it to `PageController`'s `show` action with `%{"page" => "hello"}` in
`params`.
Phoenix's router is extremely efficient, as it relies on Elixir
pattern matching for matching routes and serving requests.
## Routing
`get/3`, `post/3`, `put/3` and other macros named after HTTP verbs are used
to create routes.
The route:
get "/pages", PageController, :index
matches a `GET` request to `/pages` and dispatches it to the `index` action in
`PageController`.
get "/pages/:page", PageController, :show
matches `/pages/hello` and dispatches to the `show` action with
`%{"page" => "hello"}` in `params`.
defmodule PageController do
def show(conn, params) do
# %{"page" => "hello"} == params
end
end
Partial and multiple segments can be matched. For example:
get "/api/v:version/pages/:id", PageController, :show
matches `/api/v1/pages/2` and puts `%{"version" => "1", "id" => "2"}` in
`params`. Only the trailing part of a segment can be captured.
Routes are matched from top to bottom. The second route here:
get "/pages/:page", PageController, :show
get "/pages/hello", PageController, :hello
will never match `/pages/hello` because `/pages/:page` matches that first.
Routes can use glob-like patterns to match trailing segments.
get "/pages/*page", PageController, :show
matches `/pages/hello/world` and puts the globbed segments in `params["page"]`.
GET /pages/hello/world
%{"page" => ["hello", "world"]} = params
Globs can match segments partially too. The difference is the whole segment
is captured along with the trailing segments.
get "/pages/he*page", PageController, :show
matches
GET /pages/hello/world
%{"page" => ["hello", "world"]} = params
GET /pages/hey/world
%{"page" => ["hey", "world"]} = params
## Helpers
Phoenix automatically generates a module `Helpers` inside your router
which contains named helpers to help developers generate and keep
their routes up to date.
Helpers are automatically generated based on the controller name.
For example, the route:
get "/pages/:page", PageController, :show
will generate the following named helper:
MyAppWeb.Router.Helpers.page_path(conn_or_endpoint, :show, "hello")
"/pages/hello"
MyAppWeb.Router.Helpers.page_path(conn_or_endpoint, :show, "hello", some: "query")
"/pages/hello?some=query"
MyAppWeb.Router.Helpers.page_url(conn_or_endpoint, :show, "hello")
"http://example.com/pages/hello"
MyAppWeb.Router.Helpers.page_url(conn_or_endpoint, :show, "hello", some: "query")
"http://example.com/pages/hello?some=query"
If the route contains glob-like patterns, parameters for those have to be given as
list:
MyAppWeb.Router.Helpers.page_path(conn_or_endpoint, :show, ["hello", "world"])
"/pages/hello/world"
The URL generated in the named URL helpers is based on the configuration for
`:url`, `:http` and `:https`. However, if for some reason you need to manually
control the URL generation, the url helpers also allow you to pass in a `URI`
struct:
uri = %URI{scheme: "https", host: "other.example.com"}
MyAppWeb.Router.Helpers.page_url(uri, :show, "hello")
"https://other.example.com/pages/hello"
The named helper can also be customized with the `:as` option. Given
the route:
get "/pages/:page", PageController, :show, as: :special_page
the named helper will be:
MyAppWeb.Router.Helpers.special_page_path(conn, :show, "hello")
"/pages/hello"
## Scopes and Resources
It is very common in Phoenix applications to namespace all of your
routes under the application scope:
scope "/", MyAppWeb do
get "/pages/:id", PageController, :show
end
The route above will dispatch to `MyAppWeb.PageController`. This syntax
is not only convenient for developers, since we don't have to repeat
the `MyAppWeb.` prefix on all routes, but it also allows Phoenix to put
less pressure on the Elixir compiler. If instead we had written:
get "/pages/:id", MyAppWeb.PageController, :show
The Elixir compiler would infer that the router depends directly on
`MyAppWeb.PageController`, which is not true. By using scopes, Phoenix
can properly hint to the Elixir compiler the controller is not an
actual dependency of the router. This provides more efficient
compilation times.
Scopes allow us to scope on any path or even on the helper name:
scope "/api/v1", MyAppWeb, as: :api_v1 do
get "/pages/:id", PageController, :show
end
For example, the route above will match on the path `"/api/v1/pages/:id"`
and the named route will be `api_v1_page_path`, as expected from the
values given to `scope/2` option.
Phoenix also provides a `resources/4` macro that allows developers
to generate "RESTful" routes to a given resource:
defmodule MyAppWeb.Router do
use Phoenix.Router
resources "/pages", PageController, only: [:show]
resources "/users", UserController, except: [:delete]
end
Finally, Phoenix ships with a `mix phx.routes` task that nicely
formats all routes in a given router. We can use it to verify all
routes included in the router above:
$ mix phx.routes
page_path GET /pages/:id PageController.show/2
user_path GET /users UserController.index/2
user_path GET /users/:id/edit UserController.edit/2
user_path GET /users/new UserController.new/2
user_path GET /users/:id UserController.show/2
user_path POST /users UserController.create/2
user_path PATCH /users/:id UserController.update/2
PUT /users/:id UserController.update/2
One can also pass a router explicitly as an argument to the task:
$ mix phx.routes MyAppWeb.Router
Check `scope/2` and `resources/4` for more information.
## Pipelines and plugs
Once a request arrives at the Phoenix router, it performs
a series of transformations through pipelines until the
request is dispatched to a desired end-point.
Such transformations are defined via plugs, as defined
in the [Plug](http://github.com/elixir-lang/plug) specification.
Once a pipeline is defined, it can be piped through per scope.
For example:
defmodule MyAppWeb.Router do
use Phoenix.Router
pipeline :browser do
plug :fetch_session
plug :accepts, ["html"]
end
scope "/" do
pipe_through :browser
# browser related routes and resources
end
end
`Phoenix.Router` imports functions from both `Plug.Conn` and `Phoenix.Controller`
to help define plugs. In the example above, `fetch_session/2`
comes from `Plug.Conn` while `accepts/2` comes from `Phoenix.Controller`.
Note that router pipelines are only invoked after a route is found.
No plug is invoked in case no matches were found.
"""
alias Phoenix.Router.{Resource, Scope, Route, Helpers}
@http_methods [:get, :post, :put, :patch, :delete, :options, :connect, :trace, :head]
@doc false
defmacro __using__(opts) do
quote do
unquote(prelude(opts))
unquote(defs())
unquote(match_dispatch())
end
end
defp prelude(opts) do
quote do
@helpers_moduledoc Keyword.get(unquote(opts), :helpers_moduledoc, true)
Module.register_attribute __MODULE__, :phoenix_routes, accumulate: true
@phoenix_forwards %{}
import Phoenix.Router
# TODO v2: No longer automatically import dependencies
import Plug.Conn
import Phoenix.Controller
# Set up initial scope
@phoenix_pipeline nil
Phoenix.Router.Scope.init(__MODULE__)
@before_compile unquote(__MODULE__)
end
end
# Because those macros are executed multiple times,
# we end-up generating a huge scope that drastically
# affects compilation. We work around it by defining
# those functions only once and calling it over and
# over again.
defp defs() do
quote unquote: false do
var!(add_resources, Phoenix.Router) = fn resource ->
path = resource.path
ctrl = resource.controller
opts = resource.route
if resource.singleton do
Enum.each resource.actions, fn
:show -> get path, ctrl, :show, opts
:new -> get path <> "/new", ctrl, :new, opts
:edit -> get path <> "/edit", ctrl, :edit, opts
:create -> post path, ctrl, :create, opts
:delete -> delete path, ctrl, :delete, opts
:update ->
patch path, ctrl, :update, opts
put path, ctrl, :update, Keyword.put(opts, :as, nil)
end
else
param = resource.param
Enum.each resource.actions, fn
:index -> get path, ctrl, :index, opts
:show -> get path <> "/:" <> param, ctrl, :show, opts
:new -> get path <> "/new", ctrl, :new, opts
:edit -> get path <> "/:" <> param <> "/edit", ctrl, :edit, opts
:create -> post path, ctrl, :create, opts
:delete -> delete path <> "/:" <> param, ctrl, :delete, opts
:update ->
patch path <> "/:" <> param, ctrl, :update, opts
put path <> "/:" <> param, ctrl, :update, Keyword.put(opts, :as, nil)
end
end
end
end
end
@doc false
def __call__(
%{private: %{phoenix_router: router, phoenix_bypass: {router, pipes}}} = conn,
{metadata, prepare, pipeline, _}
) do
conn = prepare.(conn, metadata)
case pipes do
:current -> pipeline.(conn)
_ -> Enum.reduce(pipes, conn, fn pipe, acc -> apply(router, pipe, [acc, []]) end)
end
end
def __call__(%{private: %{phoenix_bypass: :all}} = conn, {metadata, prepare, _, _}) do
prepare.(conn, metadata)
end
def __call__(conn, {metadata, prepare, pipeline, {plug, opts}}) do
conn = prepare.(conn, metadata)
start = System.monotonic_time()
metadata = %{metadata | conn: conn}
:telemetry.execute([:phoenix, :router_dispatch, :start], %{system_time: System.system_time()}, metadata)
case pipeline.(conn) do
%Plug.Conn{halted: true} = halted_conn ->
measurements = %{duration: System.monotonic_time() - start}
metadata = %{metadata | conn: halted_conn}
:telemetry.execute([:phoenix, :router_dispatch, :stop], measurements, metadata)
halted_conn
%Plug.Conn{} = piped_conn ->
try do
plug.call(piped_conn, plug.init(opts))
else
conn ->
measurements = %{duration: System.monotonic_time() - start}
metadata = %{metadata | conn: conn}
:telemetry.execute([:phoenix, :router_dispatch, :stop], measurements, metadata)
conn
rescue
e in Plug.Conn.WrapperError ->
measurements = %{duration: System.monotonic_time() - start}
metadata = Map.merge(metadata, %{conn: conn, kind: :error, reason: e, stacktrace: __STACKTRACE__})
:telemetry.execute([:phoenix, :router_dispatch, :exception], measurements, metadata)
Plug.Conn.WrapperError.reraise(e)
catch
kind, reason ->
measurements = %{duration: System.monotonic_time() - start}
metadata = Map.merge(metadata, %{conn: conn, kind: kind, reason: reason, stacktrace: __STACKTRACE__})
:telemetry.execute([:phoenix, :router_dispatch, :exception], measurements, metadata)
Plug.Conn.WrapperError.reraise(piped_conn, kind, reason, __STACKTRACE__)
end
end
end
defp match_dispatch() do
quote location: :keep do
@behaviour Plug
@doc """
Callback required by Plug that initializes the router
for serving web requests.
"""
def init(opts) do
opts
end
@doc """
Callback invoked by Plug on every request.
"""
def call(conn, _opts) do
%{method: method, path_info: path_info, host: host} = conn = prepare(conn)
decoded =
try do
Enum.map(path_info, &URI.decode/1)
rescue
ArgumentError ->
raise MalformedURIError, "malformed URI path: #{inspect conn.request_path}"
end
case __match_route__(method, decoded, host) do
:error -> raise NoRouteError, conn: conn, router: __MODULE__
match -> Phoenix.Router.__call__(conn, match)
end
end
defoverridable [init: 1, call: 2]
end
end
@doc false
defmacro __before_compile__(env) do
routes = env.module |> Module.get_attribute(:phoenix_routes) |> Enum.reverse
routes_with_exprs = Enum.map(routes, &{&1, Route.exprs(&1)})
helpers_moduledoc = Module.get_attribute(env.module, :helpers_moduledoc)
Helpers.define(env, routes_with_exprs, docs: helpers_moduledoc)
{matches, _} = Enum.map_reduce(routes_with_exprs, %{}, &build_match/2)
checks =
for %{line: line, plug: plug, plug_opts: plug_opts} <- routes, into: %{} do
quote line: line do
{unquote(plug).init(unquote(Macro.escape(plug_opts))), []}
end
end
match_404 =
quote [generated: true] do
def __match_route__(_method, _path_info, _host) do
:error
end
end
keys = [:verb, :path, :plug, :plug_opts, :helper, :metadata]
routes = Enum.map(routes, &Map.take(&1, keys))
quote do
@doc false
def __routes__, do: unquote(Macro.escape(routes))
@doc false
def __checks__, do: unquote({:__block__, [], Map.keys(checks)})
@doc false
def __helpers__, do: __MODULE__.Helpers
defp prepare(conn) do
merge_private(
conn,
[
{:phoenix_router, __MODULE__},
{__MODULE__, {conn.script_name, @phoenix_forwards}}
]
)
end
unquote(matches)
unquote(match_404)
end
end
defp build_match({route, exprs}, known_pipelines) do
%{pipe_through: pipe_through} = route
%{
prepare: prepare,
dispatch: dispatch,
verb_match: verb_match,
path_params: path_params,
path: path,
host: host
} = exprs
{pipe_name, pipe_definition, known_pipelines} =
case known_pipelines do
%{^pipe_through => name} ->
{name, :ok, known_pipelines}
%{} ->
name = :"__pipe_through#{map_size(known_pipelines)}__"
{name, build_pipes(name, pipe_through), Map.put(known_pipelines, pipe_through, name)}
end
quoted =
quote line: route.line do
unquote(pipe_definition)
@doc false
def __match_route__(unquote(verb_match), unquote(path), unquote(host)) do
{unquote(build_metadata(route, path_params)),
fn var!(conn, :conn), %{path_params: var!(path_params, :conn)} -> unquote(prepare) end,
&unquote(Macro.var(pipe_name, __MODULE__))/1,
unquote(dispatch)}
end
end
{quoted, known_pipelines}
end
defp build_metadata(route, path_params) do
%{
path: path,
plug: plug,
plug_opts: plug_opts,
pipe_through: pipe_through,
metadata: metadata
} = route
pairs = [
conn: nil,
route: path,
plug: plug,
plug_opts: Macro.escape(plug_opts),
path_params: path_params,
pipe_through: pipe_through
]
{:%{}, [], pairs ++ Macro.escape(Map.to_list(metadata))}
end
defp build_pipes(name, []) do
quote do
defp unquote(name)(conn), do: conn
end
end
defp build_pipes(name, pipe_through) do
plugs = pipe_through |> Enum.reverse |> Enum.map(&{&1, [], true})
{conn, body} = Plug.Builder.compile(__ENV__, plugs, init_mode: Phoenix.plug_init_mode(), log_on_halt: :debug)
quote do
defp unquote(name)(unquote(conn)), do: unquote(body)
end
end
@doc """
Generates a route match based on an arbitrary HTTP method.
Useful for defining routes not included in the builtin macros.
The catch-all verb, `:*`, may also be used to match all HTTP methods.
## Options
* `:as` - configures the named helper exclusively. If false, does not generate
a helper.
* `:alias` - configure if the scope alias should be applied to the route.
Defaults to true, disables scoping if false.
* `:log` - the level to log the route dispatching under,
may be set to false. Defaults to `:debug`
  * `:host` - a string containing the host scope, or prefix host scope,
    i.e. `"foo.bar.com"`, `"foo."`
* `:private` - a map of private data to merge into the connection
when a route matches
* `:assigns` - a map of data to merge into the connection when a route matches
* `:metadata` - a map of metadata used by the telemetry events and returned by
`route_info/4`
* `:trailing_slash` - a boolean to flag whether or not the helper functions
append a trailing slash. Defaults to `false`.
## Examples
match(:move, "/events/:id", EventController, :move)
match(:*, "/any", SomeController, :any)
"""
defmacro match(verb, path, plug, plug_opts, options \\ []) do
add_route(:match, verb, path, plug, plug_opts, options)
end
for verb <- @http_methods do
@doc """
Generates a route to handle a #{verb} request to the given path.
#{verb}("/events/:id", EventController, :action)
See `match/5` for options.
"""
defmacro unquote(verb)(path, plug, plug_opts, options \\ []) do
add_route(:match, unquote(verb), path, plug, plug_opts, options)
end
end
defp add_route(kind, verb, path, plug, plug_opts, options) do
quote do
@phoenix_routes Scope.route(
__ENV__.line,
__ENV__.module,
unquote(kind),
unquote(verb),
unquote(path),
unquote(plug),
unquote(plug_opts),
unquote(options)
)
end
end
@doc """
Defines a plug pipeline.
Pipelines are defined at the router root and can be used
from any scope.
## Examples
pipeline :api do
plug :token_authentication
plug :dispatch
end
A scope may then use this pipeline as:
scope "/" do
pipe_through :api
end
Every time `pipe_through/1` is called, the new pipelines
are appended to the ones previously given.
"""
defmacro pipeline(plug, do: block) do
with true <- is_atom(plug),
imports = __CALLER__.macros ++ __CALLER__.functions,
{mod, _} <- Enum.find(imports, fn {_, imports} -> {plug, 2} in imports end) do
raise ArgumentError,
"cannot define pipeline named #{inspect(plug)} " <>
"because there is an import from #{inspect(mod)} with the same name"
end
block =
quote do
plug = unquote(plug)
@phoenix_pipeline []
unquote(block)
end
compiler =
quote unquote: false do
Scope.pipeline(__MODULE__, plug)
{conn, body} = Plug.Builder.compile(__ENV__, @phoenix_pipeline,
init_mode: Phoenix.plug_init_mode())
def unquote(plug)(unquote(conn), _) do
try do
unquote(body)
rescue
e in Plug.Conn.WrapperError ->
Plug.Conn.WrapperError.reraise(e)
catch
:error, reason ->
Plug.Conn.WrapperError.reraise(unquote(conn), :error, reason, __STACKTRACE__)
end
end
@phoenix_pipeline nil
end
quote do
try do
unquote(block)
unquote(compiler)
after
:ok
end
end
end
@doc """
Defines a plug inside a pipeline.
See `pipeline/2` for more information.
"""
defmacro plug(plug, opts \\ []) do
plug = Macro.expand(plug, %{__CALLER__ | function: {:init, 1}})
quote do
if pipeline = @phoenix_pipeline do
@phoenix_pipeline [{unquote(plug), unquote(opts), true}|pipeline]
else
raise "cannot define plug at the router level, plug must be defined inside a pipeline"
end
end
end
@doc """
Defines a list of plugs (and pipelines) to send the connection through.
See `pipeline/2` for more information.
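
  ## Examples

      scope "/admin" do
        pipe_through [:browser, :authenticate_user]

        # routes declared here run through both pipelines
      end

  Here `:authenticate_user` is an assumed example pipeline, defined elsewhere
  in the router with `pipeline/2`.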
"""
defmacro pipe_through(pipes) do
quote do
if pipeline = @phoenix_pipeline do
raise "cannot pipe_through inside a pipeline"
else
Scope.pipe_through(__MODULE__, unquote(pipes))
end
end
end
@doc """
Defines "RESTful" routes for a resource.
The given definition:
resources "/users", UserController
will include routes to the following actions:
* `GET /users` => `:index`
* `GET /users/new` => `:new`
* `POST /users` => `:create`
* `GET /users/:id` => `:show`
* `GET /users/:id/edit` => `:edit`
* `PATCH /users/:id` => `:update`
* `PUT /users/:id` => `:update`
* `DELETE /users/:id` => `:delete`
## Options
This macro accepts a set of options:
* `:only` - a list of actions to generate routes for, for example: `[:show, :edit]`
* `:except` - a list of actions to exclude generated routes from, for example: `[:delete]`
* `:param` - the name of the parameter for this resource, defaults to `"id"`
* `:name` - the prefix for this resource. This is used for the named helper
and as the prefix for the parameter in nested resources. The default value
is automatically derived from the controller name, i.e. `UserController` will
have name `"user"`
* `:as` - configures the named helper exclusively
* `:singleton` - defines routes for a singleton resource that is looked up by
the client without referencing an ID. Read below for more information
## Singleton resources
When a resource needs to be looked up without referencing an ID, because
it contains only a single entry in the given context, the `:singleton`
option can be used to generate a set of routes that are specific to
such single resource:
* `GET /user` => `:show`
* `GET /user/new` => `:new`
* `POST /user` => `:create`
* `GET /user/edit` => `:edit`
* `PATCH /user` => `:update`
* `PUT /user` => `:update`
* `DELETE /user` => `:delete`
Usage example:
resources "/account", AccountController, only: [:show], singleton: true
## Nested Resources
This macro also supports passing a nested block of route definitions.
This is helpful for nesting children resources within their parents to
generate nested routes.
The given definition:
resources "/users", UserController do
resources "/posts", PostController
end
will include the following routes:
user_post_path GET /users/:user_id/posts PostController :index
user_post_path GET /users/:user_id/posts/:id/edit PostController :edit
user_post_path GET /users/:user_id/posts/new PostController :new
user_post_path GET /users/:user_id/posts/:id PostController :show
user_post_path POST /users/:user_id/posts PostController :create
user_post_path PATCH /users/:user_id/posts/:id PostController :update
PUT /users/:user_id/posts/:id PostController :update
user_post_path DELETE /users/:user_id/posts/:id PostController :delete
"""
defmacro resources(path, controller, opts, do: nested_context) do
add_resources path, controller, opts, do: nested_context
end
@doc """
See `resources/4`.
"""
defmacro resources(path, controller, do: nested_context) do
add_resources path, controller, [], do: nested_context
end
defmacro resources(path, controller, opts) do
add_resources path, controller, opts, do: nil
end
@doc """
See `resources/4`.
"""
defmacro resources(path, controller) do
add_resources path, controller, [], do: nil
end
defp add_resources(path, controller, options, do: context) do
scope =
if context do
quote do
scope resource.member, do: unquote(context)
end
end
quote do
resource = Resource.build(unquote(path), unquote(controller), unquote(options))
var!(add_resources, Phoenix.Router).(resource)
unquote(scope)
end
end
@doc """
Defines a scope in which routes can be nested.
## Examples
scope path: "/api/v1", as: :api_v1, alias: API.V1 do
get "/pages/:id", PageController, :show
end
The generated route above will match on the path `"/api/v1/pages/:id"`
and will dispatch to `:show` action in `API.V1.PageController`. A named
helper `api_v1_page_path` will also be generated.
## Options
The supported options are:
* `:path` - a string containing the path scope.
* `:as` - a string or atom containing the named helper scope. When set to
false, it resets the nested helper scopes.
* `:alias` - an alias (atom) containing the controller scope. When set to
false, it resets all nested aliases.
  * `:host` - a string containing the host scope, or prefix host scope,
    i.e. `"foo.bar.com"`, `"foo."`
* `:private` - a map of private data to merge into the connection when a route matches
* `:assigns` - a map of data to merge into the connection when a route matches
* `:log` - the level to log the route dispatching under,
may be set to false. Defaults to `:debug`
* `:trailing_slash` - whether or not the helper functions append a trailing
slash. Defaults to `false`.
"""
defmacro scope(options, do: context) do
do_scope(options, context)
end
@doc """
Define a scope with the given path.
This function is a shortcut for:
scope path: path do
...
end
## Examples
scope "/api/v1", as: :api_v1 do
get "/pages/:id", PageController, :show
end
"""
defmacro scope(path, options, do: context) do
options = Macro.expand(options, %{__CALLER__ | function: {:init, 1}})
options = quote do
path = unquote(path)
case unquote(options) do
alias when is_atom(alias) -> [path: path, alias: alias]
options when is_list(options) -> Keyword.put(options, :path, path)
end
end
do_scope(options, context)
end
@doc """
Defines a scope with the given path and alias.
This function is a shortcut for:
scope path: path, alias: alias do
...
end
## Examples
scope "/api/v1", API.V1, as: :api_v1 do
get "/pages/:id", PageController, :show
end
"""
defmacro scope(path, alias, options, do: context) do
alias = Macro.expand(alias, %{__CALLER__ | function: {:init, 1}})
options = quote do
unquote(options)
|> Keyword.put(:path, unquote(path))
|> Keyword.put(:alias, unquote(alias))
end
do_scope(options, context)
end
defp do_scope(options, context) do
quote do
Scope.push(__MODULE__, unquote(options))
try do
unquote(context)
after
Scope.pop(__MODULE__)
end
end
end
@doc """
Returns the full alias with the current scope's aliased prefix.
Useful for applying the same short-hand alias handling to
other values besides the second argument in route definitions.
## Examples
scope "/", MyPrefix do
get "/", ProxyPlug, controller: scoped_alias(__MODULE__, MyController)
end
"""
def scoped_alias(router_module, alias) do
Scope.expand_alias(router_module, alias)
end
@doc """
Forwards a request at the given path to a plug.
All paths that match the forwarded prefix will be sent to
the forwarded plug. This is useful for sharing a router between
applications or even breaking a big router into smaller ones.
The router pipelines will be invoked prior to forwarding the
connection.
However, we don't advise forwarding to another endpoint.
The reason is that plugs defined by your app and the forwarded
endpoint would be invoked twice, which may lead to errors.
## Examples
scope "/", MyApp do
pipe_through [:browser, :admin]
forward "/admin", SomeLib.AdminDashboard
forward "/api", ApiRouter
end
"""
defmacro forward(path, plug, plug_opts \\ [], router_opts \\ []) do
plug = Macro.expand(plug, %{__CALLER__ | function: {:init, 1}})
router_opts = Keyword.put(router_opts, :as, nil)
quote unquote: true, bind_quoted: [path: path, plug: plug] do
plug = Scope.register_forwards(__MODULE__, path, plug)
unquote(add_route(:forward, :*, path, plug, plug_opts, router_opts))
end
end
@doc """
Returns all routes information from the given router.
"""
def routes(router) do
router.__routes__()
end
@doc """
Returns the compile-time route info and runtime path params for a request.
The `path` can be either a string or the `path_info` segments.
A map of metadata is returned with the following keys:
* `:log` - the configured log level. For example `:debug`
* `:path_params` - the map of runtime path params
* `:pipe_through` - the list of pipelines for the route's scope, for example `[:browser]`
* `:plug` - the plug to dispatch the route to, for example `AppWeb.PostController`
* `:plug_opts` - the options to pass when calling the plug, for example: `:index`
* `:route` - the string route pattern, such as `"/posts/:id"`
## Examples
iex> Phoenix.Router.route_info(AppWeb.Router, "GET", "/posts/123", "myhost")
%{
log: :debug,
path_params: %{"id" => "123"},
pipe_through: [:browser],
plug: AppWeb.PostController,
plug_opts: :show,
route: "/posts/:id",
}
iex> Phoenix.Router.route_info(MyRouter, "GET", "/not-exists", "myhost")
:error
"""
def route_info(router, method, path, host) when is_binary(path) do
split_path = for segment <- String.split(path, "/"), segment != "", do: segment
route_info(router, method, split_path, host)
end
def route_info(router, method, split_path, host) when is_list(split_path) do
case router.__match_route__(method, split_path, host) do
{%{} = metadata, _prepare, _pipeline, {_plug, _opts}} -> Map.delete(metadata, :conn)
:error -> :error
end
end
end
| 31.200393 | 113 | 0.633997 |
730535ff00ae395aeebe712b2cf29aee32c9bbd4 | 1,015 | exs | Elixir | test/aws/xml_test.exs | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | test/aws/xml_test.exs | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | test/aws/xml_test.exs | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | defmodule AWS.XMLTest do
use ExUnit.Case
@text "__text"
test "decode!/2 handles lists correctly by merging values in a list" do
expected = %{"person" => %{"name" => "foo", "addresses" => %{"address" => ["1", "2"]}}}
input = """
<person>
<name>foo</name>
<addresses>
<address>1</address>
<address>2</address>
</addresses>
</person>
"""
assert ^expected = AWS.XML.decode!(input)
end
test "decode!/1 handles multiple text elments mixed with other elements correctly" do
expected = %{"person" => %{"name" => "foo", @text => "random"}}
input = """
<person>
<name>foo</name>
random
</person>
"""
assert ^expected = AWS.XML.decode!(input)
expected = %{"person" => %{"name" => "foo", "age" => "42", @text => "random\n \n text"}}
input = """
<person>
<name>foo</name>
random
<age>42</age>
text
</person>
"""
assert ^expected = AWS.XML.decode!(input)
end
end
| 21.145833 | 94 | 0.53202 |
73053dc3a5fd7bb773fdc6137604dc4e2f925dcc | 3,212 | ex | Elixir | lib/bitcoin_simulator/bitcoin_core/network.ex | sidharth-shridhar/Bitcoin-Miner-Simulation | 2789dc8fe5f65269789540f675fac682e431e518 | [
"MIT"
] | null | null | null | lib/bitcoin_simulator/bitcoin_core/network.ex | sidharth-shridhar/Bitcoin-Miner-Simulation | 2789dc8fe5f65269789540f675fac682e431e518 | [
"MIT"
] | null | null | null | lib/bitcoin_simulator/bitcoin_core/network.ex | sidharth-shridhar/Bitcoin-Miner-Simulation | 2789dc8fe5f65269789540f675fac682e431e518 | [
"MIT"
] | null | null | null | defmodule BitcoinSimulator.BitcoinCore.Network do
use Timex
alias BitcoinSimulator.BitcoinCore.Blockchain
alias BitcoinSimulator.Simulation.Tracker
alias BitcoinSimulator.Const
defmodule MessageRecord do
defstruct [
transactions: Map.new(),
blocks: Map.new()
]
end
# APIs
def get_new_message_record, do: %MessageRecord{}
def get_initial_neighbors(id), do: GenServer.call(Tracker, {:peer_join, id})
def get_initial_blockchain(neighbors) do
if MapSet.size(neighbors) != 0 do
random_peer = neighbors |> MapSet.to_list() |> Enum.random()
GenServer.call({:via, Registry, {BitcoinSimulator.Registry, "peer_#{random_peer}"}}, :request_blockchain)
else
Blockchain.get_new_blockchain()
end
end
def exchange_neighbors(neighbors) do
Enum.each(MapSet.to_list(neighbors), fn(x) -> GenServer.cast({:via, Registry, {BitcoinSimulator.Registry, "peer_#{x}"}}, {:exchange_neighbors, neighbors}) end)
end
def mix_neighbors(neighbors, self_id) do
neighbors = MapSet.delete(neighbors, self_id)
neighbor_count = Const.decode(:neighbor_count)
if MapSet.size(neighbors) < neighbor_count do
neighbors
else
random_peer(neighbors, MapSet.new(), neighbor_count)
end
end
def message_seen?(record, type, hash) do
case type do
:transaction ->
if Map.has_key?(record.transactions, hash), do: true, else: false
:block ->
if Map.has_key?(record.blocks, hash), do: true, else: false
end
end
def saw_message(record, type, hash) do
case type do
:transaction ->
%{record | transactions: Map.put(record.transactions, hash, Timex.now())}
:block ->
%{record | blocks: Map.put(record.blocks, hash, Timex.now())}
end
end
def clean_message_record(record) do
tx_list = Map.to_list(record.transactions)
block_list = Map.to_list(record.blocks)
current_time = Timex.now()
ttl = Const.decode(:network_message_record_ttl)
drop_tx = Enum.reduce(tx_list, [], fn(x, acc) ->
if Timex.diff(current_time, elem(x, 1), :milliseconds) > ttl, do: [elem(x, 0) | acc], else: acc
end)
drop_block = Enum.reduce(block_list, [], fn(x, acc) ->
if Timex.diff(current_time, elem(x, 1), :milliseconds) > ttl, do: [elem(x, 0) | acc], else: acc
end)
%{record |
transactions: record.transactions |> Map.drop(drop_tx),
blocks: record.blocks |> Map.drop(drop_block)
}
end
def broadcast_message(type, message, neighbors, sender) do
case type do
:transaction ->
Enum.each(MapSet.to_list(neighbors), fn(x) ->
GenServer.cast({:via, Registry, {BitcoinSimulator.Registry, "peer_#{x}"}}, {:transaction, message, sender})
end)
:block ->
Enum.each(MapSet.to_list(neighbors), fn(x) ->
GenServer.cast({:via, Registry, {BitcoinSimulator.Registry, "peer_#{x}"}}, {:block, message, sender})
end)
end
end
# Aux
defp random_peer(set, result, target_count) do
result = MapSet.put(result, set |> MapSet.to_list() |> Enum.random())
if MapSet.size(result) < target_count, do: random_peer(set, result, target_count), else: result
end
end
| 31.80198 | 163 | 0.665318 |
730546d76ef6c375430bdb69677bc71cc7d64de1 | 987 | ex | Elixir | lib/rockelivery/order/order.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | lib/rockelivery/order/order.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | lib/rockelivery/order/order.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | defmodule Rockelivery.Order.Order do
use Ecto.Schema
import Ecto.Changeset
alias Rockelivery.Item.Item
alias Rockelivery.User.User
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
@required_params [:address, :comments, :payment_method, :user_id]
@payment_categories [:money, :credit_card, :debit_card]
@derive {Jason.Encoder, only: [:id, :address, :comments, :payment_method, :user_id, :items]}
schema "orders" do
field(:address, :string)
field(:comments, :string)
field(:payment_method, Ecto.Enum, values: @payment_categories)
many_to_many(:items, Item, join_through: "orders_items")
belongs_to(:user, User)
timestamps()
end
def changeset(struct \\ %__MODULE__{}, params, items) do
struct
|> cast(params, @required_params)
|> validate_required(@required_params)
|> put_assoc(:items, items)
|> validate_length(:address, min: 10)
|> validate_length(:comments, min: 6)
end
end
| 28.2 | 94 | 0.705167 |
73056002a5c0b4c785089b5aa1f6d2bd61abb5b8 | 503 | exs | Elixir | priv/repo/migrations/20170227040522_rename_user_food_packages_to_user_orders.exs | neilfulwiler/open_pantry | 4b705f2282c7b2365a784503c9f1bdd34c741798 | [
"MIT"
] | 41 | 2017-10-04T00:33:46.000Z | 2021-04-09T01:33:34.000Z | priv/repo/migrations/20170227040522_rename_user_food_packages_to_user_orders.exs | openpantry/open_pantry | 27d898a65dd6f44b325f48d41bc448bb486d9c6f | [
"MIT"
] | 74 | 2017-09-20T03:36:17.000Z | 2018-11-20T20:46:16.000Z | priv/repo/migrations/20170227040522_rename_user_food_packages_to_user_orders.exs | neilfulwiler/open_pantry | 4b705f2282c7b2365a784503c9f1bdd34c741798 | [
"MIT"
] | 12 | 2017-10-04T10:02:49.000Z | 2021-12-28T22:57:20.000Z | defmodule OpenPantry.Repo.Migrations.RenameUserFoodPackagesToUserOrders do
use Ecto.Migration
def change do
rename table(:user_food_packages), to: table(:user_orders)
drop index(:stock_distributions, [:user_food_package_id, :stock_id], unique: true, name: :unique_stock_per_package)
rename table(:stock_distributions), :user_food_package_id, to: :user_order_id
create index(:stock_distributions, [:user_order_id, :stock_id], unique: true, name: :unique_stock_per_package)
end
end
| 45.727273 | 119 | 0.789264 |
730567507f9c8635bf7d6076bf30967a1387b935 | 1,655 | ex | Elixir | clients/display_video/lib/google_api/display_video/v1/model/asset_association.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/display_video/lib/google_api/display_video/v1/model/asset_association.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/display_video/lib/google_api/display_video/v1/model/asset_association.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.AssetAssociation do
@moduledoc """
Asset association for the creative.
## Attributes
* `asset` (*type:* `GoogleApi.DisplayVideo.V1.Model.Asset.t`, *default:* `nil`) - The associated asset.
* `role` (*type:* `String.t`, *default:* `nil`) - The role of this asset for the creative.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:asset => GoogleApi.DisplayVideo.V1.Model.Asset.t() | nil,
:role => String.t() | nil
}
field(:asset, as: GoogleApi.DisplayVideo.V1.Model.Asset)
field(:role)
end
defimpl Poison.Decoder, for: GoogleApi.DisplayVideo.V1.Model.AssetAssociation do
def decode(value, options) do
GoogleApi.DisplayVideo.V1.Model.AssetAssociation.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DisplayVideo.V1.Model.AssetAssociation do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.1 | 107 | 0.726284 |
73057e400d1273499b3493d39f15c12eaade8d20 | 1,224 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_dot_operator_call_operation/unqualified_no_parentheses_many_arguments_call_parsing_test_case/AtNonNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_dot_operator_call_operation/unqualified_no_parentheses_many_arguments_call_parsing_test_case/AtNonNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_dot_operator_call_operation/unqualified_no_parentheses_many_arguments_call_parsing_test_case/AtNonNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | @non_numeric.and unqualified positional, key: value
@non_numeric.&& unqualified positional, key: value
@non_numeric.|> unqualified positional, key: value
@non_numeric.@ unqualified positional, key: value
@non_numeric.& unqualified positional, key: value
@non_numeric.== unqualified positional, key: value
@non_numeric.+ unqualified positional, key: value
@non_numeric.^^^ unqualified positional, key: value
@non_numeric.<- unqualified positional, key: value
@non_numeric.in unqualified positional, key: value
@non_numeric.= unqualified positional, key: value
@non_numeric./ unqualified positional, key: value
@non_numeric.* unqualified positional, key: value
@non_numeric.or unqualified positional, key: value
@non_numeric.|| unqualified positional, key: value
@non_numeric.| unqualified positional, key: value
@non_numeric.<= unqualified positional, key: value
@non_numeric.-> unqualified positional, key: value
@non_numeric.<> unqualified positional, key: value
@non_numeric.^ unqualified positional, key: value
@non_numeric.not unqualified positional, key: value
@non_numeric.after unqualified positional, key: value
@non_numeric.do unqualified positional, key: value
@non_numeric.when unqualified positional, key: value
| 48.96 | 53 | 0.801471 |
73058935696313e93c785d455409a5f4d72e22df | 29 | ex | Elixir | code examples/example-9-16.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | 8 | 2016-08-14T12:35:16.000Z | 2021-01-26T04:05:31.000Z | code examples/example-9-16.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | null | null | null | code examples/example-9-16.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | 5 | 2016-08-18T22:12:19.000Z | 2020-02-17T18:52:41.000Z | "Recipient received #{what}"
| 14.5 | 28 | 0.724138 |
730593bf8c8300532351f84870b26086d4f8322b | 368 | ex | Elixir | lib/web/controllers/mssp_controller.ex | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | 107 | 2018-10-05T18:20:32.000Z | 2022-02-28T04:02:50.000Z | lib/web/controllers/mssp_controller.ex | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | 33 | 2018-10-05T14:11:18.000Z | 2022-02-10T22:19:18.000Z | lib/web/controllers/mssp_controller.ex | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | 18 | 2019-02-03T03:08:20.000Z | 2021-12-28T04:29:36.000Z | defmodule Web.MSSPController do
use Web, :controller
def index(conn, _params) do
conn
|> assign(:title, "MSSP Check - Grapevine")
|> assign(:open_graph_title, "MSSP Check")
|> assign(:open_graph_description, "Check your game's MSSP data with Grapevine")
|> assign(:open_graph_url, mssp_url(conn, :index))
|> render("index.html")
end
end
| 28.307692 | 84 | 0.679348 |
7305d6cc822000b53fcc2fc0a14f35e4bcf08489 | 1,631 | ex | Elixir | lib/credo/service/commands.ex | sevenseacat/credo | 48837401040d9c2340b5fb9c7d786d31f89f6426 | [
"MIT"
] | 13 | 2018-09-19T21:03:29.000Z | 2022-01-27T04:06:32.000Z | lib/credo/service/commands.ex | sevenseacat/credo | 48837401040d9c2340b5fb9c7d786d31f89f6426 | [
"MIT"
] | 1 | 2020-05-26T04:16:57.000Z | 2020-05-26T04:16:57.000Z | lib/credo/service/commands.ex | sevenseacat/credo | 48837401040d9c2340b5fb9c7d786d31f89f6426 | [
"MIT"
] | 3 | 2020-05-21T04:32:08.000Z | 2021-07-28T05:14:01.000Z | defmodule Credo.Service.Commands do
use GenServer
@command_map %{
"categories" => Credo.CLI.Command.Categories,
"explain" => Credo.CLI.Command.Explain,
"gen.check" => Credo.CLI.Command.GenCheck,
"gen.config" => Credo.CLI.Command.GenConfig,
"help" => Credo.CLI.Command.Help,
"list" => Credo.CLI.Command.List,
"suggest" => Credo.CLI.Command.Suggest,
"version" => Credo.CLI.Command.Version,
}
def start_link(opts \\ []) do
{:ok, _pid} = GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
  @doc "Returns the command module registered under the given command name."
  def get(command_name) do
GenServer.call(__MODULE__, {:get, command_name})
end
  @doc "Returns a List of all command modules."
  def modules do
GenServer.call(__MODULE__, {:modules})
end
@doc "Returns a List with the names of all commands."
def names do
GenServer.call(__MODULE__, {:names})
end
@doc "Returns a List of all command modules."
def put(command_name, command_mod) do
GenServer.call(__MODULE__, {:put, command_name, command_mod})
end
# callbacks
def init(_) do
{:ok, @command_map}
end
def handle_call({:get, command_name}, _from, current_state) do
{:reply, current_state[command_name], current_state}
end
def handle_call({:put, command_name, command_mod}, _from, current_state) do
{:reply, command_mod, Map.put(current_state, command_name, command_mod)}
end
def handle_call({:modules}, _from, current_state) do
{:reply, Map.values(current_state), current_state}
end
def handle_call({:names}, _from, current_state) do
names =
current_state
|> Map.keys()
|> Enum.map(&to_string/1)
{:reply, names, current_state}
end
end
| 25.888889 | 77 | 0.680564 |
7305de9a162145625b9a37a2135639e8049f1e47 | 999 | exs | Elixir | test/gandalf/authentication/bearer_test.exs | pragmaticivan/gandalf | d6f79489104f3b3544247856bb93679f47ec9e0d | [
"Apache-2.0",
"MIT"
] | 1 | 2018-05-01T01:34:56.000Z | 2018-05-01T01:34:56.000Z | test/gandalf/authentication/bearer_test.exs | pragmaticivan/gandalf | d6f79489104f3b3544247856bb93679f47ec9e0d | [
"Apache-2.0",
"MIT"
] | null | null | null | test/gandalf/authentication/bearer_test.exs | pragmaticivan/gandalf | d6f79489104f3b3544247856bb93679f47ec9e0d | [
"Apache-2.0",
"MIT"
] | null | null | null | defmodule Gandalf.Authentication.BearerTest do
use ExUnit.Case
use Gandalf.RepoBase
use Gandalf.DB.Test.DataCase
import Gandalf.Factory
alias Gandalf.Authentication.Bearer, as: BearerAuthentication
@access_token_value "access_token_1234"
setup do
user = insert(:user)
insert(:access_token, %{value: @access_token_value, user: user})
:ok
end
test "authorize with bearer authentication" do
{:ok, authorized_user} = BearerAuthentication.authenticate(@access_token_value, [])
refute is_nil(authorized_user)
end
test "authorize with bearer authentication using Bearer prefix" do
{:ok, authorized_user} =
BearerAuthentication.authenticate("Bearer #{@access_token_value}", [])
refute is_nil(authorized_user)
end
test "authorize with bearer authentication from map parameters" do
{:ok, authorized_user} =
BearerAuthentication.authenticate(%{"access_token" => @access_token_value}, [])
refute is_nil(authorized_user)
end
end
| 28.542857 | 87 | 0.744745 |
7305f176201b34c6fb376437a5510b3037ce3de9 | 1,388 | ex | Elixir | test/support/data_case.ex | Youthink/short_url | b42450e8ea4ef8ea387291a73e3435eaa8fbfccd | [
"MIT"
] | 170 | 2018-02-06T05:04:25.000Z | 2021-01-27T07:45:25.000Z | test/support/data_case.ex | imfycc/short_url | b42450e8ea4ef8ea387291a73e3435eaa8fbfccd | [
"MIT"
] | 3 | 2021-03-09T01:06:36.000Z | 2022-02-09T22:57:27.000Z | test/support/data_case.ex | Youthink/short_url | b42450e8ea4ef8ea387291a73e3435eaa8fbfccd | [
"MIT"
] | 26 | 2018-02-26T06:53:10.000Z | 2020-11-08T23:02:13.000Z | defmodule ShortUrl.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias ShortUrl.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import ShortUrl.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(ShortUrl.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(ShortUrl.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transform changeset errors to a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Enum.reduce(opts, message, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 25.703704 | 77 | 0.680836 |
730605e784983217f556896ab61104a6692c2ab7 | 947 | exs | Elixir | example/server/config/config.exs | vip30/vue-phoenix | fe85a5f7c10fbc92c0f0b4e3cfd9d7752d4e5235 | [
"MIT"
] | 9 | 2019-01-21T07:16:44.000Z | 2022-02-18T05:53:15.000Z | example/server/config/config.exs | vip30/vue-phoenix | fe85a5f7c10fbc92c0f0b4e3cfd9d7752d4e5235 | [
"MIT"
] | null | null | null | example/server/config/config.exs | vip30/vue-phoenix | fe85a5f7c10fbc92c0f0b4e3cfd9d7752d4e5235 | [
"MIT"
] | 2 | 2020-07-13T12:44:14.000Z | 2021-09-26T14:38:17.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
# Configures the endpoint
config :event, EventWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "H3XDJ0QcvewZBkh5ANIKXFeLPSBsih+c258CBEx9fI7PCm38MYj8ClGFd6R0x7t8",
render_errors: [view: EventWeb.ErrorView, accepts: ~w(json)],
pubsub: [name: Event.PubSub, adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 33.821429 | 86 | 0.770855 |
730631bce04f6e2d61a0ea31a533bc6ba4c103c2 | 1,795 | ex | Elixir | lib/commanded/middleware/logger.ex | octowombat/commanded | 79a1965e276d3369dcf70ae65ef904d7e59f4a6a | [
"MIT"
] | 1,220 | 2017-10-31T10:56:40.000Z | 2022-03-31T17:40:19.000Z | lib/commanded/middleware/logger.ex | octowombat/commanded | 79a1965e276d3369dcf70ae65ef904d7e59f4a6a | [
"MIT"
] | 294 | 2017-11-03T10:33:41.000Z | 2022-03-24T08:36:42.000Z | lib/commanded/middleware/logger.ex | octowombat/commanded | 79a1965e276d3369dcf70ae65ef904d7e59f4a6a | [
"MIT"
] | 208 | 2017-11-03T10:56:47.000Z | 2022-03-14T05:49:38.000Z | defmodule Commanded.Middleware.Logger do
@moduledoc """
A `Commanded.Middleware` that logs each stage of the command dispatch using
the Elixir `Logger`:
- Before dispatch.
- After successful dispatch.
- After failed dispatch.
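
  ## Usage

  A minimal sketch of registering this middleware in a command router
  (`MyApp.Router` is an assumed example module):

      defmodule MyApp.Router do
        use Commanded.Commands.Router

        middleware Commanded.Middleware.Logger
      end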
"""
@behaviour Commanded.Middleware
alias Commanded.Middleware.Pipeline
import Pipeline
require Logger
def before_dispatch(%Pipeline{} = pipeline) do
Logger.info(fn -> "#{log_context(pipeline)} dispatch start" end)
assign(pipeline, :started_at, DateTime.utc_now())
end
def after_dispatch(%Pipeline{} = pipeline) do
Logger.info(fn ->
"#{log_context(pipeline)} succeeded in #{formatted_diff(delta(pipeline))}"
end)
pipeline
end
def after_failure(%Pipeline{assigns: %{error: error, error_reason: error_reason}} = pipeline) do
Logger.info(fn ->
"#{log_context(pipeline)} failed #{inspect(error)} in #{formatted_diff(delta(pipeline))}, due to: #{inspect(error_reason)}"
end)
pipeline
end
def after_failure(%Pipeline{assigns: %{error: error}} = pipeline) do
Logger.info(fn ->
"#{log_context(pipeline)} failed #{inspect(error)} in #{formatted_diff(delta(pipeline))}"
end)
pipeline
end
def after_failure(%Pipeline{} = pipeline), do: pipeline
defp delta(%Pipeline{assigns: %{started_at: started_at}}) do
DateTime.diff(DateTime.utc_now(), started_at, :microsecond)
end
defp log_context(%Pipeline{command: command}) do
"#{inspect(command.__struct__)}"
end
defp formatted_diff(diff) when diff > 1_000_000,
do: [diff |> div(1_000_000) |> Integer.to_string(), "s"]
defp formatted_diff(diff) when diff > 1_000,
do: [diff |> div(1_000) |> Integer.to_string(), "ms"]
defp formatted_diff(diff), do: [diff |> Integer.to_string(), "µs"]
end
| 28.046875 | 129 | 0.689136 |
730635855e3a1ac15d068397c84b131687f94733 | 1,074 | exs | Elixir | mix.exs | mpugach/phoenix_guides | fc351b587ee8eaf5080ec25c67b3eef8c3814ce7 | [
"MIT"
] | null | null | null | mix.exs | mpugach/phoenix_guides | fc351b587ee8eaf5080ec25c67b3eef8c3814ce7 | [
"MIT"
] | null | null | null | mix.exs | mpugach/phoenix_guides | fc351b587ee8eaf5080ec25c67b3eef8c3814ce7 | [
"MIT"
] | null | null | null | defmodule PhoenixGuides.Mixfile do
use Mix.Project
@version "1.3.0-dev"
def project do
[app: :phoenix_guides,
name: "Phoenix Guides",
version: @version,
elixir: "~> 1.3",
deps: deps(),
preferred_cli_env: ["docs.watch": :docs, docs: :docs],
docs: [source_ref: "v#{@version}",
main: "overview",
logo: "logo.png",
assets: "docs/assets",
extra_section: "GUIDES",
extras: extras(),
homepage_url: "http://www.phoenixframework.org",
description: """
Phoenix Guides - Preview - The guides are published from the phoenixframework/phoenix project, not separately, however this config exists to make it easier to preview changes to the guides without also building the framework source API docs.
"""]]
end
def application do
[]
end
defp deps do
[{:ex_doc, "~> 0.14", only: :docs},
{:fs, "~> 0.9.1", only: :docs}]
end
defp extras do
System.cwd() |> Path.join("docs/**/*.md") |> Path.wildcard()
end
end
| 28.263158 | 253 | 0.585661 |
73064201f05994177dc4a59fcc2a2125904360a2 | 2,335 | ex | Elixir | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudFunctions.V1.Model.Location do
@moduledoc """
A resource that represents Google Cloud Platform location.
## Attributes
* `displayName` (*type:* `String.t`, *default:* `nil`) - The friendly name for this location, typically a nearby city name. For example, "Tokyo".
* `labels` (*type:* `map()`, *default:* `nil`) - Cross-service attributes for the location. For example {"cloud.googleapis.com/region": "us-east1"}
* `locationId` (*type:* `String.t`, *default:* `nil`) - The canonical id for this location. For example: `"us-east1"`.
* `metadata` (*type:* `map()`, *default:* `nil`) - Service-specific metadata. For example the available capacity at the given location.
* `name` (*type:* `String.t`, *default:* `nil`) - Resource name for the location, which may vary between implementations. For example: `"projects/example-project/locations/us-east1"`
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:displayName => String.t() | nil,
:labels => map() | nil,
:locationId => String.t() | nil,
:metadata => map() | nil,
:name => String.t() | nil
}
field(:displayName)
field(:labels, type: :map)
field(:locationId)
field(:metadata, type: :map)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.CloudFunctions.V1.Model.Location do
def decode(value, options) do
GoogleApi.CloudFunctions.V1.Model.Location.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudFunctions.V1.Model.Location do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.576271 | 186 | 0.69636 |
73066160d4d2c73396807094a1c8dee80a667114 | 857 | ex | Elixir | lib/nomad_client/model/object_diff.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | 8 | 2021-09-04T21:22:53.000Z | 2022-02-22T22:48:38.000Z | lib/nomad_client/model/object_diff.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | lib/nomad_client/model/object_diff.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule NomadClient.Model.ObjectDiff do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:Type,
:Name,
:Fields,
:Objects
]
@type t :: %__MODULE__{
:Type => String.t() | nil,
:Name => String.t() | nil,
:Fields => [NomadClient.Model.FieldDiff.t()] | nil,
:Objects => [NomadClient.Model.ObjectDiff.t()] | nil
}
end
defimpl Poison.Decoder, for: NomadClient.Model.ObjectDiff do
import NomadClient.Deserializer
def decode(value, options) do
value
|> deserialize(:Fields, :list, NomadClient.Model.FieldDiff, options)
|> deserialize(:Objects, :list, NomadClient.Model.ObjectDiff, options)
end
end
| 24.485714 | 91 | 0.648775 |
730680a7f958eb1ea2ee026fd784fefa94a58f21 | 1,603 | ex | Elixir | test/support/model_case.ex | rzalamena/pxscratch | 19d3db64b52a6d034d818482c88421f697264b56 | [
"0BSD"
] | 1 | 2016-06-02T21:21:42.000Z | 2016-06-02T21:21:42.000Z | test/support/model_case.ex | rzalamena/pxscratch | 19d3db64b52a6d034d818482c88421f697264b56 | [
"0BSD"
] | null | null | null | test/support/model_case.ex | rzalamena/pxscratch | 19d3db64b52a6d034d818482c88421f697264b56 | [
"0BSD"
] | null | null | null | defmodule Pxscratch.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Pxscratch.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query, only: [from: 1, from: 2]
import Pxscratch.ModelCase
end
end
setup tags do
unless tags[:async] do
Ecto.Adapters.SQL.restart_test_transaction(Pxscratch.Repo, [])
end
:ok
end
@doc """
Helper for returning the list of errors in a model when passed certain data.
## Examples
Given a User model that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "is unsafe"} in changeset.errors
true
"""
def errors_on(model, data) do
model.__struct__.changeset(model, data).errors
end
end
| 25.854839 | 84 | 0.686213 |
730682f896cc15287746b3cf676413c74fc18e0c | 1,288 | ex | Elixir | lib/mix_audit/formatting/human.ex | wolcanism/mix_audit | 5217f4be56519dd67dba0084929d1ed50517bc18 | ["BSD-3-Clause"] | 173 | 2020-03-04T14:04:46.000Z | 2022-03-26T21:50:27.000Z | lib/mix_audit/formatting/human.ex | wolcanism/mix_audit | 5217f4be56519dd67dba0084929d1ed50517bc18 | ["BSD-3-Clause"] | 5 | 2020-03-09T17:04:19.000Z | 2021-12-29T17:38:20.000Z | lib/mix_audit/formatting/human.ex | wolcanism/mix_audit | 5217f4be56519dd67dba0084929d1ed50517bc18 | ["BSD-3-Clause"] | 6 | 2020-03-11T21:43:48.000Z | 2021-12-21T19:20:48.000Z |
defmodule MixAudit.Formatting.Human do
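  # Renders the audit report for humans: a green all-clear line when the
  # report passes, otherwise one red block per vulnerability plus a summary.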
def format(report) do
if report.pass do
colorized_text("No vulnerabilities found.", :green)
else
"""
#{map_vulnerabilities(report.vulnerabilities)}
#{colorized_text("Vulnerabilities found!", :red)}
"""
end
end
defp map_vulnerabilities(vulnerabilities) do
vulnerabilities
|> Enum.map(&map_vulnerability/1)
|> Enum.join("\n")
end
defp map_vulnerability(vulnerability) do
"""
#{colorized_text("Name:", :red)} #{vulnerability.dependency.package}
#{colorized_text("Version:", :red)} #{vulnerability.dependency.version}
#{colorized_text("Lockfile:", :red)} #{vulnerability.dependency.lockfile}
#{colorized_text("CVE:", :red)} #{vulnerability.advisory.cve}
#{colorized_text("URL:", :red)} #{vulnerability.advisory.url}
#{colorized_text("Title:", :red)} #{String.trim(vulnerability.advisory.title)}
#{colorized_text("Patched versions:", :red)} #{patched_versions(vulnerability.advisory.patched_versions)}
"""
end
defp colorized_text(string, color) do
[color, string, :reset]
|> IO.ANSI.format()
|> IO.chardata_to_string()
end
defp patched_versions([]), do: "NONE"
defp patched_versions(versions), do: Enum.join(versions, ", ")
end
| 32.2 | 109 | 0.677795 |
7306a419a7701f34b41b57fab08ad49608e60909 | 852 | exs | Elixir | mix.exs | kevlar1818/mines | 2603c651a86a12f158df34a30e7b7fe138fc660e | ["MIT"] | 45 | 2017-05-17T15:24:57.000Z | 2020-07-28T01:10:02.000Z | mix.exs | kevlar1818/mines | 2603c651a86a12f158df34a30e7b7fe138fc660e | ["MIT"] | null | null | null | mix.exs | kevlar1818/mines | 2603c651a86a12f158df34a30e7b7fe138fc660e | ["MIT"] | 3 | 2017-08-19T12:04:16.000Z | 2018-03-08T04:10:58.000Z |
defmodule Mines.Mixfile do
use Mix.Project
def project do
[app: :mines,
version: "0.0.1",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
escript: escript()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger]]
end
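  # `-noinput` keeps the VM from reading stdin as an Erlang shell, and
  # `-elixir ansi_enabled true` forces ANSI escape codes on for colored output.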
defp escript do
[main_module: Mines.Escript,
emu_args: "-noinput -elixir ansi_enabled true"]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:credo, "~> 0.5", only: [:dev, :test]}
]
end
end
| 20.780488 | 77 | 0.598592 |
7306afdb04998c14b210e472a33fb97daa2b3674 | 53,251 | exs | Elixir | lib/elixir/test/elixir/enum_test.exs | Zanadar/elixir | ef967b3e07f189b9cae37d5b12bd7258619b3e15 | ["Apache-2.0"] | null | null | null | lib/elixir/test/elixir/enum_test.exs | Zanadar/elixir | ef967b3e07f189b9cae37d5b12bd7258619b3e15 | ["Apache-2.0"] | null | null | null | lib/elixir/test/elixir/enum_test.exs | Zanadar/elixir | ef967b3e07f189b9cae37d5b12bd7258619b3e15 | ["Apache-2.0"] | null | null | null |
Code.require_file "test_helper.exs", __DIR__
defmodule EnumTest do
use ExUnit.Case, async: true
doctest Enum
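  # Helper: enumerates a single-element stream that messages the test process,
  # then asserts the element was produced exactly once (no double traversal).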
defp assert_runs_enumeration_only_once(enum_fun) do
enumerator = Stream.map([:element], fn element ->
send(self(), element)
element
end)
enum_fun.(enumerator)
assert_received :element
refute_received :element
end
test "all?/2" do
assert Enum.all?([2, 4, 6])
refute Enum.all?([2, nil, 4])
assert Enum.all?([])
assert Enum.all?([2, 4, 6], fn(x) -> rem(x, 2) == 0 end)
refute Enum.all?([2, 3, 4], fn(x) -> rem(x, 2) == 0 end)
end
test "any?/2" do
refute Enum.any?([2, 4, 6], fn(x) -> rem(x, 2) == 1 end)
assert Enum.any?([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
refute Enum.any?([false, false, false])
assert Enum.any?([false, true, false])
assert Enum.any?([:foo, false, false])
refute Enum.any?([false, nil, false])
refute Enum.any?([])
end
test "at/3" do
assert Enum.at([2, 4, 6], 0) == 2
assert Enum.at([2, 4, 6], 2) == 6
assert Enum.at([2, 4, 6], 4) == nil
assert Enum.at([2, 4, 6], 4, :none) == :none
assert Enum.at([2, 4, 6], -2) == 4
assert Enum.at([2, 4, 6], -4) == nil
end
test "chunk/2" do
assert Enum.chunk([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4]]
end
test "chunk/4" do
assert Enum.chunk([1, 2, 3, 4, 5], 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2) == [[1, 2, 3], [3, 4, 5]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 2, 3) == [[1, 2], [4, 5]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 3, []) == [[1, 2, 3], [4, 5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5], 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]]
end
test "chunk_by/2" do
assert Enum.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1)) == [[1], [2, 2], [3], [4, 4, 6], [7, 7]]
assert Enum.chunk_by([1, 2, 3, 4], fn _ -> true end) == [[1, 2, 3, 4]]
assert Enum.chunk_by([], fn _ -> true end) == []
assert Enum.chunk_by([1], fn _ -> true end) == [[1]]
end
test "concat/1" do
assert Enum.concat([[1, [2], 3], [4], [5, 6]]) == [1, [2], 3, 4, 5, 6]
assert Enum.concat([[], []]) == []
assert Enum.concat([[]]) == []
assert Enum.concat([]) == []
end
test "concat/2" do
assert Enum.concat([], [1]) == [1]
assert Enum.concat([1, [2], 3], [4, 5]) == [1, [2], 3, 4, 5]
assert Enum.concat([1, 2], 3..5) == [1, 2, 3, 4, 5]
assert Enum.concat([], []) == []
assert Enum.concat([], 1..3) == [1, 2, 3]
assert Enum.concat(fn acc, _ -> acc end, [1]) == [1]
end
test "count/1" do
assert Enum.count([1, 2, 3]) == 3
assert Enum.count([]) == 0
assert Enum.count([1, true, false, nil]) == 4
end
test "count/2" do
assert Enum.count([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == 1
assert Enum.count([], fn(x) -> rem(x, 2) == 0 end) == 0
assert Enum.count([1, true, false, nil], & &1) == 2
end
test "dedup/1" do
assert Enum.dedup([1, 1, 2, 1, 1, 2, 1]) == [1, 2, 1, 2, 1]
assert Enum.dedup([2, 1, 1, 2, 1]) == [2, 1, 2, 1]
assert Enum.dedup([1, 2, 3, 4]) == [1, 2, 3, 4]
assert Enum.dedup([1, 1.0, 2.0, 2]) == [1, 1.0, 2.0, 2]
assert Enum.dedup([]) == []
assert Enum.dedup([nil, nil, true, {:value, true}]) == [nil, true, {:value, true}]
assert Enum.dedup([nil]) == [nil]
end
test "dedup_by/2" do
assert Enum.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end)
== [{1, :x}, {2, :y}, {1, :x}]
assert Enum.dedup_by([5, 1, 2, 3, 2, 1], fn x -> x > 2 end) == [5, 1, 3, 2]
end
test "drop/2" do
assert Enum.drop([1, 2, 3], 0) == [1, 2, 3]
assert Enum.drop([1, 2, 3], 1) == [2, 3]
assert Enum.drop([1, 2, 3], 2) == [3]
assert Enum.drop([1, 2, 3], 3) == []
assert Enum.drop([1, 2, 3], 4) == []
assert Enum.drop([1, 2, 3], -1) == [1, 2]
assert Enum.drop([1, 2, 3], -2) == [1]
assert Enum.drop([1, 2, 3], -4) == []
assert Enum.drop([], 3) == []
end
test "drop_every/2" do
assert Enum.drop_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 2) == [2, 4, 6, 8, 10]
assert Enum.drop_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3) == [2, 3, 5, 6, 8, 9]
assert Enum.drop_every([], 2) == []
assert Enum.drop_every([1, 2], 2) == [2]
assert Enum.drop_every([1, 2, 3], 0) == [1, 2, 3]
assert_raise FunctionClauseError, fn ->
Enum.drop_every([1, 2, 3], -1)
end
end
test "drop_while/2" do
assert Enum.drop_while([1, 2, 3, 4, 3, 2, 1], fn(x) -> x <= 3 end) == [4, 3, 2, 1]
assert Enum.drop_while([1, 2, 3], fn(_) -> false end) == [1, 2, 3]
assert Enum.drop_while([1, 2, 3], fn(x) -> x <= 3 end) == []
assert Enum.drop_while([], fn(_) -> false end) == []
end
test "each/2" do
try do
assert Enum.each([], fn(x) -> x end) == :ok
assert Enum.each([1, 2, 3], fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == 6
after
Process.delete(:enum_test_each)
end
end
test "empty?/1" do
assert Enum.empty?([])
refute Enum.empty?([1, 2, 3])
refute Enum.empty?(1..3)
end
test "fetch/2" do
assert Enum.fetch([66], 0) == {:ok, 66}
assert Enum.fetch([66], -1) == {:ok, 66}
assert Enum.fetch([66], 1) == :error
assert Enum.fetch([66], -2) == :error
assert Enum.fetch([2, 4, 6], 0) == {:ok, 2}
assert Enum.fetch([2, 4, 6], -1) == {:ok, 6}
assert Enum.fetch([2, 4, 6], 2) == {:ok, 6}
assert Enum.fetch([2, 4, 6], 4) == :error
assert Enum.fetch([2, 4, 6], -2) == {:ok, 4}
assert Enum.fetch([2, 4, 6], -4) == :error
assert Enum.fetch([], 0) == :error
assert Enum.fetch([], 1) == :error
end
test "fetch!/2" do
assert Enum.fetch!([2, 4, 6], 0) == 2
assert Enum.fetch!([2, 4, 6], 2) == 6
assert Enum.fetch!([2, 4, 6], -2) == 4
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!([2, 4, 6], 4)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!([2, 4, 6], -4)
end
end
test "filter/2" do
assert Enum.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [2]
assert Enum.filter([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6]
assert Enum.filter([1, 2, false, 3, nil], & &1) == [1, 2, 3]
assert Enum.filter([1, 2, 3], &match?(1, &1)) == [1]
assert Enum.filter([1, 2, 3], &match?(x when x < 3, &1)) == [1, 2]
assert Enum.filter([1, 2, 3], fn _ -> true end) == [1, 2, 3]
end
test "filter_map/3" do
assert Enum.filter_map([1, 2, 3], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4]
assert Enum.filter_map([2, 4, 6], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12]
end
test "find/3" do
assert Enum.find([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0
assert Enum.find([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 3
end
test "find_index/2" do
assert Enum.find_index([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find_index([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 1
assert Stream.take(1..3, 3) |> Enum.find_index(fn _ -> false end) == nil
assert Stream.take(1..6, 6) |> Enum.find_index(fn x -> x == 5 end) == 4
end
test "find_value/2" do
assert Enum.find_value([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find_value([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0
assert Enum.find_value([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
end
test "flat_map/2" do
assert Enum.flat_map([], fn(x) -> [x, x] end) == []
assert Enum.flat_map([1, 2, 3], fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3]
assert Enum.flat_map([1, 2, 3], fn(x) -> x..x+1 end) == [1, 2, 2, 3, 3, 4]
end
test "flat_map_reduce/3" do
assert Enum.flat_map_reduce([1, 2, 3], 0, &{[&1, &2], &1 + &2}) ==
{[1, 0, 2, 1, 3, 3], 6}
end
test "group_by/3" do
assert Enum.group_by([], fn _ -> raise "oops" end) == %{}
assert Enum.group_by([1, 2, 3], &rem(&1, 2)) == %{0 => [2], 1 => [1, 3]}
end
test "intersperse/2" do
assert Enum.intersperse([], true) == []
assert Enum.intersperse([1], true) == [1]
assert Enum.intersperse([1, 2, 3], true) == [1, true, 2, true, 3]
end
test "into/2" do
assert Enum.into([a: 1, b: 2], %{}) == %{a: 1, b: 2}
assert Enum.into([a: 1, b: 2], %{c: 3}) == %{a: 1, b: 2, c: 3}
assert Enum.into(%{a: 1, b: 2}, []) == [a: 1, b: 2]
assert Enum.into(1..3, []) == [1, 2, 3]
assert Enum.into(["H", "i"], "") == "Hi"
end
test "into/3" do
assert Enum.into([1, 2, 3], [], fn x -> x * 2 end) == [2, 4, 6]
assert Enum.into([1, 2, 3], "numbers: ", &to_string/1) == "numbers: 123"
assert_raise FunctionClauseError, fn ->
Enum.into([2, 3], %{a: 1}, &(&1))
end
end
test "join/2" do
assert Enum.join([], " = ") == ""
assert Enum.join([1, 2, 3], " = ") == "1 = 2 = 3"
assert Enum.join([1, "2", 3], " = ") == "1 = 2 = 3"
assert Enum.join([1, 2, 3]) == "123"
assert Enum.join(["", "", 1, 2, "", 3, "", "\n"], ";") == ";;1;2;;3;;\n"
assert Enum.join([""]) == ""
assert Enum.join(fn(acc, _) -> acc end, ".") == ""
end
test "map/2" do
assert Enum.map([], fn x -> x * 2 end) == []
assert Enum.map([1, 2, 3], fn x -> x * 2 end) == [2, 4, 6]
end
test "map_every/3" do
assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 2, fn x -> x * 2 end) == [2, 2, 6, 4, 10, 6, 14, 8, 18, 10]
assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3, fn x -> x * 2 end) == [2, 2, 3, 8, 5, 6, 14, 8, 9, 20]
assert Enum.map_every([], 2, fn x -> x * 2 end) == []
assert Enum.map_every([1, 2], 2, fn x -> x * 2 end) == [2, 2]
assert Enum.map_every([1, 2, 3], 0, fn _x -> raise :i_should_have_never_been_invoked end) == [1, 2, 3]
assert Enum.map_every(1..3, 1, fn x -> x * 2 end) == [2, 4, 6]
assert_raise FunctionClauseError, fn ->
Enum.map_every([1, 2, 3], -1, fn x -> x * 2 end)
end
assert_raise FunctionClauseError, fn ->
Enum.map_every(1..10, 3.33, fn x -> x * 2 end)
end
assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 9, fn x -> x + 1000 end) == [1001, 2, 3, 4, 5, 6, 7, 8, 9, 1010]
assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10, fn x -> x + 1000 end) == [1001, 2, 3, 4, 5, 6, 7, 8, 9, 10]
assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 100, fn x -> x + 1000 end) == [1001, 2, 3, 4, 5, 6, 7, 8, 9, 10]
end
test "map_join/3" do
assert Enum.map_join([], " = ", &(&1 * 2)) == ""
assert Enum.map_join([1, 2, 3], " = ", &(&1 * 2)) == "2 = 4 = 6"
assert Enum.map_join([1, 2, 3], &(&1 * 2)) == "246"
assert Enum.map_join(["", "", 1, 2, "", 3, "", "\n"], ";", &(&1)) == ";;1;2;;3;;\n"
assert Enum.map_join([""], "", &(&1)) == ""
assert Enum.map_join(fn(acc, _) -> acc end, ".", &(&1 + 0)) == ""
end
test "map_reduce/3" do
assert Enum.map_reduce([], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[], 1}
assert Enum.map_reduce([1, 2, 3], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7}
end
test "max/1" do
assert Enum.max([1]) == 1
assert Enum.max([1, 2, 3]) == 3
assert Enum.max([1, [], :a, {}]) == []
assert_raise Enum.EmptyError, fn ->
Enum.max([])
end
end
test "max/2" do
assert Enum.max([1], fn -> nil end) == 1
assert Enum.max([1, 2, 3], fn -> nil end) == 3
assert Enum.max([1, [], :a, {}], fn -> nil end) == []
assert Enum.max([], fn -> :empty_value end) == :empty_value
assert Enum.max(%{}, fn -> :empty_value end) == :empty_value
assert_runs_enumeration_only_once(&Enum.max(&1, fn -> nil end))
end
test "max_by/2" do
assert Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "aaa"
assert_raise Enum.EmptyError, fn ->
Enum.max_by([], fn(x) -> String.length(x) end)
end
assert_raise Enum.EmptyError, fn ->
Enum.max_by(%{}, &(&1))
end
end
test "max_by/3" do
assert Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end, fn -> nil end) == "aaa"
assert Enum.max_by([], fn(x) -> String.length(x) end, fn -> :empty_value end) == :empty_value
assert Enum.max_by(%{}, &(&1), fn -> :empty_value end) == :empty_value
assert Enum.max_by(%{}, &(&1), fn -> {:a, :tuple} end) == {:a, :tuple}
assert_runs_enumeration_only_once(&Enum.max_by(&1, fn e -> e end, fn -> nil end))
end
test "member?/2" do
assert Enum.member?([1, 2, 3], 2)
refute Enum.member?([], 0)
refute Enum.member?([1, 2, 3], 0)
end
test "min/1" do
assert Enum.min([1]) == 1
assert Enum.min([1, 2, 3]) == 1
assert Enum.min([[], :a, {}]) == :a
assert_raise Enum.EmptyError, fn ->
Enum.min([])
end
end
test "min/2" do
assert Enum.min([1], fn -> nil end) == 1
assert Enum.min([1, 2, 3], fn -> nil end) == 1
assert Enum.min([[], :a, {}], fn -> nil end) == :a
assert Enum.min([], fn -> :empty_value end) == :empty_value
assert Enum.min(%{}, fn -> :empty_value end) == :empty_value
assert_runs_enumeration_only_once(&Enum.min(&1, fn -> nil end))
end
test "min_by/2" do
assert Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "a"
assert_raise Enum.EmptyError, fn ->
Enum.min_by([], fn(x) -> String.length(x) end)
end
assert_raise Enum.EmptyError, fn ->
Enum.min_by(%{}, &(&1))
end
end
test "min_by/3" do
assert Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end, fn -> nil end) == "a"
assert Enum.min_by([], fn(x) -> String.length(x) end, fn -> :empty_value end) == :empty_value
assert Enum.min_by(%{}, &(&1), fn -> :empty_value end) == :empty_value
assert Enum.min_by(%{}, &(&1), fn -> {:a, :tuple} end) == {:a, :tuple}
assert_runs_enumeration_only_once(&Enum.min_by(&1, fn e -> e end, fn -> nil end))
end
test "min_max/1" do
assert Enum.min_max([1]) == {1, 1}
assert Enum.min_max([2, 3, 1]) == {1, 3}
assert Enum.min_max([[], :a, {}]) == {:a, []}
assert_raise Enum.EmptyError, fn ->
Enum.min_max([])
end
end
test "min_max/2" do
assert Enum.min_max([1], fn -> nil end) == {1, 1}
assert Enum.min_max([2, 3, 1], fn -> nil end) == {1, 3}
assert Enum.min_max([[], :a, {}], fn -> nil end) == {:a, []}
assert Enum.min_max([], fn -> {:empty_min, :empty_max} end) == {:empty_min, :empty_max}
assert Enum.min_max(%{}, fn -> {:empty_min, :empty_max} end) == {:empty_min, :empty_max}
assert_runs_enumeration_only_once(&Enum.min_max(&1, fn -> nil end))
end
test "min_max_by/2" do
assert Enum.min_max_by(["aaa", "a", "aa"], fn(x) -> String.length(x) end) == {"a", "aaa"}
assert_raise Enum.EmptyError, fn ->
Enum.min_max_by([], fn(x) -> String.length(x) end)
end
end
test "min_max_by/3" do
assert Enum.min_max_by(["aaa", "a", "aa"], fn(x) -> String.length(x) end, fn -> nil end) == {"a", "aaa"}
assert Enum.min_max_by([], fn(x) -> String.length(x) end, fn -> {:no_min, :no_max} end) == {:no_min, :no_max}
assert Enum.min_max_by(%{}, fn(x) -> String.length(x) end, fn -> {:no_min, :no_max} end) == {:no_min, :no_max}
assert_runs_enumeration_only_once(&Enum.min_max_by(&1, fn x -> x end, fn -> nil end))
end
test "split_with/2" do
assert Enum.split_with([], fn(x) -> rem(x, 2) == 0 end) == {[], []}
assert Enum.split_with([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]}
assert Enum.split_with([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == {[2, 4, 6], []}
assert Enum.split_with(1..5, fn(x) -> rem(x, 2) == 0 end) == {[2, 4], [1, 3, 5]}
assert Enum.split_with(-3..0, fn(x) -> x > 0 end) == {[], [-3, -2, -1, 0]}
assert Enum.split_with(%{}, fn(x) -> rem(x, 2) == 0 end) == {[], []}
assert Enum.split_with(%{a: 1, b: 2, c: 3}, fn({_k, v}) -> rem(v, 2) == 0 end) == {[b: 2], [a: 1, c: 3]}
assert Enum.split_with(%{b: 2, d: 4, f: 6}, fn({_k, v}) -> rem(v, 2) == 0 end) == {[b: 2, d: 4, f: 6], []}
end
test "random/1" do
# corner cases, independent of the seed
assert_raise Enum.EmptyError, fn -> Enum.random([]) end
assert Enum.random([1]) == 1
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1306, 421106, 567597}
:rand.seed(:exsplus, seed1)
assert Enum.random([1, 2]) == 2
assert Enum.random([1, 2, 3]) == 1
assert Enum.random([1, 2, 3, 4]) == 1
assert Enum.random([1, 2, 3, 4, 5]) == 2
:rand.seed(:exsplus, seed2)
assert Enum.random([1, 2]) == 2
assert Enum.random([1, 2, 3]) == 3
assert Enum.random([1, 2, 3, 4]) == 2
assert Enum.random([1, 2, 3, 4, 5]) == 3
end
test "reduce/2" do
assert Enum.reduce([1, 2, 3], fn(x, acc) -> x + acc end) == 6
assert_raise Enum.EmptyError, fn ->
Enum.reduce([], fn(x, acc) -> x + acc end)
end
assert_raise Enum.EmptyError, fn ->
Enum.reduce(%{}, fn(_, acc) -> acc end)
end
end
test "reduce/3" do
assert Enum.reduce([], 1, fn(x, acc) -> x + acc end) == 1
assert Enum.reduce([1, 2, 3], 1, fn(x, acc) -> x + acc end) == 7
end
test "reduce_while/3" do
assert Enum.reduce_while([1, 2, 3], 1, fn i, acc -> {:cont, acc + i} end) == 7
assert Enum.reduce_while([1, 2, 3], 1, fn _i, acc -> {:halt, acc} end) == 1
assert Enum.reduce_while([], 0, fn _i, acc -> {:cont, acc} end) == 0
end
test "reject/2" do
assert Enum.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [1, 3]
assert Enum.reject([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == []
assert Enum.reject([1, true, nil, false, 2], &(&1)) == [nil, false]
end
test "reverse/1" do
assert Enum.reverse([]) == []
assert Enum.reverse([1, 2, 3]) == [3, 2, 1]
assert Enum.reverse([5..5]) == [5..5]
end
test "reverse/2" do
assert Enum.reverse([1, 2, 3], [4, 5, 6]) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse([1, 2, 3], []) == [3, 2, 1]
assert Enum.reverse([5..5], [5]) == [5..5, 5]
end
test "reverse_slice/3" do
assert Enum.reverse_slice([], 1, 2) == []
assert Enum.reverse_slice([1, 2, 3], 0, 0) == [1, 2, 3]
assert Enum.reverse_slice([1, 2, 3], 0, 1) == [1, 2, 3]
assert Enum.reverse_slice([1, 2, 3], 0, 2) == [2, 1, 3]
assert Enum.reverse_slice([1, 2, 3], 0, 20000000) == [3, 2, 1]
assert Enum.reverse_slice([1, 2, 3], 100, 2) == [1, 2, 3]
assert Enum.reverse_slice([1, 2, 3], 10, 10) == [1, 2, 3]
end
test "scan/2" do
assert Enum.scan([1, 2, 3, 4, 5], &(&1 + &2)) == [1, 3, 6, 10, 15]
assert Enum.scan([], &(&1 + &2)) == []
end
test "scan/3" do
assert Enum.scan([1, 2, 3, 4, 5], 0, &(&1 + &2)) == [1, 3, 6, 10, 15]
assert Enum.scan([], 0, &(&1 + &2)) == []
end
test "shuffle/1" do
# set a fixed seed so the test can be deterministic
:rand.seed(:exsplus, {1374, 347975, 449264})
assert Enum.shuffle([1, 2, 3, 4, 5]) == [2, 1, 3, 5, 4]
end
test "slice/2" do
list = [1, 2, 3, 4, 5]
assert Enum.slice(list, 0..0) == [1]
assert Enum.slice(list, 0..1) == [1, 2]
assert Enum.slice(list, 0..2) == [1, 2, 3]
assert Enum.slice(list, 1, 2) == [2, 3]
assert Enum.slice(list, 1, 0) == []
assert Enum.slice(list, 2, 5) == [3, 4, 5]
assert Enum.slice(list, 2, 6) == [3, 4, 5]
assert Enum.slice(list, 5, 5) == []
assert Enum.slice(list, 6, 5) == []
assert Enum.slice(list, 6, 0) == []
assert Enum.slice(list, -6, 0) == []
assert Enum.slice(list, -6, 5) == []
assert Enum.slice(list, -2, 5) == [4, 5]
assert Enum.slice(list, -3, 1) == [3]
assert_raise FunctionClauseError, fn ->
Enum.slice(list, 0, -1)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(list, 0.99, 0)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(list, 0, 0.99)
end
end
test "slice/3" do
list = [1, 2, 3, 4, 5]
assert Enum.slice(list, 0, 0) == []
assert Enum.slice(list, 0, 1) == [1]
assert Enum.slice(list, 0, 2) == [1, 2]
assert Enum.slice(list, 1, 2) == [2, 3]
assert Enum.slice(list, 1, 0) == []
assert Enum.slice(list, 2, 5) == [3, 4, 5]
assert Enum.slice(list, 2, 6) == [3, 4, 5]
assert Enum.slice(list, 5, 5) == []
assert Enum.slice(list, 6, 5) == []
assert Enum.slice(list, 6, 0) == []
assert Enum.slice(list, -6, 0) == []
assert Enum.slice(list, -6, 5) == []
assert Enum.slice(list, -2, 5) == [4, 5]
assert Enum.slice(list, -3, 1) == [3]
assert_raise FunctionClauseError, fn ->
Enum.slice(list, 0, -1)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(list, 0.99, 0)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(list, 0, 0.99)
end
end
test "sort/1" do
assert Enum.sort([5, 3, 2, 4, 1]) == [1, 2, 3, 4, 5]
end
test "sort/2" do
assert Enum.sort([5, 3, 2, 4, 1], &(&1 > &2)) == [5, 4, 3, 2, 1]
end
test "sort_by/3" do
collection = [
[other_data: 1, sorted_data: 5],
[other_data: 3, sorted_data: 4],
[other_data: 4, sorted_data: 3],
[other_data: 2, sorted_data: 2],
[other_data: 5, sorted_data: 1]
]
assert Enum.sort_by(
collection,
&(&1[:sorted_data])
) == [
[other_data: 5, sorted_data: 1],
[other_data: 2, sorted_data: 2],
[other_data: 4, sorted_data: 3],
[other_data: 3, sorted_data: 4],
[other_data: 1, sorted_data: 5]
]
assert Enum.sort_by(collection, &(&1[:sorted_data]), &>=/2) == collection
end
test "split/2" do
assert Enum.split([1, 2, 3], 0) == {[], [1, 2, 3]}
assert Enum.split([1, 2, 3], 1) == {[1], [2, 3]}
assert Enum.split([1, 2, 3], 2) == {[1, 2], [3]}
assert Enum.split([1, 2, 3], 3) == {[1, 2, 3], []}
assert Enum.split([1, 2, 3], 4) == {[1, 2, 3], []}
assert Enum.split([], 3) == {[], []}
assert Enum.split([1, 2, 3], -1) == {[1, 2], [3]}
assert Enum.split([1, 2, 3], -2) == {[1], [2, 3]}
assert Enum.split([1, 2, 3], -3) == {[], [1, 2, 3]}
assert Enum.split([1, 2, 3], -10) == {[], [1, 2, 3]}
end
test "split_while/2" do
assert Enum.split_while([1, 2, 3], fn(_) -> false end) == {[], [1, 2, 3]}
assert Enum.split_while([1, 2, 3], fn(_) -> true end) == {[1, 2, 3], []}
assert Enum.split_while([1, 2, 3], fn(x) -> x > 2 end) == {[], [1, 2, 3]}
assert Enum.split_while([1, 2, 3], fn(x) -> x > 3 end) == {[], [1, 2, 3]}
assert Enum.split_while([1, 2, 3], fn(x) -> x < 3 end) == {[1, 2], [3]}
assert Enum.split_while([], fn(_) -> true end) == {[], []}
end
test "sum/1" do
assert Enum.sum([]) == 0
assert Enum.sum([1]) == 1
assert Enum.sum([1, 2, 3]) == 6
assert Enum.sum([1.1, 2.2, 3.3]) == 6.6
assert Enum.sum([-3, -2, -1, 0, 1, 2, 3]) == 0
assert Enum.sum(42..42) == 42
assert Enum.sum(11..17) == 98
assert Enum.sum(17..11) == 98
assert Enum.sum(11..-17) == Enum.sum(-17..11)
assert_raise ArithmeticError, fn ->
Enum.sum([{}])
end
assert_raise ArithmeticError, fn ->
Enum.sum([1, {}])
end
end
test "take/2" do
assert Enum.take([1, 2, 3], 0) == []
assert Enum.take([1, 2, 3], 1) == [1]
assert Enum.take([1, 2, 3], 2) == [1, 2]
assert Enum.take([1, 2, 3], 3) == [1, 2, 3]
assert Enum.take([1, 2, 3], 4) == [1, 2, 3]
assert Enum.take([1, 2, 3], -1) == [3]
assert Enum.take([1, 2, 3], -2) == [2, 3]
assert Enum.take([1, 2, 3], -4) == [1, 2, 3]
assert Enum.take([], 3) == []
end
test "take_every/2" do
assert Enum.take_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 2) == [1, 3, 5, 7, 9]
assert Enum.take_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3) == [1, 4, 7, 10]
assert Enum.take_every([], 2) == []
assert Enum.take_every([1, 2], 2) == [1]
assert Enum.take_every([1, 2, 3], 0) == []
assert Enum.take_every(1..3, 1) == [1, 2, 3]
assert_raise FunctionClauseError, fn ->
Enum.take_every([1, 2, 3], -1)
end
assert_raise FunctionClauseError, fn ->
Enum.take_every(1..10, 3.33)
end
end
test "take_random/2" do
assert Enum.take_random(-42..-42, 1) == [-42]
# corner cases, independent of the seed
assert_raise FunctionClauseError, fn -> Enum.take_random([1, 2], -1) end
assert Enum.take_random([], 0) == []
assert Enum.take_random([], 3) == []
assert Enum.take_random([1], 0) == []
assert Enum.take_random([1], 2) == [1]
assert Enum.take_random([1, 2], 0) == []
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1406, 421106, 567597}
:rand.seed(:exsplus, seed1)
assert Enum.take_random([1, 2, 3], 1) == [2]
assert Enum.take_random([1, 2, 3], 2) == [3, 1]
assert Enum.take_random([1, 2, 3], 3) == [1, 3, 2]
assert Enum.take_random([1, 2, 3], 4) == [2, 3, 1]
:rand.seed(:exsplus, seed2)
assert Enum.take_random([1, 2, 3], 1) == [3]
assert Enum.take_random([1, 2, 3], 2) == [1, 2]
assert Enum.take_random([1, 2, 3], 3) == [1, 2, 3]
assert Enum.take_random([1, 2, 3], 4) == [2, 1, 3]
assert Enum.take_random([1, 2, 3], 129) == [3, 2, 1]
# assert that every item in the sample comes from the input list
    list = for _ <- 1..100, do: make_ref()
for x <- Enum.take_random(list, 50) do
assert x in list
end
assert_raise FunctionClauseError, fn ->
Enum.take_random(1..10, -1)
end
assert_raise FunctionClauseError, fn ->
Enum.take_random(1..10, 10.0)
end
assert_raise FunctionClauseError, fn ->
Enum.take_random(1..10, 128.1)
end
end
test "take_while/2" do
assert Enum.take_while([1, 2, 3], fn(x) -> x > 3 end) == []
assert Enum.take_while([1, 2, 3], fn(x) -> x <= 1 end) == [1]
assert Enum.take_while([1, 2, 3], fn(x) -> x <= 3 end) == [1, 2, 3]
assert Enum.take_while([], fn(_) -> true end) == []
end
test "to_list/1" do
assert Enum.to_list([]) == []
end
test "uniq/1" do
assert Enum.uniq([5, 1, 2, 3, 2, 1]) == [5, 1, 2, 3]
end
test "uniq_by/2" do
assert Enum.uniq_by([1, 2, 3, 2, 1], fn x -> x end) == [1, 2, 3]
end
test "unzip/1" do
assert Enum.unzip([{:a, 1}, {:b, 2}, {:c, 3}]) == {[:a, :b, :c], [1, 2, 3]}
assert Enum.unzip([]) == {[], []}
assert Enum.unzip(%{a: 1, b: 2}) == {[:a, :b], [1, 2]}
assert Enum.unzip([foo: "a", bar: "b"]) == {[:foo, :bar], ["a", "b"]}
assert_raise FunctionClauseError, fn -> Enum.unzip([{:a, 1}, {:b, 2, "foo"}]) end
assert_raise FunctionClauseError, fn -> Enum.unzip([{1, 2, {3, {4, 5}}}]) end
assert_raise FunctionClauseError, fn -> Enum.unzip([1, 2, 3]) end
end
test "with_index/2" do
assert Enum.with_index([]) == []
assert Enum.with_index([1, 2, 3]) == [{1, 0}, {2, 1}, {3, 2}]
assert Enum.with_index([1, 2, 3], 10) == [{1, 10}, {2, 11}, {3, 12}]
end
test "zip/2" do
assert Enum.zip([:a, :b], [1, 2]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b], [1, 2, 3, 4]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b, :c, :d], [1, 2]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([], [1]) == []
assert Enum.zip([1], []) == []
assert Enum.zip([], []) == []
end
test "zip/1" do
assert Enum.zip([[:a, :b], [1, 2], ["foo", "bar"]]) == [{:a, 1, "foo"}, {:b, 2, "bar"}]
assert Enum.zip([[:a, :b], [1, 2, 3, 4], ["foo", "bar", "baz", "qux"]]) == [{:a, 1, "foo"}, {:b, 2, "bar"}]
assert Enum.zip([[:a, :b, :c, :d], [1, 2], ["foo", "bar", "baz", "qux"]]) == [{:a, 1, "foo"}, {:b, 2, "bar"}]
assert Enum.zip([[:a, :b, :c, :d], [1, 2, 3, 4], ["foo", "bar"]]) == [{:a, 1, "foo"}, {:b, 2, "bar"}]
assert Enum.zip([1..10, ["foo", "bar"]]) == [{1, "foo"}, {2, "bar"}]
assert Enum.zip([]) == []
assert Enum.zip([[]]) == []
assert Enum.zip([[1]]) == [{1}]
assert Enum.zip([[], [], [], []]) == []
end
end
defmodule EnumTest.Range do
use ExUnit.Case, async: true
test "all?/2" do
assert Enum.all?(0..1)
assert Enum.all?(1..0)
refute Enum.all?(0..5, fn(x) -> rem(x, 2) == 0 end)
assert Enum.all?(0..1, fn(x) -> x < 2 end)
end
test "any?/2" do
assert Enum.any?(1..0)
refute Enum.any?(0..5, &(&1 > 10))
assert Enum.any?(0..5, &(&1 > 3))
end
test "at/3" do
assert Enum.at(2..6, 0) == 2
assert Enum.at(2..6, 4) == 6
assert Enum.at(2..6, 6) == nil
assert Enum.at(2..6, 6, :none) == :none
assert Enum.at(2..6, -2) == 5
assert Enum.at(2..6, -8) == nil
end
test "chunk/2" do
assert Enum.chunk(1..5, 2) == [[1, 2], [3, 4]]
end
test "chunk/4" do
assert Enum.chunk(1..5, 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]]
assert Enum.chunk(1..6, 3, 2) == [[1, 2, 3], [3, 4, 5]]
assert Enum.chunk(1..6, 2, 3) == [[1, 2], [4, 5]]
assert Enum.chunk(1..6, 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]]
assert Enum.chunk(1..5, 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]]
end
test "chunk_by/2" do
assert Enum.chunk_by(1..4, fn _ -> true end) == [[1, 2, 3, 4]]
assert Enum.chunk_by(1..4, &(rem(&1, 2) == 1)) == [[1], [2], [3], [4]]
end
test "concat/1" do
assert Enum.concat([1..2, 4..6]) == [1, 2, 4, 5, 6]
assert Enum.concat([1..5, fn acc, _ -> acc end, [1]]) == [1, 2, 3, 4, 5, 1]
end
test "concat/2" do
assert Enum.concat(1..3, 4..5) == [1, 2, 3, 4, 5]
assert Enum.concat(1..3, [4, 5]) == [1, 2, 3, 4, 5]
assert Enum.concat(1..3, []) == [1, 2, 3]
assert Enum.concat(1..3, 0..0) == [1, 2, 3, 0]
end
test "count/1" do
assert Enum.count(1..5) == 5
assert Enum.count(1..1) == 1
end
test "count/2" do
assert Enum.count(1..5, fn(x) -> rem(x, 2) == 0 end) == 2
assert Enum.count(1..1, fn(x) -> rem(x, 2) == 0 end) == 0
end
test "dedup/1" do
assert Enum.dedup(1..3) == [1, 2, 3]
end
test "dedup_by/2" do
assert Enum.dedup_by(1..3, fn _ -> 1 end) == [1]
end
test "drop/2" do
assert Enum.drop(1..3, 0) == [1, 2, 3]
assert Enum.drop(1..3, 1) == [2, 3]
assert Enum.drop(1..3, 2) == [3]
assert Enum.drop(1..3, 3) == []
assert Enum.drop(1..3, 4) == []
assert Enum.drop(1..3, -1) == [1, 2]
assert Enum.drop(1..3, -2) == [1]
assert Enum.drop(1..3, -4) == []
assert Enum.drop(1..0, 3) == []
end
test "drop_every/2" do
assert Enum.drop_every(1..10, 2) == [2, 4, 6, 8, 10]
assert Enum.drop_every(1..10, 3) == [2, 3, 5, 6, 8, 9]
assert Enum.drop_every(0..0, 2) == []
assert Enum.drop_every(1..2, 2) == [2]
assert Enum.drop_every(1..3, 0) == [1, 2, 3]
assert Enum.drop_every(1..3, 1) == []
assert_raise FunctionClauseError, fn ->
Enum.drop_every(1..10, 3.33)
end
end
test "drop_while/2" do
assert Enum.drop_while(0..6, fn(x) -> x <= 3 end) == [4, 5, 6]
assert Enum.drop_while(0..6, fn(_) -> false end) == [0, 1, 2, 3, 4, 5, 6]
assert Enum.drop_while(0..3, fn(x) -> x <= 3 end) == []
assert Enum.drop_while(1..0, fn(_) -> nil end) == [1, 0]
end
test "each/2" do
try do
assert Enum.each(1..0, fn(x) -> x end) == :ok
assert Enum.each(1..3, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == 6
after
Process.delete(:enum_test_each)
end
try do
assert Enum.each(-1..-3, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == -6
after
Process.delete(:enum_test_each)
end
end
test "empty?/1" do
refute Enum.empty?(1..0)
refute Enum.empty?(1..2)
end
test "fetch/2" do
# ascending order
assert Enum.fetch(-10..20, 4) == {:ok, -6}
assert Enum.fetch(-10..20, -4) == {:ok, 17}
# ascending order, first
assert Enum.fetch(-10..20, 0) == {:ok, -10}
assert Enum.fetch(-10..20, -31) == {:ok, -10}
# ascending order, last
assert Enum.fetch(-10..20, -1) == {:ok, 20}
assert Enum.fetch(-10..20, 30) == {:ok, 20}
# ascending order, out of bound
assert Enum.fetch(-10..20, 31) == :error
assert Enum.fetch(-10..20, -32) == :error
# descending order
assert Enum.fetch(20..-10, 4) == {:ok, 16}
assert Enum.fetch(20..-10, -4) == {:ok, -7}
# descending order, first
assert Enum.fetch(20..-10, 0) == {:ok, 20}
assert Enum.fetch(20..-10, -31) == {:ok, 20}
# descending order, last
assert Enum.fetch(20..-10, -1) == {:ok, -10}
assert Enum.fetch(20..-10, 30) == {:ok, -10}
# descending order, out of bound
assert Enum.fetch(20..-10, 31) == :error
assert Enum.fetch(20..-10, -32) == :error
# edge cases
assert Enum.fetch(42..42, 0) == {:ok, 42}
assert Enum.fetch(42..42, -1) == {:ok, 42}
assert Enum.fetch(42..42, 2) == :error
assert Enum.fetch(42..42, -2) == :error
end
test "fetch!/2" do
assert Enum.fetch!(2..6, 0) == 2
assert Enum.fetch!(2..6, 4) == 6
assert Enum.fetch!(2..6, -1) == 6
assert Enum.fetch!(2..6, -2) == 5
assert Enum.fetch!(-2..-6, 0) == -2
assert Enum.fetch!(-2..-6, 4) == -6
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(2..6, 8)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(-2..-6, 8)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(2..6, -8)
end
end
test "filter/2" do
assert Enum.filter(1..3, fn(x) -> rem(x, 2) == 0 end) == [2]
assert Enum.filter(1..6, fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6]
assert Enum.filter(1..3, &match?(1, &1)) == [1]
assert Enum.filter(1..3, &match?(x when x < 3, &1)) == [1, 2]
assert Enum.filter(1..3, fn _ -> true end) == [1, 2, 3]
end
test "filter_map/3" do
assert Enum.filter_map(1..3, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4]
assert Enum.filter_map(2..6, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12]
end
test "find/3" do
assert Enum.find(2..6, fn(x) -> rem(x, 2) == 0 end) == 2
assert Enum.find(2..6, fn(x) -> rem(x, 2) == 1 end) == 3
assert Enum.find(2..6, fn _ -> false end) == nil
assert Enum.find(2..6, 0, fn _ -> false end) == 0
end
test "find_index/2" do
assert Enum.find_index(2..6, fn(x) -> rem(x, 2) == 1 end) == 1
end
test "find_value/3" do
assert Enum.find_value(2..6, fn(x) -> rem(x, 2) == 1 end)
end
test "flat_map/2" do
assert Enum.flat_map(1..3, fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3]
end
test "flat_map_reduce/3" do
assert Enum.flat_map_reduce(1..100, 0, fn i, acc ->
if acc < 3, do: {[i], acc + 1}, else: {:halt, acc}
end) == {[1, 2, 3], 3}
end
test "group_by/3" do
assert Enum.group_by(1..6, &rem(&1, 3)) ==
%{0 => [3, 6], 1 => [1, 4], 2 => [2, 5]}
assert Enum.group_by(1..6, &rem(&1, 3), &(&1 * 2)) ==
%{0 => [6, 12], 1 => [2, 8], 2 => [4, 10]}
end
test "intersperse/2" do
assert Enum.intersperse(1..0, true) == [1, true, 0]
assert Enum.intersperse(1..3, false) == [1, false, 2, false, 3]
end
test "into/2" do
assert Enum.into(3..5, [1, 2]) == [1, 2, 3, 4, 5]
assert Enum.into(1..5, []) == [1, 2, 3, 4, 5]
end
test "into/3" do
assert Enum.into(1..5, [], fn x -> x * 2 end) == [2, 4, 6, 8, 10]
assert Enum.into(1..3, "numbers: ", &to_string/1) == "numbers: 123"
end
test "join/2" do
assert Enum.join(1..0, " = ") == "1 = 0"
assert Enum.join(1..3, " = ") == "1 = 2 = 3"
assert Enum.join(1..3) == "123"
end
test "map/2" do
assert Enum.map(1..3, fn x -> x * 2 end) == [2, 4, 6]
assert Enum.map(-1..-3, fn x -> x * 2 end) == [-2, -4, -6]
end
test "map_every/3" do
assert Enum.map_every(1..10, 2, fn x -> x * 2 end) == [2, 2, 6, 4, 10, 6, 14, 8, 18, 10]
assert Enum.map_every(-1..-10, 2, fn x -> x * 2 end) == [-2, -2, -6, -4, -10, -6, -14, -8, -18, -10]
assert Enum.map_every(1..2, 2, fn x -> x * 2 end) == [2, 2]
assert Enum.map_every(1..3, 0, fn x -> x * 2 end) == [1, 2, 3]
assert_raise FunctionClauseError, fn ->
Enum.map_every(1..3, -1, fn x -> x * 2 end)
end
end
test "map_join/3" do
assert Enum.map_join(1..0, " = ", &(&1 * 2)) == "2 = 0"
assert Enum.map_join(1..3, " = ", &(&1 * 2)) == "2 = 4 = 6"
assert Enum.map_join(1..3, &(&1 * 2)) == "246"
end
test "map_reduce/3" do
assert Enum.map_reduce(1..0, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 0], 2}
assert Enum.map_reduce(1..3, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7}
end
test "max/1" do
assert Enum.max(1..1) == 1
assert Enum.max(1..3) == 3
assert Enum.max(3..1) == 3
end
test "max_by/2" do
assert Enum.max_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1
assert Enum.max_by(1..3, fn(x) -> :math.pow(-2, x) end) == 2
end
test "member?/2" do
assert Enum.member?(1..3, 2)
refute Enum.member?(1..3, 0)
end
test "min/1" do
assert Enum.min(1..1) == 1
assert Enum.min(1..3) == 1
end
test "min_by/2" do
assert Enum.min_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1
assert Enum.min_by(1..3, fn(x) -> :math.pow(-2, x) end) == 3
end
test "min_max/1" do
assert Enum.min_max(1..1) == {1, 1}
assert Enum.min_max(1..3) == {1, 3}
assert Enum.min_max(3..1) == {1, 3}
end
test "min_max_by/2" do
assert Enum.min_max_by(1..1, fn(x) -> x end) == {1, 1}
assert Enum.min_max_by(1..3, fn(x) -> x end) == {1, 3}
end
test "split_with/2" do
assert Enum.split_with(1..3, fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]}
end
test "random/1" do
# corner cases, independent of the seed
assert Enum.random(1..1) == 1
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1306, 421106, 567597}
:rand.seed(:exsplus, seed1)
assert Enum.random(1..2) == 1
assert Enum.random(1..3) == 2
assert Enum.random(3..1) == 3
:rand.seed(:exsplus, seed2)
assert Enum.random(1..2) == 1
assert Enum.random(1..3) == 3
end
test "reduce/2" do
assert Enum.reduce(1..3, fn(x, acc) -> x + acc end) == 6
end
test "reduce/3" do
assert Enum.reduce(1..0, 1, fn(x, acc) -> x + acc end) == 2
assert Enum.reduce(1..3, 1, fn(x, acc) -> x + acc end) == 7
end
test "reduce_while/3" do
assert Enum.reduce_while(1..100, 0, fn i, acc ->
if i <= 3, do: {:cont, acc + i}, else: {:halt, acc}
end) == 6
end
test "reject/2" do
assert Enum.reject(1..3, fn(x) -> rem(x, 2) == 0 end) == [1, 3]
assert Enum.reject(1..6, fn(x) -> rem(x, 2) == 0 end) == [1, 3, 5]
end
test "reverse/1" do
assert Enum.reverse(0..0) == [0]
assert Enum.reverse(1..3) == [3, 2, 1]
assert Enum.reverse(-3..5) == [5, 4, 3, 2, 1, 0, -1, -2, -3]
assert Enum.reverse(5..5) == [5]
end
test "reverse/2" do
assert Enum.reverse(1..3, 4..6) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse([1, 2, 3], 4..6) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse(1..3, [4, 5, 6]) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse(-3..5, MapSet.new([-3, -2])) == [5, 4, 3, 2, 1, 0, -1, -2, -3, -3, -2]
assert Enum.reverse(5..5, [5]) == [5, 5]
end
test "reverse_slice/3" do
assert Enum.reverse_slice(1..6, 2, 0) == [1, 2, 3, 4, 5, 6]
assert Enum.reverse_slice(1..6, 2, 2) == [1, 2, 4, 3, 5, 6]
assert Enum.reverse_slice(1..6, 2, 4) == [1, 2, 6, 5, 4, 3]
assert Enum.reverse_slice(1..6, 2, 10000000) == [1, 2, 6, 5, 4, 3]
assert Enum.reverse_slice(1..6, 10000000, 4) == [1, 2, 3, 4, 5, 6]
assert Enum.reverse_slice(1..6, 50, 50) == [1, 2, 3, 4, 5, 6]
end
test "scan/2" do
assert Enum.scan(1..5, &(&1 + &2)) == [1, 3, 6, 10, 15]
end
test "scan/3" do
assert Enum.scan(1..5, 0, &(&1 + &2)) == [1, 3, 6, 10, 15]
end
test "shuffle/1" do
# set a fixed seed so the test can be deterministic
:rand.seed(:exsplus, {1374, 347975, 449264})
assert Enum.shuffle(1..5) == [2, 1, 3, 5, 4]
end
test "slice/2" do
assert Enum.slice(1..5, 0..0) == [1]
assert Enum.slice(1..5, 0..1) == [1, 2]
assert Enum.slice(1..5, 0..2) == [1, 2, 3]
assert Enum.slice(1..5, 1..2) == [2, 3]
assert Enum.slice(1..5, 1..0) == []
assert Enum.slice(1..5, 2..5) == [3, 4, 5]
assert Enum.slice(1..5, 2..6) == [3, 4, 5]
assert Enum.slice(1..5, 4..4) == [5]
assert Enum.slice(1..5, 5..5) == []
assert Enum.slice(1..5, 6..5) == []
assert Enum.slice(1..5, 6..0) == []
assert Enum.slice(1..5, -6..0) == []
assert Enum.slice(1..5, -6..5) == []
assert Enum.slice(1..5, -5..-1) == [1, 2, 3, 4, 5]
assert Enum.slice(1..5, -5..-3) == [1, 2, 3]
assert Enum.slice(1..5, -6..-1) == []
assert_raise ArgumentError, fn ->
x = 1.1
Enum.slice(1..5, x..2)
end
assert_raise ArgumentError, fn ->
x = 1.9
Enum.slice(1..5, 1..x)
end
assert Enum.slice(5..1, 0..0) == [5]
assert Enum.slice(5..1, 0..1) == [5, 4]
assert Enum.slice(5..1, 0..2) == [5, 4, 3]
assert Enum.slice(5..1, 1..2) == [4, 3]
assert Enum.slice(5..1, 1..0) == []
assert Enum.slice(5..1, 2..5) == [3, 2, 1]
assert Enum.slice(5..1, 2..6) == [3, 2, 1]
assert Enum.slice(5..1, 4..4) == [1]
assert Enum.slice(5..1, 5..5) == []
assert Enum.slice(5..1, 6..5) == []
assert Enum.slice(5..1, 6..0) == []
assert Enum.slice(5..1, -6..0) == []
assert Enum.slice(5..1, -6..5) == []
assert Enum.slice(5..1, -5..-1) == [5, 4, 3, 2, 1]
assert Enum.slice(5..1, -5..-3) == [5, 4, 3]
assert Enum.slice(5..1, -6..-1) == []
end
test "slice/3" do
assert Enum.slice(1..5, 0, 0) == []
assert Enum.slice(1..5, 0, 1) == [1]
assert Enum.slice(1..5, 0, 2) == [1, 2]
assert Enum.slice(1..5, 1, 2) == [2, 3]
assert Enum.slice(1..5, 1, 0) == []
assert Enum.slice(1..5, 2, 3) == [3, 4, 5]
assert Enum.slice(1..5, 2, 6) == [3, 4, 5]
assert Enum.slice(1..5, 5, 5) == []
assert Enum.slice(1..5, 6, 5) == []
assert Enum.slice(1..5, 6, 0) == []
assert Enum.slice(1..5, -6, 0) == []
assert Enum.slice(1..5, -6, 5) == []
assert Enum.slice(1..5, -2, 5) == [4, 5]
assert Enum.slice(1..5, -3, 1) == [3]
assert_raise FunctionClauseError, fn ->
Enum.slice(1..5, 0, -1)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(1..5, 0.99, 0)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(1..5, 0, 0.99)
end
assert Enum.slice(5..1, 0, 0) == []
assert Enum.slice(5..1, 0, 1) == [5]
assert Enum.slice(5..1, 0, 2) == [5, 4]
assert Enum.slice(5..1, 1, 2) == [4, 3]
assert Enum.slice(5..1, 1, 0) == []
assert Enum.slice(5..1, 2, 3) == [3, 2, 1]
assert Enum.slice(5..1, 2, 6) == [3, 2, 1]
assert Enum.slice(5..1, 4, 4) == [1]
assert Enum.slice(5..1, 5, 5) == []
assert Enum.slice(5..1, 6, 5) == []
assert Enum.slice(5..1, 6, 0) == []
assert Enum.slice(5..1, -6, 0) == []
assert Enum.slice(5..1, -6, 5) == []
end
test "sort/1" do
assert Enum.sort(3..1) == [1, 2, 3]
assert Enum.sort(2..1) == [1, 2]
assert Enum.sort(1..1) == [1]
end
test "sort/2" do
assert Enum.sort(3..1, &(&1 > &2)) == [3, 2, 1]
assert Enum.sort(2..1, &(&1 > &2)) == [2, 1]
assert Enum.sort(1..1, &(&1 > &2)) == [1]
end
test "sort_by/2" do
assert Enum.sort_by(3..1, & &1) == [1, 2, 3]
end
test "split/2" do
assert Enum.split(1..3, 0) == {[], [1, 2, 3]}
assert Enum.split(1..3, 1) == {[1], [2, 3]}
assert Enum.split(1..3, 2) == {[1, 2], [3]}
assert Enum.split(1..3, 3) == {[1, 2, 3], []}
assert Enum.split(1..3, 4) == {[1, 2, 3], []}
assert Enum.split(1..3, -1) == {[1, 2], [3]}
assert Enum.split(1..3, -2) == {[1], [2, 3]}
assert Enum.split(1..3, -3) == {[], [1, 2, 3]}
assert Enum.split(1..3, -10) == {[], [1, 2, 3]}
assert Enum.split(1..0, 3) == {[1, 0], []}
end
test "split_while/2" do
assert Enum.split_while(1..3, fn(_) -> false end) == {[], [1, 2, 3]}
assert Enum.split_while(1..3, fn(_) -> nil end) == {[], [1, 2, 3]}
assert Enum.split_while(1..3, fn(_) -> true end) == {[1, 2, 3], []}
assert Enum.split_while(1..3, fn(x) -> x > 2 end) == {[], [1, 2, 3]}
assert Enum.split_while(1..3, fn(x) -> x > 3 end) == {[], [1, 2, 3]}
assert Enum.split_while(1..3, fn(x) -> x < 3 end) == {[1, 2], [3]}
assert Enum.split_while(1..3, fn(x) -> x end) == {[1, 2, 3], []}
assert Enum.split_while(1..0, fn(_) -> true end) == {[1, 0], []}
end
test "sum/1" do
assert Enum.sum(0..0) == 0
assert Enum.sum(1..1) == 1
assert Enum.sum(1..3) == 6
assert Enum.sum(0..100) == 5050
assert Enum.sum(10..100) == 5005
assert Enum.sum(100..10) == 5005
assert Enum.sum(-10..-20) == -165
assert Enum.sum(-10..2) == -52
end
test "take/2" do
assert Enum.take(1..3, 0) == []
assert Enum.take(1..3, 1) == [1]
assert Enum.take(1..3, 2) == [1, 2]
assert Enum.take(1..3, 3) == [1, 2, 3]
assert Enum.take(1..3, 4) == [1, 2, 3]
assert Enum.take(1..3, -1) == [3]
assert Enum.take(1..3, -2) == [2, 3]
assert Enum.take(1..3, -4) == [1, 2, 3]
assert Enum.take(1..0, 3) == [1, 0]
end
test "take_every/2" do
assert Enum.take_every(1..10, 2) == [1, 3, 5, 7, 9]
assert Enum.take_every(1..2, 2) == [1]
assert Enum.take_every(1..3, 0) == []
assert_raise FunctionClauseError, fn ->
Enum.take_every(1..3, -1)
end
end
test "take_random/2" do
# corner cases, independent of the seed
assert_raise FunctionClauseError, fn -> Enum.take_random(1..2, -1) end
assert Enum.take_random(1..1, 0) == []
assert Enum.take_random(1..1, 1) == [1]
assert Enum.take_random(1..1, 2) == [1]
assert Enum.take_random(1..2, 0) == []
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1406, 421106, 567597}
:rand.seed(:exsplus, seed1)
assert Enum.take_random(1..3, 1) == [2]
assert Enum.take_random(1..3, 2) == [3, 1]
assert Enum.take_random(1..3, 3) == [1, 3, 2]
assert Enum.take_random(1..3, 4) == [2, 3, 1]
assert Enum.take_random(3..1, 1) == [3]
:rand.seed(:exsplus, seed2)
assert Enum.take_random(1..3, 1) == [3]
assert Enum.take_random(1..3, 2) == [1, 2]
assert Enum.take_random(1..3, 3) == [1, 2, 3]
assert Enum.take_random(1..3, 4) == [2, 1, 3]
# make sure optimizations don't change fixed seeded tests
:rand.seed(:exsplus, {101, 102, 103})
one = Enum.take_random(1..100, 1)
:rand.seed(:exsplus, {101, 102, 103})
two = Enum.take_random(1..100, 2)
assert hd(one) == hd(two)
end
test "take_while/2" do
assert Enum.take_while(1..3, fn(x) -> x > 3 end) == []
assert Enum.take_while(1..3, fn(x) -> x <= 1 end) == [1]
assert Enum.take_while(1..3, fn(x) -> x <= 3 end) == [1, 2, 3]
assert Enum.take_while(1..3, fn(x) -> x end) == [1, 2, 3]
assert Enum.take_while(1..3, fn(_) -> nil end) == []
end
test "to_list/1" do
assert Enum.to_list([1, 2, 3]) == [1, 2, 3]
assert Enum.to_list(MapSet.new(1..3)) == [1, 2, 3]
assert Enum.to_list(1..3) == [1, 2, 3]
end
test "uniq/1" do
assert Enum.uniq(1..3) == [1, 2, 3]
end
test "uniq_by/2" do
assert Enum.uniq_by(1..3, fn x -> x end) == [1, 2, 3]
end
test "unzip/1" do
assert_raise FunctionClauseError, fn -> Enum.unzip(1..3) end
end
test "with_index/2" do
assert Enum.with_index(1..3) == [{1, 0}, {2, 1}, {3, 2}]
assert Enum.with_index(1..3, 3) == [{1, 3}, {2, 4}, {3, 5}]
end
test "zip/2" do
assert Enum.zip([:a, :b], 1..2) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b], 1..4) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b, :c, :d], 1..2) == [{:a, 1}, {:b, 2}]
assert Enum.zip(1..2, [:a, :b]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..4, [:a, :b]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..2, [:a, :b, :c, :d]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..2, 1..2) == [{1, 1}, {2, 2}]
assert Enum.zip(1..4, 1..2) == [{1, 1}, {2, 2}]
assert Enum.zip(1..2, 1..4) == [{1, 1}, {2, 2}]
end
end
defmodule EnumTest.Map do
  # Some cases are inlined for ranges, which means we need
  # to verify them using maps or mapsets.
use ExUnit.Case, async: true
test "take_random/2" do
# corner cases, independent of the seed
assert_raise FunctionClauseError, fn -> Enum.take_random(1..2, -1) end
assert Enum.take_random(%{a: 1}, 0) == []
assert Enum.take_random(%{a: 1}, 2) == [a: 1]
assert Enum.take_random(%{a: 1, b: 2}, 0) == []
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
map = %{a: 1, b: 2, c: 3}
seed1 = {1406, 407414, 139258}
seed2 = {1406, 421106, 567597}
:rand.seed(:exsplus, seed1)
assert Enum.take_random(map, 1) == [b: 2]
assert Enum.take_random(map, 2) == [c: 3, a: 1]
assert Enum.take_random(map, 3) == [a: 1, c: 3, b: 2]
assert Enum.take_random(map, 4) == [b: 2, c: 3, a: 1]
:rand.seed(:exsplus, seed2)
assert Enum.take_random(map, 1) == [c: 3]
assert Enum.take_random(map, 2) == [a: 1, b: 2]
assert Enum.take_random(map, 3) == [a: 1, b: 2, c: 3]
assert Enum.take_random(map, 4) == [b: 2, a: 1, c: 3]
end
test "reverse/1" do
assert Enum.reverse(%{}) == []
assert Enum.reverse(MapSet.new) == []
assert Enum.reverse(%{a: 1, b: 2, c: 3}) == [c: 3, b: 2, a: 1]
end
test "reverse/2" do
assert Enum.reverse([a: 1, b: 2, c: 3, a: 1], %{x: 1, y: 2, z: 3}) ==
[a: 1, c: 3, b: 2, a: 1, x: 1, y: 2, z: 3]
assert Enum.reverse([], %{a: 1}) == [a: 1]
assert Enum.reverse([], %{}) == []
assert Enum.reverse(%{a: 1}, []) == [a: 1]
assert Enum.reverse(MapSet.new, %{}) == []
end
test "fetch/2" do
map = %{a: 1, b: 2, c: 3, d: 4, e: 5}
assert Enum.fetch(map, 0) == {:ok, {:a, 1}}
assert Enum.fetch(map, -2) == {:ok, {:d, 4}}
assert Enum.fetch(map, -6) == :error
assert Enum.fetch(map, 5) == :error
assert Enum.fetch(%{}, 0) == :error
assert Stream.take(map, 3) |> Enum.fetch(3) == :error
assert Stream.take(map, 5) |> Enum.fetch(4) == {:ok, {:e, 5}}
end
test "slice/2" do
map = %{a: 1, b: 2, c: 3, d: 4, e: 5}
assert Enum.slice(map, 0..0) == [a: 1]
assert Enum.slice(map, 0..1) == [a: 1, b: 2]
assert Enum.slice(map, 0..2) == [a: 1, b: 2, c: 3]
end
test "slice/3" do
map = %{a: 1, b: 2, c: 3, d: 4, e: 5}
assert Enum.slice(map, 1, 2) == [b: 2, c: 3]
assert Enum.slice(map, 1, 0) == []
assert Enum.slice(map, 2, 5) == [c: 3, d: 4, e: 5]
assert Enum.slice(map, 2, 6) == [c: 3, d: 4, e: 5]
assert Enum.slice(map, 5, 5) == []
assert Enum.slice(map, 6, 5) == []
assert Enum.slice(map, 6, 0) == []
assert Enum.slice(map, -6, 0) == []
assert Enum.slice(map, -6, 5) == []
assert Enum.slice(map, -2, 5) == [d: 4, e: 5]
assert Enum.slice(map, -3, 1) == [c: 3]
assert_raise FunctionClauseError, fn ->
Enum.slice(map, 0, -1)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(map, 0.99, 0)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(map, 0, 0.99)
end
assert Enum.slice(map, 0, 0) == []
assert Enum.slice(map, 0, 1) == [a: 1]
assert Enum.slice(map, 0, 2) == [a: 1, b: 2]
assert Enum.slice(map, 1, 2) == [b: 2, c: 3]
assert Enum.slice(map, 1, 0) == []
assert Enum.slice(map, 2, 5) == [c: 3, d: 4, e: 5]
assert Enum.slice(map, 2, 6) == [c: 3, d: 4, e: 5]
assert Enum.slice(map, 5, 5) == []
assert Enum.slice(map, 6, 5) == []
assert Enum.slice(map, 6, 0) == []
assert Enum.slice(map, -6, 0) == []
assert Enum.slice(map, -6, 5) == []
assert Enum.slice(map, -2, 5) == [d: 4, e: 5]
assert Enum.slice(map, -3, 1) == [c: 3]
assert_raise FunctionClauseError, fn ->
Enum.slice(map, 0, -1)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(map, 0.99, 0)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(map, 0, 0.99)
end
end
end
defmodule EnumTest.SideEffects do
use ExUnit.Case, async: true
import ExUnit.CaptureIO
import PathHelpers
test "take/2 with side effects" do
stream = Stream.unfold(1, fn x -> IO.puts x; {x, x + 1} end)
assert capture_io(fn ->
Enum.take(stream, 1)
end) == "1\n"
end
test "take/2 does not consume next without a need" do
path = tmp_path("oneliner.txt")
File.mkdir(Path.dirname(path))
try do
File.write!(path, "ONE")
File.open!(path, [], fn file ->
iterator = IO.stream(file, :line)
assert Enum.take(iterator, 1) == ["ONE"]
assert Enum.take(iterator, 5) == []
end)
after
File.rm(path)
end
end
test "take/2 with no item works as no-op" do
iterator = File.stream!(fixture_path("unknown.txt"))
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
end
end
| 34.201028 | 123 | 0.515033 |
7306db8555a05a94bf92ac27deeb4fe926967aec | 899 | ex | Elixir | apps/testchain/lib/testchain/evm/details.ex | makerdao/qa_backend_gateway | 38e9a3f3f4b66212f1ee9d38b3b698a2a1f9a809 | ["Apache-2.0"] | 1 | 2020-10-23T19:25:27.000Z | 2020-10-23T19:25:27.000Z | apps/testchain/lib/testchain/evm/details.ex | makerdao/qa_backend_gateway | 38e9a3f3f4b66212f1ee9d38b3b698a2a1f9a809 | ["Apache-2.0"] | 5 | 2019-01-11T11:48:08.000Z | 2019-01-16T17:29:23.000Z | apps/testchain/lib/testchain/evm/details.ex | makerdao/qa_backend_gateway | 38e9a3f3f4b66212f1ee9d38b3b698a2a1f9a809 | ["Apache-2.0"] | 7 | 2019-10-09T05:49:52.000Z | 2022-03-23T16:48:45.000Z |
defmodule Staxx.Testchain.EVM.Details do
@moduledoc """
EVM Details
  Contains the list of EVM operational values.
"""
alias Staxx.Testchain
alias Staxx.Testchain.EVM.Account
@type t :: %__MODULE__{
id: Testchain.evm_id(),
coinbase: binary,
accounts: [Account.t()],
rpc_url: binary,
ws_url: binary,
gas_limit: pos_integer(),
network_id: pos_integer()
}
@derive Jason.Encoder
@enforce_keys [:id]
defstruct id: nil,
coinbase: "",
accounts: [],
rpc_url: "",
ws_url: "",
gas_limit: 6_000_000,
network_id: Application.get_env(:testchain, :default_chain_id)
end
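# The commented-out encoder below duplicates what `@derive Jason.Encoder`
# above already provides; kept for reference.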
# defimpl Jason.Encoder, for: Staxx.Testchain.EVM.Details do
# def encode(value, opts) do
# value
# |> Map.from_struct()
# |> Jason.Encode.map(opts)
# end
# end
| 23.657895 | 74 | 0.581758 |
730729f8ebbb2fd0b2b549211a906967447d9e74 | 537 | exs | Elixir | examples/elixir/get_started/03-routing-over-transport-initiator.exs | MichalDolata/ockam | 99046a3efe87fb45056a724dddbff288cb63ddfc | ["Apache-2.0"] | null | null | null | examples/elixir/get_started/03-routing-over-transport-initiator.exs | MichalDolata/ockam | 99046a3efe87fb45056a724dddbff288cb63ddfc | ["Apache-2.0"] | null | null | null | examples/elixir/get_started/03-routing-over-transport-initiator.exs | MichalDolata/ockam | 99046a3efe87fb45056a724dddbff288cb63ddfc | ["Apache-2.0"] | null | null | null |
["install.exs"] |> Enum.map(&Code.require_file/1)
alias Ockam.Transport.TCPAddress
Ockam.Transport.TCP.start()
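# Assumes a separate responder node (not shown in this file) is already
# listening on localhost:4000 with a worker registered as "echoer".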
# Register this process as address "app".
Ockam.Node.register_address("app", self())
# Prepare our message.
message = %{onward_route: [TCPAddress.new("localhost", 4000), "echoer"], return_route: ["app"], payload: "Hello Ockam!"}
# Send the message to the worker at address "echoer".
Ockam.Router.route(message)
# Wait to receive a reply.
receive do
message -> IO.puts("Address: app\t Received: #{inspect(message)}")
end
| 28.263158 | 120 | 0.722533 |
730751c8bae04a94cf0d0924a7efae2b4ae4a22d | 2,332 | exs | Elixir | test/oli/delivery/page/page_context_test.exs | candert1/oli-torus | b7408f7d7c04cc3e9cf537873d98c3a586ec3a66 | ["MIT"] | null | null | null | test/oli/delivery/page/page_context_test.exs | candert1/oli-torus | b7408f7d7c04cc3e9cf537873d98c3a586ec3a66 | ["MIT"] | null | null | null | test/oli/delivery/page/page_context_test.exs | candert1/oli-torus | b7408f7d7c04cc3e9cf537873d98c3a586ec3a66 | ["MIT"] | null | null | null |
defmodule Oli.Delivery.Page.PageContextTest do
use Oli.DataCase
alias Oli.Delivery.Page.PageContext
describe "page context" do
setup do
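      # Seeds a base project with one objective, two activities (the first
      # attached to the objective), a user, and a section to visit the page in.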
content = %{
"stem" => "1",
"authoring" => %{
"parts" => [
%{
"id" => "1",
"responses" => [],
"scoringStrategy" => "best",
"evaluationStrategy" => "regex"
}
]
}
}
map =
Seeder.base_project_with_resource2()
|> Seeder.create_section()
|> Seeder.add_objective("objective one", :o1)
o = Map.get(map, :o1).revision.resource_id
map =
Seeder.add_activity(
map,
%{title: "one", objectives: %{"1" => [o]}, content: content},
:a1
)
|> Seeder.add_activity(%{title: "two", content: %{"stem" => "3"}}, :a2)
|> Seeder.add_user(%{}, :user1)
attrs = %{
title: "page1",
content: %{
"model" => [
%{"type" => "activity-reference", "activity_id" => Map.get(map, :a1).resource.id},
%{"type" => "activity-reference", "activity_id" => Map.get(map, :a2).resource.id}
]
},
objectives: %{"attached" => []}
}
Seeder.ensure_published(map.publication.id)
Seeder.add_page(map, attrs, :p1)
|> Seeder.create_section_resources()
end
test "create_context/2 returns the activities mapped correctly",
%{
section: section,
p1: p1,
a1: a1,
user1: user,
container: %{resource: container_resource, revision: container_revision}
} = map do
page1 = Map.get(map, :page1)
page2 = Map.get(map, :page2)
publication = Map.get(map, :publication)
Seeder.replace_pages_with(
[page1, %{id: p1.resource.id}, page2],
container_resource,
container_revision,
publication
)
Seeder.rebuild_section_resources(%{section: section, publication: publication})
context = PageContext.create_for_visit(section, p1.revision.slug, user)
# verify activities map
assert Map.get(context.activities, a1.resource.id).model != nil
# verify objectives map
assert context.objectives == ["objective one"]
end
end
end
| 27.116279 | 94 | 0.536449 |
73076474054d8765cc313ea30c55ad35cbdf3267 | 2,419 | ex | Elixir | lib/advent_of_code/day-11/solution.ex | woojiahao/advent-of-code-2021 | 200737bf708fd7277b6dce517f30dcfcd02c4e57 | ["MIT"] | null | null | null | lib/advent_of_code/day-11/solution.ex | woojiahao/advent-of-code-2021 | 200737bf708fd7277b6dce517f30dcfcd02c4e57 | ["MIT"] | null | null | null | lib/advent_of_code/day-11/solution.ex | woojiahao/advent-of-code-2021 | 200737bf708fd7277b6dce517f30dcfcd02c4e57 | ["MIT"] | null | null | null |
defmodule AdventOfCode.DayElevenSolution do
@cap 9
@coords for r <- 0..@cap, c <- 0..@cap, do: {r, c}
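  # Precomputed at compile time: each cell mapped to its in-bounds neighbors,
  # including diagonals, on the (@cap + 1) x (@cap + 1) grid.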
@neighbors @coords
|> Map.new(fn {r, c} ->
n =
[
{r - 1, c},
{r + 1, c},
{r, c - 1},
{r, c + 1},
{r - 1, c - 1},
{r - 1, c + 1},
{r + 1, c - 1},
{r + 1, c + 1}
]
|> Enum.filter(fn {nr, nc} ->
nr >= 0 and nc >= 0 and nr <= @cap and nc <= @cap
end)
{{r, c}, n}
end)
defp load_data() do
AdventOfCode.load_data(11, "data.txt")
|> Enum.map(&String.graphemes/1)
|> Enum.flat_map(& &1)
|> Enum.map(&String.to_integer/1)
|> Enum.chunk_every(@cap + 1)
|> Enum.with_index()
|> Enum.map(fn {r, row} ->
r
|> Enum.with_index()
|> Enum.map(fn {c, col} -> {{row, col}, c} end)
end)
|> Enum.flat_map(& &1)
|> Map.new()
end
defp update_n(g, [], _), do: g
defp update_n(g, [n | rest], f), do: update_n(g |> Map.update(n, 0, &f.(&1)), rest, f)
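  # One wave of flash propagation. The third argument is "settled?": when true,
  # return the grid and the set of cells that flashed this step. Otherwise zero
  # the cells above 9, bump their neighbors, re-zero everything that has
  # already flashed, and recurse until no cell exceeds 9.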
defp propagate(g, flashed, true), do: {g, flashed}
defp propagate(g, flashed, false) do
flashing = g |> Enum.filter(&(elem(&1, 1) > 9)) |> Enum.map(&elem(&1, 0)) |> MapSet.new()
u_flashed = MapSet.union(flashed, flashing)
u_g =
g
|> Enum.reduce(g, fn
{{k, v}, s}, acc when s > 9 ->
acc
|> Map.update({k, v}, 0, fn _ -> 0 end)
|> update_n(@neighbors[{k, v}], &(&1 + 1))
_, acc ->
acc
end)
a_g = u_g |> update_n(MapSet.to_list(u_flashed), fn _ -> 0 end)
propagate(a_g, u_flashed, a_g |> Map.values() |> Enum.all?(&(&1 <= 9)))
end
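  # A single simulation step: every octopus gains one energy, then flashes
  # cascade until the grid settles. Returns {new_grid, flashed_coords}.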
defp step(g) do
ug = g |> Map.new(fn {{k, v}, s} -> {{k, v}, s + 1} end)
propagate(ug, MapSet.new(), false)
end
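  # Part one: total number of flashes across n steps.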
defp solve_one(_, total, 0), do: total
defp solve_one(g, total, n) do
{u_g, flashed} = step(g)
solve_one(u_g, total + MapSet.size(flashed), n - 1)
end
def part_one() do
g = load_data()
solve_one(g, 0, 100)
end
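  # Part two: the first step on which every octopus flashes simultaneously
  # (all energies are back at 0 after the step).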
defp solve_two(g, n) do
{u_g, _} = step(g)
if Map.values(u_g) |> Enum.all?(&(&1 == 0)),
do: n,
else: solve_two(u_g, n + 1)
end
def part_two() do
g = load_data()
solve_two(g, 1)
end
end
| 25.197917 | 93 | 0.447292 |