hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
033acc2df49561bfb5f5b152ae412c145e1fff23 | 1,091 | ex | Elixir | lib/gallery.ex | nickyeager/nickdotcom | e1a0f32c44da2111c9d8b22800edeb831d893b5e | [
"MIT"
] | null | null | null | lib/gallery.ex | nickyeager/nickdotcom | e1a0f32c44da2111c9d8b22800edeb831d893b5e | [
"MIT"
] | 2 | 2021-03-10T19:22:36.000Z | 2021-05-11T15:05:23.000Z | lib/gallery.ex | nickyeager/phoenix-starter | 6940499c28b460916d610929e6a6fec256345018 | [
"MIT"
] | null | null | null | defmodule Gallery do
# Base URL of the Unsplash image CDN; all image URLs are built from it.
@unsplash_url "https://images.unsplash.com"

# Delay between slides for the auto-advancing slide show.
# NOTE(review): presumably milliseconds — confirm against the caller.
@slide_show_interval 500

# Unsplash photo ids that make up the gallery, in display order.
@ids [
  "photo-1562971179-4ad6903a7ed6",
  "photo-1552673597-e3cd6747a996",
  "photo-1561133036-61a7ed56b424",
  "photo-1530717449302-271006cdc1bf"
]
# Interval used by the auto-advancing slide show (see @slide_show_interval).
def get_slide_show_interval, do: @slide_show_interval

# URL for a 100x100 cropped thumbnail of the given photo id.
def thumb_url(id), do: image_url(id, %{w: 100, h: 100, fit: "crop"})

# URL for the 500px-tall cropped main image of the given photo id.
def large_url(id), do: image_url(id, %{h: 500, fit: "crop"})

# All configured photo ids, in display order.
def image_ids(), do: @ids

# First id of the given list (nil when the list is empty).
def first_id(ids \\ @ids) do
  List.first(ids)
end
# Id of the entry before `id`. Because prev_index/2 yields -1 for the first
# entry and Enum.at/2 treats -1 as the last position, the gallery wraps around.
def prev_image_id(ids \\ @ids, id) do
  index = prev_index(ids, id)
  Enum.at(ids, index)
end
# Index of the entry before `id`; -1 when `id` is the first entry (callers
# rely on Enum.at/2 interpreting -1 as the last element, i.e. wrap-around).
# NOTE: raises ArithmeticError if `id` is not in `ids` (find_index -> nil).
def prev_index(ids, id) do
  Enum.find_index(ids, &(&1 == id)) - 1
end
# Id of the entry after `id`; falls back to the first id when `id` is the
# last entry (next_index/2 runs past the end), so the gallery wraps around.
def next_image_id(ids \\ @ids, id) do
  index = next_index(ids, id)
  Enum.at(ids, index, first_id(ids))
end
# Index of the entry after `id`; may equal length(ids), which callers handle
# via Enum.at/3's default. Raises ArithmeticError if `id` is not in `ids`.
def next_index(ids, id) do
  Enum.find_index(ids, &(&1 == id)) + 1
end
# Builds a full Unsplash URL for `image_id` with `params` (w/h/fit etc.)
# encoded as the query string.
def image_url(image_id, params) do
  @unsplash_url
  |> URI.parse()
  |> URI.merge(image_id)
  |> Map.put(:query, URI.encode_query(params))
  |> URI.to_string()
end
end | 21.392157 | 70 | 0.638863 |
033aee447a8603d67346705b01ff6aa944d718b7 | 2,076 | ex | Elixir | clients/tasks/lib/google_api/tasks/v1/model/task_list.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/tasks/lib/google_api/tasks/v1/model/task_list.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/tasks/lib/google_api/tasks/v1/model/task_list.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Tasks.V1.Model.TaskList do
  @moduledoc """
  A single task list (auto-generated API model — do not edit by hand).

  ## Attributes

  - etag (String.t): ETag of the resource. Defaults to: `null`.
  - id (String.t): Task list identifier. Defaults to: `null`.
  - kind (String.t): Type of the resource. This is always \"tasks#taskList\". Defaults to: `null`.
  - selfLink (String.t): URL pointing to this task list. Used to retrieve, update, or delete this task list. Defaults to: `null`.
  - title (String.t): Title of the task list. Defaults to: `null`.
  - updated (DateTime.t): Last modification time of the task list (as a RFC 3339 timestamp). Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :etag => any(),
          :id => any(),
          :kind => any(),
          :selfLink => any(),
          :title => any(),
          :updated => DateTime.t()
        }

  # field/1-2 macros come from GoogleApi.Gax.ModelBase and register the
  # JSON (de)serialization mapping for each attribute.
  field(:etag)
  field(:id)
  field(:kind)
  field(:selfLink)
  field(:title)
  field(:updated, as: DateTime)
end
# Delegates JSON decoding to the generated model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.Tasks.V1.Model.TaskList do
  def decode(value, options) do
    GoogleApi.Tasks.V1.Model.TaskList.decode(value, options)
  end
end
# Encoding is handled generically by the shared Gax model base.
defimpl Poison.Encoder, for: GoogleApi.Tasks.V1.Model.TaskList do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.952381 | 129 | 0.691233 |
033af03767e0d5e1ba972955f5f23ddc33ed1955 | 948 | exs | Elixir | config/config.exs | macdougall/mbtadepartures | a8c1f5a8b71570c2761fd7021875e9a435fe7caf | [
"MIT"
] | null | null | null | config/config.exs | macdougall/mbtadepartures | a8c1f5a8b71570c2761fd7021875e9a435fe7caf | [
"MIT"
] | null | null | null | config/config.exs | macdougall/mbtadepartures | a8c1f5a8b71570c2761fd7021875e9a435fe7caf | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config

# General application configuration
config :mbtadepartures,
  namespace: Departures

# Configures the endpoint
# NOTE(review): this secret_key_base is committed to source control — fine
# for dev/test, but production should load its secret from the environment
# or an uncommitted prod config.
config :mbtadepartures, Departures.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "KXybF5AkJ4NrSGBN58hPbeX/aW4gIT0qpT6aDVfJj7nujjtfRG7b3/68/2Mt+C9P",
  render_errors: [view: Departures.ErrorView, accepts: ~w(html json)],
  pubsub: [name: Departures.PubSub,
           adapter: Phoenix.PubSub.PG2]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 33.857143 | 86 | 0.766878 |
033b3acfbd7b214f357d979638824ab5957ac229 | 3,326 | exs | Elixir | test/task_bunny/status/worker_test.exs | sviik/task_bunny | 373836828289107b8ed917bd7dc5aeb5428af335 | [
"MIT"
] | 207 | 2017-02-03T12:23:24.000Z | 2022-03-22T17:03:23.000Z | test/task_bunny/status/worker_test.exs | sviik/task_bunny | 373836828289107b8ed917bd7dc5aeb5428af335 | [
"MIT"
] | 45 | 2017-02-06T09:48:43.000Z | 2021-11-10T09:28:38.000Z | test/task_bunny/status/worker_test.exs | sviik/task_bunny | 373836828289107b8ed917bd7dc5aeb5428af335 | [
"MIT"
] | 33 | 2017-02-06T08:16:37.000Z | 2021-09-13T00:38:16.000Z | defmodule TaskBunny.Status.WorkerTest do
# async: false — the suite mecks the global Config module and talks to a
# shared broker, so it cannot run concurrently with other suites.
use ExUnit.Case, async: false

import TaskBunny.QueueTestHelper
alias TaskBunny.{Config, Queue, JobTestHelper}
alias JobTestHelper.TestJob

# Names and queues are namespaced per test module to avoid collisions.
@host :worker_test
@supervisor :worker_test_supervisor
@worker_supervisor :worker_test_worker_supervisor
# NOTE(review): "workert" looks like a typo for "worker" — harmless since
# it is only a registered name; confirm before renaming.
@publisher :workert_test_publisher
@queue1 "task_bunny.status.worker_test1"
@queue2 "task_bunny.status.worker_test2"
# A job that always fails (returns :error) and never retries, so its
# message ends up rejected — used by the "jobs rejected" test below.
defmodule RejectJob do
  use TaskBunny.Job

  def perform(payload) do
    JobTestHelper.Tracer.performed(payload)
    :error
  end

  def retry_interval, do: 1
  def max_retry, do: 0
end
# Status entry for `queue` from the worker overview list, or nil when no
# worker serves that queue. Each entry is a map with a :queue key.
defp find_worker(workers, queue) do
  Enum.find(workers, fn %{queue: worker_queue} -> worker_queue == queue end)
end
# Both test queues plus their TaskBunny subqueues (retry/rejected), so
# setup can purge everything this suite may have touched.
defp all_queues do
  Queue.queue_with_subqueues(@queue1) ++ Queue.queue_with_subqueues(@queue2)
end
# Mecks TaskBunny.Config (passthrough for everything not expected below)
# so the supervisor boots exactly two workers against the local broker.
defp mock_config do
  workers = [
    [host: @host, queue: @queue1, concurrency: 3],
    [host: @host, queue: @queue2, concurrency: 3]
  ]

  :meck.new(Config, [:passthrough])
  :meck.expect(Config, :hosts, fn -> [@host] end)
  :meck.expect(Config, :connect_options, fn @host -> "amqp://localhost" end)
  :meck.expect(Config, :workers, fn -> workers end)
end
# Per-test setup: purge queues, install mocks, boot a fresh supervision
# tree, wait for the AMQP connection, then declare the queues. Order
# matters: mocks must be in place before the supervisor starts.
setup do
  clean(all_queues())
  mock_config()
  JobTestHelper.setup()
  TaskBunny.Supervisor.start_link(@supervisor, @worker_supervisor, @publisher)
  JobTestHelper.wait_for_connection(@host)
  Queue.declare_with_subqueues(:default, @queue1)
  Queue.declare_with_subqueues(:default, @queue2)

  # Undo the meck and helper state even when a test fails.
  on_exit(fn ->
    :meck.unload()
    JobTestHelper.teardown()
  end)

  :ok
end
describe "runners" do
  test "running with no jobs being performed" do
    %{workers: workers} = TaskBunny.Status.overview(@supervisor)
    %{runners: runner_count} = List.first(workers)
    assert runner_count == 0
  end

  test "running with jobs being performed" do
    # A long sleep keeps both jobs in-flight while we read the overview.
    payload = %{"sleep" => 10_000}
    TestJob.enqueue(payload, host: @host, queue: @queue1)
    TestJob.enqueue(payload, host: @host, queue: @queue1)
    JobTestHelper.wait_for_perform(2)

    %{workers: workers} = TaskBunny.Status.overview(@supervisor)
    %{runners: runner_count} = find_worker(workers, @queue1)
    assert runner_count == 2
  end
end
describe "job stats" do
  test "jobs succeeded" do
    payload = %{"hello" => "world1"}
    TestJob.enqueue(payload, host: @host, queue: @queue1)
    JobTestHelper.wait_for_perform()

    %{workers: workers} = TaskBunny.Status.overview(@supervisor)
    %{stats: stats} = find_worker(workers, @queue1)
    assert stats.succeeded == 1
  end

  test "jobs failed" do
    # TestJob treats a "fail" payload as a failure (retried, counted as failed).
    payload = %{"fail" => "fail"}
    TestJob.enqueue(payload, host: @host, queue: @queue1)
    JobTestHelper.wait_for_perform()

    %{workers: workers} = TaskBunny.Status.overview(@supervisor)
    %{stats: stats} = find_worker(workers, @queue1)
    assert stats.failed == 1
  end

  test "jobs rejected" do
    # RejectJob has max_retry 0, so its first failure rejects the message.
    payload = %{"fail" => "fail"}
    RejectJob.enqueue(payload, host: @host, queue: @queue2)
    JobTestHelper.wait_for_perform()

    %{workers: workers} = TaskBunny.Status.overview(@supervisor)
    %{stats: stats} = find_worker(workers, @queue2)
    assert stats.rejected == 1
  end
end
end
| 25.782946 | 80 | 0.669874 |
033b4374e9d362b491ae328fc4bf17156ada83a7 | 2,514 | ex | Elixir | clients/identity_toolkit/lib/google_api/identity_toolkit/v3/model/signup_new_user_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/identity_toolkit/lib/google_api/identity_toolkit/v3/model/signup_new_user_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/identity_toolkit/lib/google_api/identity_toolkit/v3/model/signup_new_user_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.IdentityToolkit.V3.Model.SignupNewUserResponse do
@moduledoc """
Response of signing up new user, creating anonymous user or anonymous user reauth.
## Attributes
* `displayName` (*type:* `String.t`, *default:* `nil`) - The name of the user.
* `email` (*type:* `String.t`, *default:* `nil`) - The email of the user.
* `expiresIn` (*type:* `String.t`, *default:* `nil`) - If idToken is STS id token, then this field will be expiration time of STS id token in seconds.
* `idToken` (*type:* `String.t`, *default:* `nil`) - The Gitkit id token to login the newly sign up user.
* `kind` (*type:* `String.t`, *default:* `identitytoolkit#SignupNewUserResponse`) - The fixed string "identitytoolkit#SignupNewUserResponse".
* `localId` (*type:* `String.t`, *default:* `nil`) - The RP local ID of the user.
* `refreshToken` (*type:* `String.t`, *default:* `nil`) - If idToken is STS id token, then this field will be refresh token.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:displayName => String.t(),
:email => String.t(),
:expiresIn => String.t(),
:idToken => String.t(),
:kind => String.t(),
:localId => String.t(),
:refreshToken => String.t()
}
field(:displayName)
field(:email)
field(:expiresIn)
field(:idToken)
field(:kind)
field(:localId)
field(:refreshToken)
end
defimpl Poison.Decoder, for: GoogleApi.IdentityToolkit.V3.Model.SignupNewUserResponse do
def decode(value, options) do
GoogleApi.IdentityToolkit.V3.Model.SignupNewUserResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.IdentityToolkit.V3.Model.SignupNewUserResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.676923 | 154 | 0.690135 |
033b4dca000001aca4853bbd0ae4327c87d8ef38 | 1,423 | ex | Elixir | lib/faker/commerce.ex | vforvova/faker | 9e600d5f3ca0c34e30f7923d415050786d9832cb | [
"MIT"
] | null | null | null | lib/faker/commerce.ex | vforvova/faker | 9e600d5f3ca0c34e30f7923d415050786d9832cb | [
"MIT"
] | null | null | null | lib/faker/commerce.ex | vforvova/faker | 9e600d5f3ca0c34e30f7923d415050786d9832cb | [
"MIT"
] | null | null | null | defmodule Faker.Commerce do
@moduledoc """
Functions for generating commerce-related fake data.
"""
@doc """
Returns a random color
"""
@spec color() :: String.t
def color do
  # Dispatches to the locale-specific submodule selected by Faker.mlocale
  # (the submodules are defined elsewhere — confirm naming convention there).
  Module.concat(__MODULE__, Faker.mlocale).color
end

@doc """
Returns a random department
"""
@spec department() :: String.t
def department do
  Module.concat(__MODULE__, Faker.mlocale).department
end
@doc """
Returns a random number that represents a price.

The result is a float between 0.01 and 10.00 in 0.01 increments.
"""
@spec price() :: float
def price do
  # :crypto.rand_uniform/2 is deprecated (and removed in recent OTP
  # releases); :rand.uniform/1 yields the same 1..1000 range and is
  # sufficient for fake data (no cryptographic strength needed).
  :rand.uniform(1000) / 100.0
end
@doc """
Returns a complete product name, based on product adjectives, product materials, product names
"""
@spec product_name() :: String.t
def product_name do
  # Like color/0 and department/0: dispatch to the current locale's submodule.
  Module.concat(__MODULE__, Faker.mlocale).product_name
end

@doc """
Returns a random adjective for a product
"""
@spec product_name_adjective() :: String.t
def product_name_adjective do
  Module.concat(__MODULE__, Faker.mlocale).product_name_adjective
end

@doc """
Returns a random product material
"""
@spec product_name_material() :: String.t
def product_name_material do
  Module.concat(__MODULE__, Faker.mlocale).product_name_material
end

@doc """
Returns a random product name
"""
@spec product_name_product() :: String.t
def product_name_product do
  Module.concat(__MODULE__, Faker.mlocale).product_name_product
end
end
| 22.951613 | 96 | 0.695713 |
033b7cb57846f0a962405556dde602dcd1876eee | 7,988 | ex | Elixir | lib/mix/lib/mix/config.ex | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | 1 | 2020-01-14T18:44:56.000Z | 2020-01-14T18:44:56.000Z | lib/mix/lib/mix/config.ex | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/config.ex | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | 1 | 2018-01-09T20:10:59.000Z | 2018-01-09T20:10:59.000Z | defmodule Mix.Config do
# TODO: Convert them to hard deprecations on v1.13
@moduledoc deprecated: "Use Config and Config.Reader instead"
@moduledoc ~S"""
A simple configuration API and functions for managing config files.

## Setting configuration

Most commonly, this module is used to define your own configuration:

    use Mix.Config

    config :root_key,
      key1: "value1",
      key2: "value2"

    import_config "#{Mix.env()}.exs"

`use Mix.Config` will import the functions `config/2`, `config/3`
and `import_config/1` to help you manage your configuration.

## Evaluating configuration

Once a configuration is written to a file, the functions in this
module can be used to read and merge said configuration. The `eval!/2`
function allows you evaluate a given configuration file and `merge/2`
allows to deep merge the results of multiple configurations. Those
functions should not be invoked by users writing configurations but
rather by library authors.

## Examples

The most common use of `Mix.Config` is to define application
configuration so that `Application.get_env/3` and other `Application`
functions can be used to retrieve or further change them.

Application config files are typically placed in the `config/`
directory of your Mix projects. For example, the following config

    # config/config.exs
    config :my_app, :key, "value"

will be automatically loaded by Mix and persisted into the
`:my_app`'s application environment, which can be accessed in
its source code as follows:

    "value" = Application.fetch_env!(:my_app, :key1)

"""
defmacro __using__(_) do
quote do
import Mix.Config, only: [config: 2, config: 3, import_config: 1]
end
end
@doc """
Configures the given `root_key`.

Keyword lists are always deep merged.

## Examples

The given `opts` are merged into the existing configuration
for the given `root_key`. Conflicting keys are overridden by the
ones specified in `opts`. For example, the application
configuration below

    config :logger,
      level: :warn,
      backends: [:console]

    config :logger,
      level: :info,
      truncate: 1024

will have a final configuration for `:logger` of:

    [level: :info, backends: [:console], truncate: 1024]

"""
@doc deprecated: "Use the Config module instead"
defdelegate config(root_key, opts), to: Config
@doc """
Configures the given `key` for the given `root_key`.

Keyword lists are always deep merged.

## Examples

The given `opts` are merged into the existing values for `key`
in the given `root_key`. Conflicting keys are overridden by the
ones specified in `opts`. For example, the application
configuration below

    config :ecto, Repo,
      log_level: :warn,
      adapter: Ecto.Adapters.Postgres

    config :ecto, Repo,
      log_level: :info,
      pool_size: 10

will have a final value of the configuration for the `Repo`
key in the `:ecto` application of:

    [log_level: :info, pool_size: 10, adapter: Ecto.Adapters.Postgres]

"""
@doc deprecated: "Use the Config module instead"
defdelegate config(root_key, key, opts), to: Config
@doc ~S"""
Imports configuration from the given file or files.

If `path_or_wildcard` is a wildcard, then all the files
matching that wildcard will be imported; if no file matches
the wildcard, no errors are raised. If `path_or_wildcard` is
not a wildcard but a path to a single file, then that file is
imported; in case the file doesn't exist, an error is raised.

If path/wildcard is a relative path/wildcard, it will be expanded
relatively to the directory the current configuration file is in.

## Examples

This is often used to emulate configuration across environments:

    import_config "#{Mix.env()}.exs"

Or to import files from children in umbrella projects:

    import_config "../apps/*/config/config.exs"

"""
@doc deprecated: "Use the Config module instead"
defmacro import_config(path_or_wildcard) do
  # __DIR__ expands at the call site, i.e. to the directory of the config
  # file invoking this macro — this is what anchors relative paths.
  quote do
    Mix.Config.__import__!(unquote(path_or_wildcard), __DIR__)
  end
end
@doc false
def __import__!(path_or_wildcard, dir) do
  expanded = Path.expand(path_or_wildcard, dir)

  # Only treat the path as a glob when it contains a wildcard metacharacter;
  # a plain path that does not exist must raise downstream, which
  # Path.wildcard/1 would otherwise silently swallow.
  paths =
    if String.contains?(expanded, ~w(* ? [ {)) do
      Path.wildcard(expanded)
    else
      [expanded]
    end

  Enum.each(paths, &Config.__import__!/1)

  :ok
end
## Mix API

@doc """
Evaluates the given configuration file.

It accepts a list of `imported_paths` that should raise if attempted
to be imported again (to avoid recursive imports).

It returns a tuple with the configuration and the imported paths.
"""
@doc deprecated: "Use Config.Reader.read!/2 instead"
def eval!(file, imported_paths \\ []) do
  Config.__eval__!(file, imported_paths)
end
@doc """
Reads the configuration file.

The same as `eval!/2` but only returns the configuration
in the given file, without returning the imported paths.

It exists for convenience purposes. For example, you could
invoke it inside your `mix.exs` to read some external data
you decided to move to a configuration file:

    releases: Mix.Config.read!("rel/releases.exs")

"""
@doc deprecated: "Use Config.Reader.read_imports!/2 instead"
@spec read!(Path.t(), [Path.t()]) :: keyword
def read!(file, imported_paths \\ []) do
  # eval!/2 returns {config, imported_paths}; keep only the config.
  Config.__eval__!(file, imported_paths) |> elem(0)
end
@doc """
Merges two configurations.

The configurations are merged together with the values in
the second one having higher preference than the first in
case of conflicts. In case both values are set to keyword
lists, it deep merges them.

## Examples

    iex> Mix.Config.merge([app: [k: :v1]], [app: [k: :v2]])
    [app: [k: :v2]]

    iex> Mix.Config.merge([app: [k: [v1: 1, v2: 2]]], [app: [k: [v2: :a, v3: :b]]])
    [app: [k: [v1: 1, v2: :a, v3: :b]]]

    iex> Mix.Config.merge([app1: []], [app2: []])
    [app1: [], app2: []]

"""
@doc deprecated: "Use Config.Reader.merge/2 instead"
def merge(config1, config2) do
  Config.__merge__(config1, config2)
end
@doc """
Persists the given configuration by modifying
the configured applications environment.

`config` should be a list of `{app, app_config}` tuples or a
`%{app => app_config}` map where `app` are the applications to
be configured and `app_config` are the configuration (as key-value
pairs) for each of those applications.

Returns the configured applications.

## Examples

    Mix.Config.persist(logger: [level: :error], my_app: [my_config: 1])
    #=> [:logger, :my_app]

"""
@doc deprecated: "Use Application.put_all_env/2 instead"
def persist(config) do
  Application.put_all_env(config, persistent: true)
end
@doc false
@deprecated "Use eval!/2 instead"
def read_wildcard!(path, loaded_paths \\ []) do
  # Same glob detection as __import__!/2: only expand when the path
  # contains a wildcard metacharacter.
  paths =
    if String.contains?(path, ~w(* ? [ {)) do
      Path.wildcard(path)
    else
      [path]
    end

  # Read every matching file and deep-merge results left to right,
  # later files overriding earlier ones.
  Enum.reduce(paths, [], &merge(&2, read!(&1, loaded_paths)))
end
@doc false
@deprecated "Manually validate the data instead"
def validate!(config) do
  # "runtime" only labels the error message emitted by validate!/2.
  validate!(config, "runtime")
end
# Ensures `config` is a list of `{app, keyword_list}` pairs; raises
# ArgumentError (mentioning `file`) otherwise. Returns `config` unchanged
# so callers can use it in a pipeline.
defp validate!(config, file) do
  if is_list(config) do
    Enum.all?(config, fn
      {app, value} when is_atom(app) ->
        if Keyword.keyword?(value) do
          true
        else
          raise ArgumentError,
                "expected #{Path.relative_to_cwd(file)} config for app #{inspect(app)} " <>
                  "to return keyword list, got: #{inspect(value)}"
        end

      # Any element that is not an {atom, value} pair just fails the
      # Enum.all?/2 check; note this branch does not raise on its own.
      _ ->
        false
    end)
  else
    raise ArgumentError,
          "expected #{Path.relative_to_cwd(file)} config to return " <>
            "keyword list, got: #{inspect(config)}"
  end

  config
end
end
| 27.736111 | 93 | 0.670506 |
033b9e2fa3a39ad14d6c517243ae6c3bac9b3673 | 384 | ex | Elixir | web/views/error_view.ex | cadorfo/SchoolAgenda | 5dd99f3482f103f7a3ac5ef83a07a36d15bbe17d | [
"MIT"
] | null | null | null | web/views/error_view.ex | cadorfo/SchoolAgenda | 5dd99f3482f103f7a3ac5ef83a07a36d15bbe17d | [
"MIT"
] | null | null | null | web/views/error_view.ex | cadorfo/SchoolAgenda | 5dd99f3482f103f7a3ac5ef83a07a36d15bbe17d | [
"MIT"
] | null | null | null | defmodule SchoolAgenda.ErrorView do
# Pulls in the project's shared view helpers/macros.
use SchoolAgenda.Web, :view
# Static body for 404 responses; assigns are ignored.
def render("404.html", _assigns), do: "Page not found"
# Static body for 500 responses; assigns are ignored.
def render("500.html", _assigns), do: "Internal server error"
# In case no render clause matches or no template is found,
# fall back to rendering the 500 page.
def template_not_found(_template, assigns) do
  render("500.html", assigns)
end
end
| 21.333333 | 47 | 0.705729 |
033bc1455ee3bb09afc8d19af41ea4b8f33c87cc | 309 | exs | Elixir | test/macro_test.exs | kianmeng/cldr_utils | e8353e473709c74fe3362a987cd615a4cd30f55c | [
"Apache-2.0"
] | 6 | 2019-07-19T17:49:11.000Z | 2021-03-20T14:38:28.000Z | test/macro_test.exs | kianmeng/cldr_utils | e8353e473709c74fe3362a987cd615a4cd30f55c | [
"Apache-2.0"
] | 5 | 2020-01-28T06:11:50.000Z | 2022-03-22T01:31:16.000Z | test/macro_test.exs | kianmeng/cldr_utils | e8353e473709c74fe3362a987cd615a4cd30f55c | [
"Apache-2.0"
] | 3 | 2020-01-28T06:08:42.000Z | 2020-10-20T13:40:19.000Z | defmodule Support.Macro.Test do
use ExUnit.Case, async: true

import ExUnit.CaptureLog

# warn_once/2 emits its log when the macro is expanded, so the module under
# test is compiled inside capture_log/1 — presumably so the compile-time
# log output lands in the captured log (confirm against Cldr.Macros docs).
test "warn once" do
  assert capture_log(fn ->
           defmodule M do
             import Cldr.Macros
             warn_once(:a, "Here we are")
           end
         end) =~ "Here we are"
end
end
| 20.6 | 43 | 0.556634 |
033be7487c1032151e8e2ac2abcf2425b616d4d0 | 182 | ex | Elixir | lib/ecto_batch.ex | peterkrenn/ecto-genstage-batcher-example | b9f935c5db3ee6127fef518d2197d020a36840f5 | [
"Apache-2.0"
] | 1 | 2018-11-27T09:31:06.000Z | 2018-11-27T09:31:06.000Z | lib/ecto_batch.ex | peterkrenn/ecto-genstage-batcher-example | b9f935c5db3ee6127fef518d2197d020a36840f5 | [
"Apache-2.0"
] | null | null | null | lib/ecto_batch.ex | peterkrenn/ecto-genstage-batcher-example | b9f935c5db3ee6127fef518d2197d020a36840f5 | [
"Apache-2.0"
] | null | null | null | defmodule EB do
@moduledoc """
Documentation for EB.
"""
@doc """
Hello world.

## Examples

    iex> EB.hello
    :world

"""
def hello, do: :world
end
| 9.578947 | 23 | 0.532967 |
033bea36ee53392daf97e552c765b6d1d26a1953 | 4,571 | ex | Elixir | debian/init.d.ex | amq5/WCMCommander | 15d17871d1903b8520a9c4452dab200698899812 | [
"MIT"
] | 240 | 2015-03-31T15:47:21.000Z | 2022-03-25T23:44:52.000Z | debian/init.d.ex | amq5/WCMCommander | 15d17871d1903b8520a9c4452dab200698899812 | [
"MIT"
] | 134 | 2015-01-01T14:04:19.000Z | 2015-03-26T17:26:33.000Z | debian/init.d.ex | amq5/WCMCommander | 15d17871d1903b8520a9c4452dab200698899812 | [
"MIT"
] | 54 | 2015-04-03T04:46:30.000Z | 2021-12-05T18:12:18.000Z | #!/bin/sh
### BEGIN INIT INFO
# Provides: wcm
# Required-Start: $local_fs $network $remote_fs $syslog
# Required-Stop: $local_fs $network $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: <Enter a short description of the software>
# Description: <Enter a long description of the software>
# <...>
# <...>
### END INIT INFO
# Author: skosarevsky <skosarevsky@unknown>
# Do NOT "set -e"
# PATH should only include /usr/* if it runs after the mountnfs.sh script
PATH=/sbin:/usr/sbin:/bin:/usr/bin
DESC="wcm"
NAME=wcm
DAEMON=/usr/sbin/wcm
DAEMON_ARGS=""
PIDFILE=/var/run/$NAME.pid
SCRIPTNAME=/etc/init.d/$NAME

# Exit if the package is not installed
[ -x "$DAEMON" ] || exit 0

# Read configuration variable file if it is present
# (lets the admin override DAEMON_ARGS etc. without editing this script).
[ -r /etc/default/$NAME ] && . /etc/default/$NAME

# Load the VERBOSE setting and other rcS variables
. /lib/init/vars.sh

# Define LSB log_* functions.
# Depend on lsb-base (>= 3.2-14) to ensure that this file is present
# and status_of_proc is working.
. /lib/lsb/init-functions
#
# Function that starts the daemon/service
#
do_start()
{
	# Return
	#   0 if daemon has been started
	#   1 if daemon was already running
	#   2 if daemon could not be started
	# The first invocation uses --test so an already-running daemon is
	# detected without starting a second copy.
	start-stop-daemon --start --quiet --pidfile $PIDFILE --exec $DAEMON --test > /dev/null \
		|| return 1
	start-stop-daemon --start --quiet --pidfile $PIDFILE --exec $DAEMON -- \
		$DAEMON_ARGS \
		|| return 2
	# The above code will not work for interpreted scripts, use the next
	# six lines below instead (Ref: #643337, start-stop-daemon(8) )
	#start-stop-daemon --start --quiet --pidfile $PIDFILE --startas $DAEMON \
	#	--name $NAME --test > /dev/null \
	#	|| return 1
	#start-stop-daemon --start --quiet --pidfile $PIDFILE --startas $DAEMON \
	#	--name $NAME -- $DAEMON_ARGS \
	#	|| return 2
	# Add code here, if necessary, that waits for the process to be ready
	# to handle requests from services started subsequently which depend
	# on this one. As a last resort, sleep for some time.
}
#
# Function that stops the daemon/service
#
do_stop()
{
	# Return
	#   0 if daemon has been stopped
	#   1 if daemon was already stopped
	#   2 if daemon could not be stopped
	#   other if a failure occurred
	# --retry=TERM/30/KILL/5: send SIGTERM, wait up to 30s, then SIGKILL.
	start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 --pidfile $PIDFILE --name $NAME
	RETVAL="$?"
	[ "$RETVAL" = 2 ] && return 2
	# Wait for children to finish too if this is a daemon that forks
	# and if the daemon is only ever run from this initscript.
	# If the above conditions are not satisfied then add some other code
	# that waits for the process to drop all resources that could be
	# needed by services started subsequently. A last resort is to
	# sleep for some time.
	start-stop-daemon --stop --quiet --oknodo --retry=0/30/KILL/5 --exec $DAEMON
	[ "$?" = 2 ] && return 2
	# Many daemons don't delete their pidfiles when they exit.
	rm -f $PIDFILE
	return "$RETVAL"
}
#
# Function that sends a SIGHUP to the daemon/service
#
# NOTE(review): currently unused — the "reload" case below is commented out.
do_reload() {
	#
	# If the daemon can reload its configuration without
	# restarting (for example, when it is sent a SIGHUP),
	# then implement that here.
	#
	start-stop-daemon --stop --signal 1 --quiet --pidfile $PIDFILE --name $NAME
	return 0
}
# Dispatch on the init action requested by the system (start/stop/...).
case "$1" in
  start)
	[ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME"
	do_start
	case "$?" in
		0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
		2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
	esac
	;;
  stop)
	[ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME"
	do_stop
	case "$?" in
		0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
		2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
	esac
	;;
  status)
	status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $?
	;;
  #reload|force-reload)
	#
	# If do_reload() is not implemented then leave this commented out
	# and leave 'force-reload' as an alias for 'restart'.
	#
	#log_daemon_msg "Reloading $DESC" "$NAME"
	#do_reload
	#log_end_msg $?
	#;;
  restart|force-reload)
	#
	# If the "reload" option is implemented then remove the
	# 'force-reload' alias
	#
	log_daemon_msg "Restarting $DESC" "$NAME"
	do_stop
	case "$?" in
	  0|1)
		do_start
		case "$?" in
			0) log_end_msg 0 ;;
			1) log_end_msg 1 ;; # Old process is still running
			*) log_end_msg 1 ;; # Failed to start
		esac
		;;
	  *)
		# Failed to stop
		log_end_msg 1
		;;
	esac
	;;
  *)
	#echo "Usage: $SCRIPTNAME {start|stop|restart|reload|force-reload}" >&2
	echo "Usage: $SCRIPTNAME {start|stop|status|restart|force-reload}" >&2
	exit 3
	;;
esac

# No-op so the script's exit status is 0 when the case falls through.
:
| 27.371257 | 89 | 0.659156 |
033bf63d5e523b5475c6906f8d98e68df3df914e | 209 | ex | Elixir | test/support/schemas/mysql/my_test_repo.ex | elixir-garage/ex_tenant | 2336ab92d4cf0e826fdd6bf6ce5fb0206095f007 | [
"Apache-2.0"
] | 1 | 2022-02-09T23:17:09.000Z | 2022-02-09T23:17:09.000Z | test/support/schemas/mysql/my_test_repo.ex | elixir-garage/ex_tenant | 2336ab92d4cf0e826fdd6bf6ce5fb0206095f007 | [
"Apache-2.0"
] | null | null | null | test/support/schemas/mysql/my_test_repo.ex | elixir-garage/ex_tenant | 2336ab92d4cf0e826fdd6bf6ce5fb0206095f007 | [
"Apache-2.0"
] | null | null | null | defmodule ExTenant.Test.Support.Schemas.Mysql.MyTestRepo do
@moduledoc """
We need to test the schema without any of our code.
"""
# Plain Ecto repo against MySQL (MyXQL adapter) for the test suite.
use Ecto.Repo, otp_app: :ex_tenant, adapter: Ecto.Adapters.MyXQL
end
| 29.857143 | 66 | 0.741627 |
033bfeb869b3aa6dc0dc6ff2c93ac594247e5c71 | 1,724 | exs | Elixir | test/double_bypass_test.exs | carlosliracl/double_bypass | 0f40d31da84ee0c9828788e174a2f8a34f96e36f | [
"MIT"
] | 1 | 2021-09-10T17:58:49.000Z | 2021-09-10T17:58:49.000Z | test/double_bypass_test.exs | carlosliracl/double_bypass | 0f40d31da84ee0c9828788e174a2f8a34f96e36f | [
"MIT"
] | null | null | null | test/double_bypass_test.exs | carlosliracl/double_bypass | 0f40d31da84ee0c9828788e174a2f8a34f96e36f | [
"MIT"
] | 1 | 2022-01-28T13:10:24.000Z | 2022-01-28T13:10:24.000Z | defmodule DoubleBypass.Config do
@moduledoc false

# Default host values the agent starts with; tests override them via put/2.
@config %{
  service_two_host: "www.test-url.com",
  service_three_host: "www.test-url.com"
}

use Agent
# Boots the agent with the default config, registered under this module name.
def start_link, do: Agent.start_link(fn -> @config end, name: __MODULE__)
# Stores `value` under the atom key `config`.
def put(config, value) when is_atom(config) do
  Agent.update(__MODULE__, fn state -> Map.put(state, config, value) end)
end
# Reads a value: a single atom key is wrapped into a one-element path,
# and a list of keys is resolved as a nested path via get_in/2.
def get(key) when is_atom(key) do
  get([key])
end

def get(keys) when is_list(keys) do
  state = Agent.get(__MODULE__, fn current -> current end)
  get_in(state, keys)
end
# Shuts the agent down.
def stop, do: Agent.stop(__MODULE__)
end
defmodule DoubleBypassTest do
  use ExUnit.Case

  # Tags simulating what DoubleBypass reads from the ExUnit test context.
  @tags %{
    service_one_bypass: [%{test: "params"}],
    service_two_bypass: [%{test: "params"}],
    service_three_bypass: [%{test: "params"}]
  }

  # The three supported target shapes: an env-var name, an explicit
  # setter/getter pair, and a config key resolved via the default accessors.
  defp bypass_tags do
    [
      service_one_bypass: "SERVICE_HOST",
      service_two_bypass: %{
        setter: &DoubleBypass.Config.put(:service_two_host, &1),
        getter: fn -> DoubleBypass.Config.get(:service_two_host) end
      },
      service_three_bypass: %{key: :service_three_host}
    ]
  end

  # One shared agent for the whole module; tests mutate it via put/2.
  setup_all do
    {:ok, _pid} = DoubleBypass.Config.start_link()
    :ok
  end

  test "setup_bypass?" do
    assert DoubleBypass.setup_bypass?(@tags, bypass_tags())
    refute DoubleBypass.setup_bypass?(@tags, service_four_bypass: "SERVICE_FOUR_HOST")
  end

  # Smoke test: after setup_bypass, each configured host should point at a
  # live Bypass server, so plain GETs must succeed (get!/1 raises otherwise).
  test "setup_bypass" do
    DoubleBypass.setup_bypass(@tags, bypass_tags(), %{
      setter: &DoubleBypass.Config.put/2,
      getter: &DoubleBypass.Config.get/1
    })

    HTTPoison.start()
    HTTPoison.get!(System.get_env("SERVICE_HOST"))
    HTTPoison.get!(DoubleBypass.Config.get(:service_two_host))
    HTTPoison.get!(DoubleBypass.Config.get(:service_three_host))
  end
end
| 23.297297 | 86 | 0.676914 |
033c0e5cfc36800ea03c5df81ebc762fc52f0587 | 5,104 | ex | Elixir | implementations/elixir/ockam/ockam/lib/ockam/protocol/mapping.ex | twittner/ockam | 96eadf99da42f7c35539c6e29010a657c579ccba | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/lib/ockam/protocol/mapping.ex | twittner/ockam | 96eadf99da42f7c35539c6e29010a657c579ccba | [
"Apache-2.0"
] | 10 | 2022-02-23T06:44:58.000Z | 2022-03-23T21:04:37.000Z | implementations/elixir/ockam/ockam/lib/ockam/protocol/mapping.ex | twittner/ockam | 96eadf99da42f7c35539c6e29010a657c579ccba | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Protocol.Mapping do
@moduledoc """
Protocol mapping helper module to encode and decode messages
with different protocols
Usage:
use Ockam.Protocol.Mapping
@impl true
def protocol_mapping() do
Ockam.Protocol.Mapping.mapping([
{:client, Protocol1},
{:server, Protocol2}
])
end
def handle_message(%Ockam.Message{payload: payload}, state) do
case decode_payload(payload) do
{:ok, Protocol1, message} ->
#response from Protocol1
{:ok, Protocol2, message} ->
#request from Protocol1
end
end
"""
alias Ockam.Bare.Extended, as: BareExtended
alias Ockam.Protocol
@type extended_schema() :: BareExtended.extended_schema()
@type schema_map() :: %{String.t() => extended_schema()}
@type mapping() :: %{
in: schema_map(),
out: schema_map(),
modules: %{String.t() => module()}
}
def client(protocol) do
mapping([{:client, protocol}])
end
def server(protocol) do
mapping([{:server, protocol}])
end
def mapping(protocol_specs) do
protocol_specs = expand_specs(protocol_specs)
check_conflicts(protocol_specs)
protocol_modules =
Enum.reduce(
protocol_specs,
%{},
fn {_type, mod, protocol}, mod_map ->
Map.put(mod_map, protocol.name, mod)
end
)
Enum.reduce(protocol_specs, %{in: %{}, out: %{}, modules: protocol_modules}, fn
{:client, _mod, protocol}, %{in: in_map, out: out_map, modules: modules} ->
name = protocol.name
%{
in: update_schema_map(in_map, name, protocol.response),
out: update_schema_map(out_map, name, protocol.request),
modules: modules
}
{:server, _mod, protocol}, %{in: in_map, out: out_map, modules: modules} ->
name = protocol.name
%{
in: update_schema_map(in_map, name, protocol.request),
out: update_schema_map(out_map, name, protocol.response),
modules: modules
}
end)
end
@type protocol_id() :: module() | String.t()
@spec decode_payload(binary(), mapping()) :: {:ok, protocol_id(), any()} | {:error, any()}
def decode_payload(data, mapping) do
in_map = mapping.in
case Protocol.base_decode(data) do
{:ok, %{protocol: name, data: protocol_data}} ->
with {:ok, schema} <- Map.fetch(in_map, name),
{:ok, decoded} <- BareExtended.decode(protocol_data, schema) do
protocol_id = protocol_id(mapping, name)
{:ok, protocol_id, decoded}
else
:error ->
{:error, {:unmatched_protocol, name, mapping}}
other ->
other
end
other ->
other
end
end
@spec encode_payload(protocol_id(), any(), mapping()) :: binary()
def encode_payload(module, data, mapping) when is_atom(module) do
name = Map.fetch!(module.protocol(), :name)
encode_payload(name, data, mapping)
end
def encode_payload(name, data, mapping) when is_binary(name) do
out_map = mapping.out
case Map.fetch(out_map, name) do
{:ok, schema} ->
encoded = BareExtended.encode(data, schema)
Protocol.base_encode(name, encoded)
:error ->
:erlang.error({:error, {:unmatched_protocol, name, mapping}})
end
end
defp protocol_id(mapping, name) do
case Map.get(mapping.modules, name) do
nil -> name
module -> module
end
end
defp expand_specs(protocol_specs) do
Enum.map(
protocol_specs,
fn
{type, module} when is_atom(module) -> {type, module, module.protocol()}
{type, %Protocol{} = protocol} -> {type, nil, protocol}
{type, map} when is_map(map) -> {type, nil, struct(Protocol, map)}
end
)
end
defp check_conflicts(protocol_specs) do
duplicate_names =
protocol_specs
|> Enum.map(fn {_type, _mod, protocol} -> protocol.name end)
|> Enum.frequencies()
|> Enum.filter(fn {_k, v} -> v > 1 end)
|> Enum.map(fn {k, _v} -> k end)
case duplicate_names do
[] ->
:ok
_list ->
raise(
"Protocol name conflict in #{inspect(protocol_specs)}. Duplicate names: #{inspect(duplicate_names)}"
)
end
end
@spec update_schema_map(schema_map(), String.t(), extended_schema() | nil) :: schema_map()
defp update_schema_map(map, _name, nil) do
map
end
defp update_schema_map(map, name, schema) do
Map.put(map, name, schema)
end
## Behaviour
@callback protocol_mapping() :: mapping()
defmacro __using__(_options) do
alias Ockam.Protocol.Mapping
quote do
@behaviour Ockam.Protocol.Mapping
def decode_payload(payload) do
mapping = protocol_mapping()
Mapping.decode_payload(payload, mapping)
end
def encode_payload(type, option, data) do
encode_payload(type, {option, data})
end
def encode_payload(type, data) do
mapping = protocol_mapping()
Mapping.encode_payload(type, data, mapping)
end
end
end
end
| 25.777778 | 110 | 0.612069 |
033c137b9fc4a83e41b3cfa8e603fdad2f2d35ad | 116 | exs | Elixir | .formatter.exs | mkrzywda/image_classifier | a9261cf6bff4fe9e35400954418e8712e79c6ee4 | [
"MIT"
] | 147 | 2018-06-11T04:33:38.000Z | 2022-03-23T07:24:57.000Z | .formatter.exs | mkrzywda/image_classifier | a9261cf6bff4fe9e35400954418e8712e79c6ee4 | [
"MIT"
] | 19 | 2019-07-10T17:10:34.000Z | 2021-04-18T11:16:58.000Z | .formatter.exs | mkrzywda/image_classifier | a9261cf6bff4fe9e35400954418e8712e79c6ee4 | [
"MIT"
] | 43 | 2015-08-26T06:41:11.000Z | 2022-03-11T12:36:17.000Z | # Used by "mix format"
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
line_length: 80
]
| 19.333333 | 70 | 0.577586 |
033c309b9c2f5bc0be07d7cc1b8607b135dd997e | 1,386 | ex | Elixir | projects/standup/test/support/data_case.ex | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2020-02-11T06:00:11.000Z | 2020-02-11T06:00:11.000Z | projects/standup/test/support/data_case.ex | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2017-09-23T19:41:29.000Z | 2017-09-25T05:12:38.000Z | projects/standup/test/support/data_case.ex | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | null | null | null | defmodule Standup.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Standup.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Standup.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Standup.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Standup.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Enum.reduce(opts, message, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 25.666667 | 77 | 0.680375 |
033c3a302e102c14f7c5a4fbb81d412969510dd7 | 1,064 | ex | Elixir | apps/examples/lib/examples/ping_pong/create_entry_order.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | apps/examples/lib/examples/ping_pong/create_entry_order.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/examples/lib/examples/ping_pong/create_entry_order.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | defmodule Examples.PingPong.CreateEntryOrder do
alias Examples.PingPong.{Config, EntryPrice}
alias Tai.NewOrders.Submissions
@type advisor_process :: Tai.Advisor.advisor_name()
@type market_quote :: Tai.Markets.Quote.t()
@type config :: Config.t()
@type order :: Tai.NewOrders.Order.t()
@spec create(advisor_process, market_quote, config) :: {:ok, order}
def create(advisor_process, market_quote, config) do
price = EntryPrice.calculate(market_quote, config.product)
venue = market_quote.venue_id |> Atom.to_string()
credential = config.fee.credential_id |> Atom.to_string()
product_symbol = config.product.symbol |> Atom.to_string()
%Submissions.BuyLimitGtc{
venue: venue,
credential: credential,
venue_product_symbol: config.product.venue_symbol,
product_symbol: product_symbol,
price: price,
qty: config.max_qty,
product_type: config.product.type,
post_only: true,
order_updated_callback: {advisor_process, :entry_order}
}
|> Tai.NewOrders.create()
end
end
| 34.322581 | 69 | 0.716165 |
033c5104f3063cc48d2d05076008e03747aad694 | 525 | ex | Elixir | ui/lib/ui_web/views/error_view.ex | kpanic/nerves_morse | 0400179d0571dd92c2aa469b039c63e192a6b92c | [
"Apache-2.0"
] | 3 | 2019-01-25T21:51:06.000Z | 2020-01-12T21:52:29.000Z | ui/lib/ui_web/views/error_view.ex | kpanic/nerves_morse | 0400179d0571dd92c2aa469b039c63e192a6b92c | [
"Apache-2.0"
] | null | null | null | ui/lib/ui_web/views/error_view.ex | kpanic/nerves_morse | 0400179d0571dd92c2aa469b039c63e192a6b92c | [
"Apache-2.0"
] | null | null | null | defmodule UiWeb.ErrorView do
use UiWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.json", _assigns) do
# %{errors: %{detail: "Internal Server Error"}}
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.json" becomes
# "Not Found".
def template_not_found(template, _assigns) do
%{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
end
end
| 30.882353 | 83 | 0.714286 |
033c7ccd571901007c8a5ede3aff3fe54a7fff73 | 2,823 | ex | Elixir | lib/nomad_client/model/task_group.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | 8 | 2021-09-04T21:22:53.000Z | 2022-02-22T22:48:38.000Z | lib/nomad_client/model/task_group.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | lib/nomad_client/model/task_group.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule NomadClient.Model.TaskGroup do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:Name,
:Count,
:Constraints,
:Affinities,
:Tasks,
:Spreads,
:Volumes,
:RestartPolicy,
:ReschedulePolicy,
:EphemeralDisk,
:Update,
:Migrate,
:Networks,
:Meta,
:Services,
:ShutdownDelay,
:Scaling
]
@type t :: %__MODULE__{
:Name => String.t() | nil,
:Count => integer() | nil,
:Constraints => [NomadClient.Model.Constraint.t()] | nil,
:Affinities => [NomadClient.Model.Affinity.t()] | nil,
:Tasks => [NomadClient.Model.Task.t()] | nil,
:Spreads => [NomadClient.Model.Spread.t()] | nil,
:Volumes => %{optional(String.t()) => NomadClient.Model.VolumeRequest.t()} | nil,
:RestartPolicy => NomadClient.Model.RestartPolicy.t() | nil,
:ReschedulePolicy => NomadClient.Model.ReschedulePolicy.t() | nil,
:EphemeralDisk => NomadClient.Model.EphemeralDisk.t() | nil,
:Update => NomadClient.Model.UpdateStrategy.t() | nil,
:Migrate => NomadClient.Model.MigrateStrategy.t() | nil,
:Networks => [NomadClient.Model.NetworkResource.t()] | nil,
:Meta => %{optional(String.t()) => String.t()} | nil,
:Services => [NomadClient.Model.Service.t()] | nil,
:ShutdownDelay => integer() | nil,
:Scaling => NomadClient.Model.ScalingPolicy.t() | nil
}
end
defimpl Poison.Decoder, for: NomadClient.Model.TaskGroup do
import NomadClient.Deserializer
def decode(value, options) do
value
|> deserialize(:Constraints, :list, NomadClient.Model.Constraint, options)
|> deserialize(:Affinities, :list, NomadClient.Model.Affinity, options)
|> deserialize(:Tasks, :list, NomadClient.Model.Task, options)
|> deserialize(:Spreads, :list, NomadClient.Model.Spread, options)
|> deserialize(:Volumes, :map, NomadClient.Model.VolumeRequest, options)
|> deserialize(:RestartPolicy, :struct, NomadClient.Model.RestartPolicy, options)
|> deserialize(:ReschedulePolicy, :struct, NomadClient.Model.ReschedulePolicy, options)
|> deserialize(:EphemeralDisk, :struct, NomadClient.Model.EphemeralDisk, options)
|> deserialize(:Update, :struct, NomadClient.Model.UpdateStrategy, options)
|> deserialize(:Migrate, :struct, NomadClient.Model.MigrateStrategy, options)
|> deserialize(:Networks, :list, NomadClient.Model.NetworkResource, options)
|> deserialize(:Services, :list, NomadClient.Model.Service, options)
|> deserialize(:Scaling, :struct, NomadClient.Model.ScalingPolicy, options)
end
end
| 39.208333 | 91 | 0.658874 |
033c83ea9258f0e3afe012e5401038725bdeef6c | 1,207 | exs | Elixir | summation.exs | bschmeck/sicp-elixir | 4e6f959f506d0cb4e2692177cbdad1c87779cf7d | [
"MIT"
] | 2 | 2019-08-26T13:17:27.000Z | 2020-09-24T13:16:07.000Z | summation.exs | bschmeck/sicp-elixir | 4e6f959f506d0cb4e2692177cbdad1c87779cf7d | [
"MIT"
] | null | null | null | summation.exs | bschmeck/sicp-elixir | 4e6f959f506d0cb4e2692177cbdad1c87779cf7d | [
"MIT"
] | null | null | null | defmodule Summation do
def sum(_, a, _, b) when a > b, do: 0
def sum(term_fn, a, next_fn, b) do
term_fn.(a) + sum(term_fn, next_fn.(a), next_fn, b)
end
def increment(x), do: x + 1
def cube(x), do: x * x * x
def identity(x), do: x
def sum_cubes(a, b), do: sum(&cube/1, a, &increment/1, b)
def sum_ints(a, b), do: sum(&identity/1, a, &increment/1, b)
def sum_pi(a, b) do
term = &(1 / (&1 * (&1 + 2)))
next = &(&1 + 4)
sum(term, a, next, b)
end
end
defmodule Integral do
def of(func, a, b, dx) do
add_dx = &(&1 + dx)
Summation.sum(func, (a + dx / 2), add_dx, b) * dx
end
end
ExUnit.start
defmodule SummationTests do
use ExUnit.Case, async: true
test "sum_cubes" do
assert Summation.sum_cubes(1, 10) == 3025
end
test "sum_ints" do
assert Summation.sum_ints(1, 10) == 55
end
test "sum_pi" do
assert 8 * Summation.sum_pi(1, 1000) == 3.139592655589783
end
end
defmodule IntegrationTests do
use ExUnit.Case, async: true
test "integral of x cubed from 0 to 1" do
assert Integral.of(&Summation.cube/1, 0, 1, 0.01) == 0.24998750000000042
assert Integral.of(&Summation.cube/1, 0, 1, 0.001) == 0.249999875000001
end
end
| 21.553571 | 76 | 0.61889 |
033c8bc9e46a8de35545cf7db08ed340fdae92ca | 592 | ex | Elixir | example/lib/live_phone_example_web/router.ex | workwithmax/live_phone | 1a5482a9032735dd201f2468643145f1ee0a3588 | [
"MIT"
] | 3 | 2021-02-28T22:27:58.000Z | 2021-12-08T03:18:10.000Z | example/lib/live_phone_example_web/router.ex | whitepaperclip/live_phone | bda5d554dd7d3df313fa15cd71eb2bcb8acda897 | [
"MIT"
] | 25 | 2021-02-26T16:15:46.000Z | 2022-03-24T08:10:51.000Z | example/lib/live_phone_example_web/router.ex | workwithmax/live_phone | 1a5482a9032735dd201f2468643145f1ee0a3588 | [
"MIT"
] | 2 | 2020-11-27T17:33:52.000Z | 2021-01-25T16:05:16.000Z | defmodule LivePhoneExampleWeb.Router do
use LivePhoneExampleWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_live_flash
plug :put_root_layout, {LivePhoneExampleWeb.LayoutView, :root}
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", LivePhoneExampleWeb do
pipe_through :browser
live "/", PageLive, :index
end
# Other scopes may use custom stacks.
# scope "/api", LivePhoneExampleWeb do
# pipe_through :api
# end
end
| 21.142857 | 66 | 0.704392 |
033c916ee6186b0287248983399eb2795c9cd47a | 2,522 | ex | Elixir | lib/home_bot/data_store.ex | marktermaat/home_bot | b6d0bd5e1e97b2b0c95e201618e27de4259378d3 | [
"MIT"
] | null | null | null | lib/home_bot/data_store.ex | marktermaat/home_bot | b6d0bd5e1e97b2b0c95e201618e27de4259378d3 | [
"MIT"
] | null | null | null | lib/home_bot/data_store.ex | marktermaat/home_bot | b6d0bd5e1e97b2b0c95e201618e27de4259378d3 | [
"MIT"
] | null | null | null | defmodule HomeBot.DataStore do
@moduledoc "Public interface for Data Stores"
alias HomeBot.DataStore.ChannelStore
alias HomeBot.DataStore.EnergyStore
alias HomeBot.DataStore.TemperatureStore
alias HomeBot.DataStore.EnergyPostgresStore
def add_subscriber(channel_id) do
ChannelStore.add_subscriber(channel_id)
end
def get_subscribers do
ChannelStore.get_subscribers()
end
def create_temperature_database_if_not_exists do
TemperatureStore.create_database_if_not_exists()
end
def write_temperature_data(data) do
TemperatureStore.write_temperature_data(data)
end
def get_latest_weather_data do
TemperatureStore.get_latest_weather_data()
end
def get_average_temperature_per_day(days \\ 48) do
TemperatureStore.get_average_temperature_per_day(days)
end
def get_average_temperature(start_time, end_time) do
TemperatureStore.get_average_temperature(start_time, end_time)
end
def get_latest_energy_measurement do
EnergyPostgresStore.get_latest_measurement()
end
def get_measurements_since(timestamp) do
EnergyStore.get_measurements_since(timestamp)
end
def get_gas_usage(group, start_time, end_time) do
EnergyStore.get_gas_usage(group, start_time, end_time)
end
def get_gas_usage_per_hour(days \\ 48) do
EnergyStore.get_gas_usage_per_hour(days)
end
def get_gas_usage_per_day(days \\ 48) do
EnergyStore.get_gas_usage_per_day(days)
end
def get_electricity_usage(group, start_time, end_time) do
EnergyStore.get_electricity_usage(group, start_time, end_time)
end
@spec get_energy_usage(NaiveDateTime.t(), NaiveDateTime.t(), integer(), String.t()) ::
list(map())
def get_energy_usage(start_time, end_time, group_quantity, group_unit) do
EnergyPostgresStore.get_energy_usage(start_time, end_time, group_quantity, group_unit)
end
def get_electricity_usage_per_hour(days \\ 3) do
EnergyStore.get_electricity_usage_per_hour(days)
end
def get_electricity_usage_per_day(days \\ 48) do
EnergyStore.get_electricity_usage_per_day(days)
end
def get_electricity_usage(minutes \\ 3) do
EnergyPostgresStore.get_electricity_usage(minutes)
end
def get_current_home_temperature do
HomeBot.DataStore.HomeClimateStore.get_latest_temperature()
end
def get_current_home_humidity do
HomeBot.DataStore.HomeClimateStore.get_latest_humidity()
end
def get_recent_home_climate_data do
HomeBot.DataStore.HomeClimateStore.get_recent_home_climate_data()
end
end
| 27.714286 | 90 | 0.797383 |
033c9fa2e0109530da92b0cdf9f01f18ffb556fb | 831 | ex | Elixir | lib/screens/v2/widget_instance/placeholder.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 3 | 2021-07-27T14:11:00.000Z | 2022-01-03T14:16:43.000Z | lib/screens/v2/widget_instance/placeholder.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 444 | 2021-03-10T20:57:17.000Z | 2022-03-31T16:00:35.000Z | lib/screens/v2/widget_instance/placeholder.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | null | null | null | defmodule Screens.V2.WidgetInstance.Placeholder do
@moduledoc false
alias Screens.V2.WidgetInstance.Placeholder
defstruct color: nil,
slot_names: []
@type color :: :grey | :blue | :green | :red
@type t :: %__MODULE__{
color: color(),
slot_names: list(atom())
}
defimpl Screens.V2.WidgetInstance do
def priority(_), do: [100]
def serialize(%Placeholder{color: color}), do: %{color: color}
def slot_names(%Placeholder{slot_names: slot_names}), do: slot_names
def widget_type(_), do: :placeholder
def valid_candidate?(_instance), do: true
def audio_serialize(_instance), do: %{}
def audio_sort_key(_instance), do: 0
def audio_valid_candidate?(_instance), do: false
def audio_view(_instance), do: ScreensWeb.V2.Audio.PlaceholderView
end
end
| 30.777778 | 72 | 0.6787 |
033ca8fae26dd057be282a6d60a3242523dd46ea | 2,779 | ex | Elixir | lib/cadet_web/helpers/view_helper.ex | chownces/cadet | 0d8b264e4fad1c9aaab7ef3f037ac4e07a4c9b22 | [
"Apache-2.0"
] | null | null | null | lib/cadet_web/helpers/view_helper.ex | chownces/cadet | 0d8b264e4fad1c9aaab7ef3f037ac4e07a4c9b22 | [
"Apache-2.0"
] | 10 | 2022-02-24T17:57:38.000Z | 2022-03-31T07:43:05.000Z | lib/cadet_web/helpers/view_helper.ex | chownces/cadet | 0d8b264e4fad1c9aaab7ef3f037ac4e07a4c9b22 | [
"Apache-2.0"
] | 1 | 2019-07-17T15:51:58.000Z | 2019-07-17T15:51:58.000Z | defmodule CadetWeb.ViewHelper do
@moduledoc """
Helper functions shared throughout views
"""
defp build_staff(user) do
transform_map_for_view(user, [:name, :id])
end
def unsubmitted_by_builder(nil), do: nil
def unsubmitted_by_builder(staff) do
build_staff(staff)
end
def grader_builder(nil), do: nil
def grader_builder(_) do
fn %{grader: grader} -> build_staff(grader) end
end
def graded_at_builder(nil), do: nil
def graded_at_builder(_) do
fn %{updated_at: updated_at} -> format_datetime(updated_at) end
end
def format_datetime(nil), do: nil
def format_datetime(datetime = %DateTime{}) do
datetime
|> DateTime.truncate(:millisecond)
|> Timex.format!("{ISO:Extended}")
end
def format_datetime(datetime = %NaiveDateTime{}) do
datetime
|> Timex.to_datetime()
|> format_datetime()
end
@doc """
This function allows you to build a map for a view from a map of transformations or a list of fields.
Given a `key_list`, it is the equivalent of `Map.take(source, key_list)`.
Given a map of `%{view_field: source_field, ...}`, it is the equivalent of `%{view_field: Map.get(source, source_field), ...}`
Given a map of `%{view_field: source_function, ...}`, it is the equivalent of `%{view_field: apply(source_function, source)}`
Examples:
```
source = %{
foofoo: "ho",
barbar: "ha",
foobar: "hoha"
}
field_list = [:foofoo, :barbar]
transform_map_for_view(source, field_list)
> %{
foofoo: "ho",
barbar: "ha"
}
key_transformations = %{
foo: :foofoo,
bar: :barbar
}
transform_map_for_view(source, key_transformations)
> %{
foo: Map.get(source, :foofoo),
bar: Map.get(source, :barbar)
}
function_transformations = %{
foo: fn source -> source.foofoo <> "hoho",
bar: fn source -> source.barbar <> "barbar"
}
transform_map_for_view(source, function_transformations)
> %{
foo: source.foofoo <> "hoho",
bar: source.barbar <> "barbar"
}
```
"""
def transform_map_for_view(source, transformations) when is_map(transformations) do
Enum.reduce(
transformations,
%{},
fn {field_name, transformation}, acc ->
Map.put(acc, field_name, get_value(transformation, source))
end
)
end
def transform_map_for_view(source, fields) when is_list(fields) do
transform_map_for_view(
source,
Enum.reduce(fields, %{}, fn field, acc -> Map.put(acc, field, field) end)
)
end
defp get_value(source_spec, value_store) when is_function(source_spec) do
Kernel.apply(source_spec, [value_store])
end
defp get_value(source_spec, value_store) when is_binary(source_spec) or is_atom(source_spec) do
Map.get(value_store, source_spec)
end
end
| 24.165217 | 128 | 0.670385 |
033cbb986ae5c86e72e60e52447b52d416178f66 | 1,127 | ex | Elixir | lib/perf/model/request_result.ex | bancolombia/perf-analizer | 16a90dad045413ee168fd2a45f975f3c03099fb3 | [
"MIT"
] | 1 | 2020-03-03T18:17:37.000Z | 2020-03-03T18:17:37.000Z | lib/perf/model/request_result.ex | bancolombia/perf-analizer | 16a90dad045413ee168fd2a45f975f3c03099fb3 | [
"MIT"
] | 3 | 2020-02-20T18:53:59.000Z | 2020-02-20T19:00:06.000Z | lib/perf/model/request_result.ex | bancolombia/perf-analizer | 16a90dad045413ee168fd2a45f975f3c03099fb3 | [
"MIT"
] | null | null | null | defmodule RequestResult do
defstruct [
start: 0,
time_stamp: 0,
label: "",
thread_name: "",
grp_threads: 0,
all_threads: 0,
url: "",
elapsed: 0,
response_code: 0,
response_message: "",
data_type: "",
success: false,
failure_message: "",
bytes: 0,
sent_bytes: 0,
latency: 0,
idle_time: 0,
connect: 0,
response_headers: []
]
def new(label, thread_name, url, sent_bytes, connect) do
%__MODULE__{
start: :erlang.monotonic_time(:millisecond),
time_stamp: System.os_time(:millisecond),
label: label,
thread_name: thread_name,
url: url,
sent_bytes: sent_bytes,
connect: connect
}
end
def complete(
%RequestResult{start: start} = initial,
response_code,
body,
response_headers,
latency
)do
elapsed = :erlang.monotonic_time(:millisecond) - start
%{
initial |
elapsed: elapsed,
latency: latency - start,
response_code: response_code,
failure_message: body,
response_headers: response_headers
}
end
end
| 20.490909 | 58 | 0.598048 |
033cf8d14768e02d370a4f57f577a168f2e21e29 | 4,078 | exs | Elixir | apps/aecore/test/aecore_validation_test.exs | gspasov/dogs-blockchain | 884c14cfc98de2c3793a204da069630d090bbc90 | [
"0BSD"
] | null | null | null | apps/aecore/test/aecore_validation_test.exs | gspasov/dogs-blockchain | 884c14cfc98de2c3793a204da069630d090bbc90 | [
"0BSD"
] | 2 | 2018-10-01T16:46:26.000Z | 2018-10-01T19:45:42.000Z | apps/aecore/test/aecore_validation_test.exs | gspasov/dogs-blockchain | 884c14cfc98de2c3793a204da069630d090bbc90 | [
"0BSD"
] | null | null | null | defmodule AecoreValidationTest do
@moduledoc """
Unit tests for the BlockValidation module
"""
use ExUnit.Case
doctest Aecore.Chain.BlockValidation
alias Aecore.Chain.BlockValidation
alias Aecore.Chain.{Block, Header, Genesis}
alias Aecore.Chain.Worker, as: Chain
alias Aecore.Miner.Worker, as: Miner
alias Aecore.Keys
alias Aecore.Account.Account
alias Aecore.Governance.GovernanceConstants
setup_all do
Code.require_file("test_utils.ex", "./test")
TestUtils.clean_blockchain()
path = Application.get_env(:aecore, :persistence)[:path]
if File.exists?(path) do
File.rm_rf(path)
end
on_exit(fn ->
TestUtils.clean_blockchain()
end)
end
setup _ctx do
Miner.mine_sync_block_to_chain()
%{public: receiver} = :enacl.sign_keypair()
[
receiver: receiver
]
end
@tag :validation
test "validate block header height", ctx do
new_block = get_new_block(ctx.receiver)
prev_block = get_prev_block()
top_block = Chain.top_block()
top_block_hash = Header.hash(top_block.header)
blocks_for_target_calculation =
Chain.get_blocks(
top_block_hash,
GovernanceConstants.number_of_blocks_for_target_recalculation()
)
_ =
BlockValidation.calculate_and_validate_block(
new_block,
prev_block,
get_chain_state(),
blocks_for_target_calculation
)
incorrect_pow_block = %Block{new_block | header: %Header{new_block.header | height: 10}}
assert {:error, "#{BlockValidation}: Header hash doesnt meet the target"} ==
BlockValidation.calculate_and_validate_block(
incorrect_pow_block,
prev_block,
get_chain_state(),
blocks_for_target_calculation
)
end
@tag :validation
test "validate block header time", ctx do
Miner.mine_sync_block_to_chain()
new_block = get_new_block(ctx.receiver)
prev_block = get_prev_block()
top_block = Chain.top_block()
top_block_hash = Header.hash(top_block.header)
blocks_for_target_calculation =
Chain.get_blocks(
top_block_hash,
GovernanceConstants.number_of_blocks_for_target_recalculation()
)
_ =
BlockValidation.calculate_and_validate_block(
new_block,
prev_block,
get_chain_state(),
blocks_for_target_calculation
)
wrong_time_block = %Block{
new_block
| header: %Header{
new_block.header
| time:
System.system_time(:milliseconds) + System.system_time(:milliseconds) +
30 * 60 * 1000
}
}
assert {:error, "#{BlockValidation}: Invalid header time"} ==
BlockValidation.calculate_and_validate_block(
wrong_time_block,
prev_block,
get_chain_state(),
blocks_for_target_calculation
)
end
test "validate transactions in a block", ctx do
{sender, priv_key} = Keys.keypair(:sign)
amount = 5
fee = 1
nonce = Account.nonce(TestUtils.get_accounts_chainstate(), sender) + 1
{:ok, signed_tx1} =
Account.spend(sender, priv_key, ctx.receiver, amount, fee, nonce + 1, <<"payload">>)
{:ok, signed_tx2} =
Account.spend(sender, priv_key, ctx.receiver, amount + 5, fee, nonce + 2, <<"payload">>)
block = %{Genesis.block() | txs: [signed_tx1, signed_tx2]}
assert block
|> BlockValidation.validate_block_transactions()
|> Enum.all?() == true
end
def get_new_block(receiver) do
{sender, priv_key} = Keys.keypair(:sign)
amount = 100
fee = 10
{:ok, signed_tx} =
Account.spend(sender, priv_key, receiver, amount, fee, 13_213_223, <<"payload">>)
Aecore.Tx.Pool.Worker.add_transaction(signed_tx)
{:ok, new_block} = Aecore.Miner.Worker.mine_sync_block(Aecore.Miner.Worker.candidate())
new_block
end
def get_prev_block do
Chain.top_block()
end
def get_chain_state do
Chain.chain_state()
end
end
| 25.810127 | 94 | 0.652526 |
033d0485b97787992d118775d2bb033c4bbb2c1e | 295 | exs | Elixir | config/dev.exs | saulecabrera/dustbin | 6f862d35d4584acda1e082fad278a7c23dc9598b | [
"MIT"
] | null | null | null | config/dev.exs | saulecabrera/dustbin | 6f862d35d4584acda1e082fad278a7c23dc9598b | [
"MIT"
] | 7 | 2017-02-01T00:17:57.000Z | 2017-04-17T13:40:04.000Z | config/dev.exs | saulecabrera/dustbin | 6f862d35d4584acda1e082fad278a7c23dc9598b | [
"MIT"
] | null | null | null | use Mix.Config
config :extwitter, :oauth, [
consumer_key: System.get_env("TWITTER_CONSUMER_KEY"),
consumer_secret: System.get_env("TWITTER_CONSUMER_SECRET"),
access_token: System.get_env("TWITTER_ACCESS_TOKEN"),
access_token_secret: System.get_env("TWITTER_ACCESS_TOKEN_SECRET")
]
| 32.777778 | 69 | 0.786441 |
033d0542d6aeeb113630f81ada610d255c27c799 | 121 | exs | Elixir | priv/repo/load-wiki-data.exs | lee-dohm/elite-investigations | 6511bd1c734bcc7d1e4177b73006891fd1d81855 | [
"MIT"
] | null | null | null | priv/repo/load-wiki-data.exs | lee-dohm/elite-investigations | 6511bd1c734bcc7d1e4177b73006891fd1d81855 | [
"MIT"
] | 8 | 2019-03-14T16:31:37.000Z | 2019-03-31T16:14:57.000Z | priv/repo/load-wiki-data.exs | lee-dohm/elite-investigations | 6511bd1c734bcc7d1e4177b73006891fd1d81855 | [
"MIT"
] | null | null | null | alias EliteInvestigations.Galnet
__DIR__
|> Path.join("frank-heinrich-data.json")
|> File.read!()
|> Galnet.load_feed()
| 17.285714 | 40 | 0.735537 |
033d08b8019ba4525ef906f6e86e01a67927a2b4 | 1,346 | exs | Elixir | mix.exs | holandes22/phx_gen_auth | 73d49710b29a5a2608c991449f84320cfd4d4eff | [
"Apache-2.0"
] | 839 | 2020-04-01T05:33:01.000Z | 2022-03-14T19:37:56.000Z | mix.exs | holandes22/phx_gen_auth | 73d49710b29a5a2608c991449f84320cfd4d4eff | [
"Apache-2.0"
] | 119 | 2020-04-09T11:34:01.000Z | 2021-09-14T04:54:43.000Z | mix.exs | holandes22/phx_gen_auth | 73d49710b29a5a2608c991449f84320cfd4d4eff | [
"Apache-2.0"
] | 63 | 2020-04-19T23:51:29.000Z | 2022-01-06T20:08:35.000Z | defmodule Phx.Gen.Auth.MixProject do
use Mix.Project
@version "0.7.0"
def project do
[
app: :phx_gen_auth,
version: @version,
elixir: "~> 1.8",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
preferred_cli_env: [docs: :docs],
description: "An authentication system generator for Phoenix 1.5",
docs: docs(),
deps: deps(),
package: package()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:phoenix, "~> 1.5.2"},
{:phx_new, "~> 1.5.6", only: [:dev, :test]},
# Docs dependencies
{:ex_doc, "~> 0.20", only: :docs}
]
end
defp docs do
[
main: "overview",
source_ref: "v#{@version}",
source_url: "https://github.com/aaronrenner/phx_gen_auth",
extras: extras()
]
end
defp extras do
["guides/overview.md"]
end
defp package do
[
maintainers: ["Aaron Renner", "José Valim"],
licenses: ["Apache 2"],
links: %{"GitHub" => "https://github.com/aaronrenner/phx_gen_auth"}
]
end
end
| 21.709677 | 73 | 0.579495 |
033d09a1dfdfeb0418d7fe8a5df71248dead010f | 27 | ex | Elixir | statifier-ex/lib/statifier.ex | riddler/statifier | 4f4e2d1cdbb0e570a06ddf087caf2f986d1024b5 | [
"MIT"
] | 9 | 2020-08-06T23:45:07.000Z | 2020-12-21T01:47:06.000Z | statifier-ex/lib/statifier.ex | riddler/statifier | 4f4e2d1cdbb0e570a06ddf087caf2f986d1024b5 | [
"MIT"
] | 19 | 2020-03-12T03:57:46.000Z | 2020-09-11T03:40:48.000Z | statifier-ex/lib/statifier.ex | riddler/statifier | 4f4e2d1cdbb0e570a06ddf087caf2f986d1024b5 | [
"MIT"
] | null | null | null | defmodule Statifier do
end
| 9 | 22 | 0.851852 |
033d80cf2eb80bf329a389442cd6a1bed951f5a0 | 4,012 | exs | Elixir | test/swoosh/adapters/dyn_test.exs | nallwhy/swoosh | c4dfefda0d347fe73b189ac91eede3d73d6f6904 | [
"MIT"
] | 1,214 | 2016-03-21T16:56:42.000Z | 2022-03-31T19:10:11.000Z | test/swoosh/adapters/dyn_test.exs | nallwhy/swoosh | c4dfefda0d347fe73b189ac91eede3d73d6f6904 | [
"MIT"
] | 399 | 2016-03-21T23:11:32.000Z | 2022-03-04T10:52:28.000Z | test/swoosh/adapters/dyn_test.exs | nallwhy/swoosh | c4dfefda0d347fe73b189ac91eede3d73d6f6904 | [
"MIT"
] | 208 | 2016-03-21T21:12:11.000Z | 2022-03-04T06:35:33.000Z | defmodule Swoosh.Adapters.DynTest do
use Swoosh.AdapterCase, async: true
import Swoosh.Email
alias Swoosh.DeliveryError
alias Swoosh.Adapters.Dyn
@success_response """
{
"response": {
"status": 200,
"message": "OK",
"data": "250 2.1.5 Ok"
}
}
"""
setup do
bypass = Bypass.open
config = [base_url: "http://localhost:#{bypass.port}",
api_key: "fake"]
valid_email =
new()
|> from({"T Stark", "[email protected]"})
|> to({"Steve Rogers", "[email protected]"})
|> subject("Hello, Avengers!")
|> html_body("<h1>Hello</h1>")
{:ok, bypass: bypass, valid_email: valid_email, config: config}
end
test "a sent email results in :ok", %{bypass: bypass, config: config, valid_email: email} do
Bypass.expect bypass, fn conn ->
conn = parse(conn)
expected_path = "/rest/json/send"
body_params = %{
"apikey" => "fake",
"bodyhtml" => "<h1>Hello</h1>",
"from" => "\"T Stark\" <[email protected]>",
"subject" => "Hello, Avengers!",
"to" => "\"Steve Rogers\" <[email protected]>"
}
assert body_params == conn.body_params
assert expected_path == conn.request_path
assert "POST" == conn.method
Plug.Conn.resp(conn, 200, @success_response)
end
assert Dyn.deliver(email, config) == {:ok, "OK"}
end
test "an email with attachments results in DeliveryError", %{config: config, valid_email: email} do
email_with_attachments = email
|> attachment("README.md")
assert_raise DeliveryError, fn ->
Dyn.deliver(email_with_attachments, config)
end
end
test "deliver/1 with all fields returns :ok", %{bypass: bypass, config: config} do
email =
new()
|> from({"T Stark", "[email protected]"})
|> to({"Steve Rogers", "[email protected]"})
|> to("[email protected]")
|> bcc([{"Clinton Francis Barton", "[email protected]"}, {"", "[email protected]"}])
|> subject("Hello, Avengers!")
|> html_body("<h1>Hello</h1>")
|> text_body("Hello")
Bypass.expect bypass, fn conn ->
conn = parse(conn)
body_params = %{
"apikey" => "fake",
"bcc" => %{
"1" => "\"Clinton Francis Barton\" <[email protected]>",
"2" => "[email protected]"
},
"bodyhtml" => "<h1>Hello</h1>",
"bodytext" => "Hello",
"from" => "\"T Stark\" <[email protected]>",
"subject" => "Hello, Avengers!",
"to" => "[email protected], \"Steve Rogers\" <[email protected]>"
}
assert body_params == conn.body_params
Plug.Conn.resp(conn, 200, @success_response)
end
assert Dyn.deliver(email, config) == {:ok, "OK"}
end
test "deliver/1 with 4xx response", %{bypass: bypass, config: config, valid_email: email} do
Bypass.expect bypass, fn conn ->
Plug.Conn.resp(conn, 404, "Not Found")
end
assert Dyn.deliver(email, config) == {:error, "Not found"}
end
test "deliver/1 with 503 response", %{bypass: bypass, valid_email: email, config: config} do
Bypass.expect bypass, fn conn ->
Plug.Conn.resp(conn, 503, "Service Unavailable")
end
assert Dyn.deliver(email, config) == {:error, "Service Unavailable"}
end
test "deliver/1 with 500 response", %{bypass: bypass, valid_email: email, config: config} do
Bypass.expect bypass, fn conn ->
Plug.Conn.resp(conn, 500, "{\"message\": \"error\"}")
end
assert Dyn.deliver(email, config) == {:error, "Error: \"{\\\"message\\\": \\\"error\\\"}\""}
end
test "validate_config/1 with valid config", %{config: config} do
assert Dyn.validate_config(config) == :ok
end
test "validate_config/1 with invalid config" do
assert_raise ArgumentError, """
expected [:api_key] to be set, got: []
""", fn ->
Dyn.validate_config([])
end
end
end
| 30.625954 | 102 | 0.593968 |
033dbd99450bc3dc08131d218f218bb365ec87e3 | 3,483 | ex | Elixir | lib/mnesiam.ex | mustafaturan/mnesiam | 39b4645a6ccc3e5cea85e735fe5627b1f8d8034e | [
"Unlicense"
] | 17 | 2017-04-20T01:07:42.000Z | 2022-01-20T08:03:56.000Z | lib/mnesiam.ex | mustafaturan/mnesiam | 39b4645a6ccc3e5cea85e735fe5627b1f8d8034e | [
"Unlicense"
] | null | null | null | lib/mnesiam.ex | mustafaturan/mnesiam | 39b4645a6ccc3e5cea85e735fe5627b1f8d8034e | [
"Unlicense"
] | 5 | 2018-01-23T04:59:04.000Z | 2018-07-24T16:13:28.000Z | defmodule Mnesiam do
@moduledoc """
Mnesia Manager
"""
require Logger
alias Mnesiam.Store
alias :mnesia, as: Mnesia
@doc """
Start Mnesia with/without a cluster
"""
def init_mnesia do
case Node.list() do
[h|_t] -> join_cluster(h)
[] -> start()
end
end
@doc """
Start Mnesia alone
"""
def start do
with :ok <- ensure_dir_exists(),
:ok <- Store.init_schema(),
:ok <- start_server(),
:ok <- Store.init_tables(),
:ok <- Store.ensure_tables_loaded() do
:ok
else
{:error, error} -> {:error, error}
end
end
@doc """
Join to a Mnesia cluster
"""
def join_cluster(cluster_node) do
with :ok <- ensure_stopped(),
:ok <- Store.delete_schema(),
:ok <- ensure_started(),
:ok <- connect(cluster_node),
:ok <- Store.copy_schema(Node.self()),
:ok <- Store.copy_tables(),
:ok <- Store.ensure_tables_loaded() do
:ok
else
{:error, reason} ->
Logger.log(:debug, fn -> inspect(reason) end)
{:error, reason}
end
end
@doc """
Cluster status
"""
def cluster_status do
running = Mnesia.system_info(:running_db_nodes)
stopped = Mnesia.system_info(:db_nodes) -- running
if stopped == [] do
[{:running_nodes, running}]
else
[{:running_nodes, running}, {:stopped_nodes, stopped}]
end
end
@doc """
Cluster with a node
"""
def connect(cluster_node) do
case Mnesia.change_config(:extra_db_nodes, [cluster_node]) do
{:ok, [_cluster_node]} -> :ok
{:ok, []} -> {:error, {:failed_to_connect_node, cluster_node}}
reason -> {:error, reason}
end
end
@doc """
Running Mnesia nodes
"""
def running_nodes do
Mnesia.system_info(:running_db_nodes)
end
@doc """
Is node in Mnesia cluster?
"""
def node_in_cluster?(cluster_node) do
Enum.member?(Mnesia.system_info(:db_nodes), cluster_node)
end
@doc """
Is running Mnesia node?
"""
def running_db_node?(cluster_node) do
Enum.member?(running_nodes(), cluster_node)
end
defp ensure_started do
with :ok <- start_server(),
:ok <- wait_for(:start) do
:ok
else
{:error, reason} -> {:error, reason}
end
end
defp ensure_stopped do
with :stopped <- stop_server(),
:ok <- wait_for(:stop) do
:ok
else
{:error, reason} -> {:error, reason}
end
end
defp ensure_dir_exists do
mnesia_dir = Mnesia.system_info(:directory)
with false <- File.exists?(mnesia_dir),
:ok <- File.mkdir(mnesia_dir) do
:ok
else
true -> :ok
{:error, reason} ->
Logger.log(:debug, fn -> inspect(reason) end)
{:error, reason}
end
end
defp start_server do
Mnesia.start()
end
defp stop_server do
Mnesia.stop()
end
defp wait_for(:start) do
case Mnesia.system_info(:is_running) do
:yes -> :ok
:no -> {:error, :mnesia_unexpectedly_stopped}
:stopping -> {:error, :mnesia_unexpectedly_stopping}
:starting ->
Process.sleep(1_000)
wait_for(:start)
end
end
defp wait_for(:stop) do
case Mnesia.system_info(:is_running) do
:no -> :ok
:yes -> {:error, :mnesia_unexpectedly_running}
:starting -> {:error, :mnesia_unexpectedly_starting}
:stopping ->
Process.sleep(1_000)
wait_for(:stop)
end
end
end
| 21.90566 | 72 | 0.583405 |
033ddb6fa79fb4685daaf2b00390c50a49716fd2 | 966 | exs | Elixir | config/config.exs | niku/nigiwaiki | 52c37d2abb2d19084d6bda69c10773ed87701135 | [
"MIT"
] | 2 | 2017-12-07T10:57:47.000Z | 2018-02-04T09:01:05.000Z | config/config.exs | niku/nigiwaiki | 52c37d2abb2d19084d6bda69c10773ed87701135 | [
"MIT"
] | null | null | null | config/config.exs | niku/nigiwaiki | 52c37d2abb2d19084d6bda69c10773ed87701135 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :nigiwiki,
ecto_repos: [Nigiwiki.Repo],
generators: [binary_id: true]
# Configures the endpoint
config :nigiwiki, NigiwikiWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "k9KJsSQlIJr/E1trqW6k1VPvSx+5L7/7O7+hXa9ckmCsFnTnvtYbB+HasoXpxMZs",
render_errors: [view: NigiwikiWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Nigiwiki.PubSub, adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 34.5 | 86 | 0.766046 |
033de5d70443a7aab416bab2f4c006af9d634c74 | 343 | ex | Elixir | test/support/post.ex | arnodirlam/dataloader | 91db9e4f3cb8e3b375c997d83e4b1f57f97e3bf7 | [
"MIT"
] | 415 | 2017-10-18T22:27:17.000Z | 2022-03-31T06:28:37.000Z | test/support/post.ex | CargoSense/data_loader | c18e86c22543235df06c420aecac086d0cc9680b | [
"MIT"
] | 108 | 2017-10-24T07:17:06.000Z | 2022-03-20T00:19:57.000Z | test/support/post.ex | CargoSense/data_loader | c18e86c22543235df06c420aecac086d0cc9680b | [
"MIT"
] | 83 | 2017-11-15T06:15:38.000Z | 2022-03-20T00:26:44.000Z | defmodule Dataloader.Post do
use Ecto.Schema
schema "posts" do
belongs_to(:user, Dataloader.User)
has_many(:likes, Dataloader.Like)
has_many(:scores, Dataloader.Score)
has_many(:liking_users, through: [:likes, :user])
field(:title, :string)
field(:status, :string)
field(:deleted_at, :utc_datetime)
end
end
| 22.866667 | 53 | 0.690962 |
033de9476827d05b03562ed2b8087184784b3f89 | 1,853 | exs | Elixir | clients/drive_activity/mix.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/drive_activity/mix.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/drive_activity/mix.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DriveActivity.Mixfile do
use Mix.Project
@version "0.4.0"
def project() do
[
app: :google_api_drive_activity,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/drive_activity"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.2"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Drive Activity API client library. Provides a historical view of activity in Google Drive.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/drive_activity",
"Homepage" => "https://developers.google.com/drive/activity/"
}
]
end
end
| 27.656716 | 105 | 0.658392 |
033e190acb2b4e85df1fefedefe0c464bcb814f4 | 131 | exs | Elixir | priv/repo/migrations/20161221070146_drop_snapshots_table.exs | mekongit/evercam_models | 7c36d52f2b47ffc89a3eaca65c2e353716fafacc | [
"MIT"
] | null | null | null | priv/repo/migrations/20161221070146_drop_snapshots_table.exs | mekongit/evercam_models | 7c36d52f2b47ffc89a3eaca65c2e353716fafacc | [
"MIT"
] | null | null | null | priv/repo/migrations/20161221070146_drop_snapshots_table.exs | mekongit/evercam_models | 7c36d52f2b47ffc89a3eaca65c2e353716fafacc | [
"MIT"
] | null | null | null | defmodule Evercam.Repo.Migrations.DropSnapshotsTable do
use Ecto.Migration
def change do
drop table(:snapshots)
end
end
| 16.375 | 55 | 0.770992 |
033e2f573bc3875fcd05b1d75a5dd7aaefc6374e | 4,087 | exs | Elixir | test/clova/skill_plug_test.exs | line/clova-cek-sdk-elixir | 543c26ed7fc4611351017d9ded6b5456b0854bb2 | [
"Apache-2.0"
] | 12 | 2018-07-24T01:42:03.000Z | 2020-05-29T23:06:44.000Z | test/clova/skill_plug_test.exs | line/clova-cek-sdk-elixir | 543c26ed7fc4611351017d9ded6b5456b0854bb2 | [
"Apache-2.0"
] | 2 | 2018-08-29T08:03:42.000Z | 2018-11-27T10:16:37.000Z | test/clova/skill_plug_test.exs | line/clova-cek-sdk-elixir | 543c26ed7fc4611351017d9ded6b5456b0854bb2 | [
"Apache-2.0"
] | 2 | 2018-08-01T09:36:51.000Z | 2019-10-07T05:18:33.000Z | defmodule Clova.SkillPlugTest do
use ExUnit.Case
use Plug.Test
defmodule MockJsonModule do
def encode!(_), do: Clova.SkillPlugTest.expected_response()
def decode!(_) do
%{
"context" => %{
"System" => %{
"application" => %{"applicationId" => "dummy"},
"device" => %{
"deviceId" => "dummy",
"display" => %{
"contentLayer" => %{"height" => 360, "width" => 640},
"dpi" => 96,
"orientation" => "landscape",
"size" => "l100"
}
},
"user" => %{"accessToken" => "dummy", "userId" => "dummy"}
}
},
"request" => %{"type" => "LaunchRequest"},
"session" => %{
"new" => true,
"sessionAttributes" => nil,
"sessionId" => "dummy",
"user" => %{"accessToken" => "dummy", "userId" => "dummy"}
},
"version" => "1.0"
}
end
end
defmodule MockDispatcher do
def handle_launch(_, _), do: %Clova.Response{}
end
test "init initialises parser, encoder, validator, and dispatcher plugs" do
common_opts = [
json_decoder: MockJsonModule,
json_encoder: MockJsonModule,
force_signature_valid: true,
dispatch_to: MockDispatcher
]
opts_parser =
Plug.Parsers.init(
parsers: [:json],
json_decoder: MockJsonModule,
body_reader: Clova.CachingBodyReader.spec()
)
opts_encoder = Clova.EncoderPlug.init(json_encoder: MockJsonModule)
opts_validator = Clova.ValidatorPlug.init(common_opts)
opts_dispatcher = Clova.DispatcherPlug.init(common_opts)
expected_opts = {opts_parser, opts_encoder, opts_validator, opts_dispatcher}
assert expected_opts == Clova.SkillPlug.init(common_opts)
end
test "call with a valid request generates a valid response" do
opts =
Clova.SkillPlug.init(
json_decoder: MockJsonModule,
json_encoder: MockJsonModule,
force_signature_valid: true,
dispatch_to: MockDispatcher
)
conn = Clova.SkillPlug.call(make_conn(), opts)
assert conn.resp_body == expected_response()
end
test "initialization errors from the wrapped plugs are propagated" do
assert_raise(
ArgumentError,
"JSON parser expects a :json_decoder option",
fn -> Clova.SkillPlug.init([]) end
)
assert_raise(
ArgumentError,
"Elixir.Clova.EncoderPlug expects a :json_encoder option",
fn -> Clova.SkillPlug.init(json_decoder: MockJsonModule) end
)
assert_raise(
ArgumentError,
"Must supply dispatch module as :dispatch_to argument",
fn -> Clova.SkillPlug.init(json_decoder: MockJsonModule, json_encoder: MockJsonModule) end
)
end
test ":json_module argument is converted to :json_encoder and :json_decoder" do
{parser, encoder} = extract_opts(json_module: A)
assert parser == A
assert encoder == A
{parser, encoder} = extract_opts(json_module: B, json_encoder: C)
assert parser == B
assert encoder == C
{parser, encoder} = extract_opts(json_module: D, json_decoder: E)
assert parser == E
assert encoder == D
{parser, encoder} = extract_opts(json_encoder: F, json_decoder: G)
assert parser == G
assert encoder == F
end
defp extract_opts(input_opts) do
{parser, encoder, _, _} = Clova.SkillPlug.init([{:dispatch_to, MockDispatcher} | input_opts])
{[{_, {_, parser_module, _}}], _, _} = parser
%{json_encoder: encoder_module} = encoder
{parser_module, encoder_module}
end
defp make_conn do
conn(:post, "/clova", "{}")
|> put_req_header("content-type", "application/json")
|> put_req_header("signaturecek", "aGVsbG8=")
end
def expected_response do
~S({"version": "1.0","sessionAttributes": {},"response": {"shouldEndSession": true,"reprompt": null,"outputSpeech": {"values": {"value": "dummy response","type": "PlainText","lang": "ja"},"type": "SimpleSpeech"},"directives": null,"card": null}})
end
end
| 30.962121 | 250 | 0.616589 |
033e30c348b007e00d9ef666f803d079d9790c80 | 2,219 | exs | Elixir | test/elasticsearch/indexing/bulk_test.exs | eli-papa/elasticsearch-elixir | 4b9a4e5b14d2b7b7dd7fb73c7130c98fc174bdd0 | [
"MIT"
] | null | null | null | test/elasticsearch/indexing/bulk_test.exs | eli-papa/elasticsearch-elixir | 4b9a4e5b14d2b7b7dd7fb73c7130c98fc174bdd0 | [
"MIT"
] | null | null | null | test/elasticsearch/indexing/bulk_test.exs | eli-papa/elasticsearch-elixir | 4b9a4e5b14d2b7b7dd7fb73c7130c98fc174bdd0 | [
"MIT"
] | null | null | null | defmodule Elasticsearch.Index.BulkTest do
use Elasticsearch.DataCase
import ExUnit.CaptureLog
alias Elasticsearch.{
Index.Bulk,
Test.Cluster,
Test.Store
}
defmodule TestException do
defexception [:message]
end
defmodule ErrorAPI do
@behaviour Elasticsearch.API
@impl true
def request(_config, :put, _url, _data, _opts) do
{:ok,
%HTTPoison.Response{
status_code: 201,
body: %{
"errors" => true,
"items" => [
%{"create" => %{"error" => %{"type" => "type", "reason" => "reason"}}}
]
}
}}
end
end
doctest Elasticsearch.Index.Bulk
describe ".upload/4" do
# Regression test for https://github.com/danielberkompas/elasticsearch-elixir/issues/10
@tag :regression
test "calls itself recursively properly" do
assert :ok =
Bulk.upload(Cluster, :posts, %{store: Store, sources: [Post]}, [
%TestException{}
])
end
test "collects errors properly" do
populate_posts_table(1)
assert :ok =
Cluster
|> Elasticsearch.Cluster.Config.get()
|> Map.put(:api, ErrorAPI)
|> Bulk.upload(:posts, %{store: Store, sources: [Post]})
end
test "respects bulk_* settings" do
populate_posts_table(2)
populate_comments_table(2)
Logger.configure(level: :debug)
output =
capture_log([level: :debug], fn ->
Elasticsearch.Index.create_from_file(
Cluster,
"posts-bulk-test",
"test/support/settings/posts.json"
)
Bulk.upload(Cluster, "posts-bulk-test", %{
store: Store,
sources: [Post],
bulk_page_size: 1,
bulk_wait_interval: 0
})
Elasticsearch.delete!(Cluster, "/posts-bulk-test")
end)
assert output =~ "Pausing 0ms between bulk pages"
end
end
describe ".encode!/3" do
test "respects _routing meta-field" do
assert Bulk.encode!(Cluster, %Comment{id: "my-id", post_id: "123"}, "my-index") =~
"\"_routing\":\"123\""
end
end
end
| 24.384615 | 91 | 0.557458 |
033e3366523966dc4e8b0d17c6ac2c30559526b3 | 78,760 | exs | Elixir | apps/admin_api/test/admin_api/v1/controllers/admin_auth/transaction_consumption_controller_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | 1 | 2018-12-07T06:21:21.000Z | 2018-12-07T06:21:21.000Z | apps/admin_api/test/admin_api/v1/controllers/admin_auth/transaction_consumption_controller_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/test/admin_api/v1/controllers/admin_auth/transaction_consumption_controller_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | null | null | null | defmodule AdminAPI.V1.AdminAuth.TransactionConsumptionControllerTest do
use AdminAPI.ConnCase, async: true
alias EWalletDB.{
Account,
AccountUser,
Repo,
Token,
Transaction,
TransactionConsumption,
User,
Wallet
}
alias EWallet.{BalanceFetcher, TestEndpoint}
alias EWallet.Web.{Date, Orchestrator, V1.WebsocketResponseSerializer}
alias Phoenix.Socket.Broadcast
alias EWallet.Web.V1.{
AccountSerializer,
TokenSerializer,
TransactionRequestSerializer,
TransactionSerializer,
TransactionConsumptionOverlay
}
alias AdminAPI.V1.Endpoint
alias EWallet.TransactionConsumptionScheduler
setup do
{:ok, _} = TestEndpoint.start_link()
account = Account.get_master_account()
{:ok, alice} = :user |> params_for() |> User.insert()
bob = get_test_user()
{:ok, _} = AccountUser.link(account.uuid, bob.uuid)
%{
account: account,
token: insert(:token),
alice: alice,
bob: bob,
account_wallet: Account.get_primary_wallet(account),
alice_wallet: User.get_primary_wallet(alice),
bob_wallet: User.get_primary_wallet(bob)
}
end
  # Listing endpoint: returns every consumption visible to the requesting admin,
  # with support for search, filtering, sorting and pagination.
  describe "/transaction_consumption.all" do
    # Seeds one user-owned and two account-owned consumptions so the listing,
    # filtering and pagination assertions below have known records to match.
    setup do
      user = get_test_user()
      account = Account.get_master_account()
      tc_1 = insert(:transaction_consumption, user_uuid: user.uuid, status: "pending")
      tc_2 = insert(:transaction_consumption, account_uuid: account.uuid, status: "pending")
      tc_3 = insert(:transaction_consumption, account_uuid: account.uuid, status: "confirmed")
      %{
        user: user,
        tc_1: tc_1,
        tc_2: tc_2,
        tc_3: tc_3
      }
    end
    # NOTE(review): this test uses "sort_by" => "created" while others use
    # "created_at" — "created" may fall back to the default sort; confirm.
    test "returns all the transaction_consumptions", meta do
      response =
        admin_user_request("/transaction_consumption.all", %{
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      transfers = [
        meta.tc_1,
        meta.tc_2,
        meta.tc_3
      ]
      assert length(response["data"]["data"]) == length(transfers)
      # All transfers made during setup should exist in the response
      assert Enum.all?(transfers, fn transfer ->
               Enum.any?(response["data"]["data"], fn data ->
                 transfer.id == data["id"]
               end)
             end)
    end
    # Exact-match filtering via `search_terms` keeps only "pending" records.
    test "returns all the transaction_consumptions for a specific status", meta do
      response =
        admin_user_request("/transaction_consumption.all", %{
          "sort_by" => "created_at",
          "sort_dir" => "asc",
          "search_terms" => %{
            "status" => "pending"
          }
        })
      assert response["data"]["data"] |> length() == 2
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_1.id,
               meta.tc_2.id
             ]
    end
    # Global text search via `search_term` (singular) matches the same records.
    test "returns all transaction_consumptions filtered", meta do
      response =
        admin_user_request("/transaction_consumption.all", %{
          "sort_by" => "created_at",
          "sort_dir" => "asc",
          "search_term" => "pending"
        })
      assert response["data"]["data"] |> length() == 2
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_1.id,
               meta.tc_2.id
             ]
    end
    # First page of two records, ascending: the two oldest consumptions, in order.
    test "returns all transaction_consumptions sorted and paginated", meta do
      response =
        admin_user_request("/transaction_consumption.all", %{
          "sort_by" => "created_at",
          "sort_dir" => "asc",
          "per_page" => 2,
          "page" => 1
        })
      assert response["data"]["data"] |> length() == 2
      transaction_1 = Enum.at(response["data"]["data"], 0)
      transaction_2 = Enum.at(response["data"]["data"], 1)
      assert transaction_2["created_at"] > transaction_1["created_at"]
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_1.id,
               meta.tc_2.id
             ]
    end
    # The endpoint will scope the result to the consumptions associated with the requester,
    # hence the customized factory attrs to make sure the results will be found.
    test_supports_match_any(
      "/transaction_consumption.all",
      :admin_auth,
      :transaction_consumption,
      :correlation_id,
      factory_attrs: %{
        user_uuid: get_test_admin().uuid,
        account_uuid: Account.get_master_account().uuid
      }
    )
    # The endpoint will scope the result to the consumptions associated with the requester,
    # hence the customized factory attrs to make sure the results will be found.
    test_supports_match_all(
      "/transaction_consumption.all",
      :admin_auth,
      :transaction_consumption,
      :correlation_id,
      factory_attrs: %{
        user_uuid: get_test_admin().uuid,
        account_uuid: Account.get_master_account().uuid
      }
    )
  end
  # Account-scoped listing endpoint: requires an `id` param and returns the
  # consumptions associated with that account.
  describe "/account.get_transaction_consumptions" do
    # Seeds one user-owned and two account-owned consumptions. Note: the
    # listing test below expects all three, so the user-owned one is also
    # visible for this account (presumably via the user/account link — confirm).
    setup do
      user = get_test_user()
      account = Account.get_master_account()
      tc_1 = insert(:transaction_consumption, user_uuid: user.uuid, status: "pending")
      tc_2 = insert(:transaction_consumption, account_uuid: account.uuid, status: "pending")
      tc_3 = insert(:transaction_consumption, account_uuid: account.uuid, status: "confirmed")
      %{
        user: user,
        account: account,
        tc_1: tc_1,
        tc_2: tc_2,
        tc_3: tc_3
      }
    end
    # Missing `id` yields a client:invalid_parameter error envelope.
    test "returns :invalid_parameter when account id is not provided" do
      response =
        admin_user_request("/account.get_transaction_consumptions", %{
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      assert response == %{
               "data" => %{
                 "code" => "client:invalid_parameter",
                 "description" => "Invalid parameter provided. `id` is required.",
                 "messages" => nil,
                 "object" => "error"
               },
               "success" => false,
               "version" => "1"
             }
    end
    # An unknown account id is reported as `unauthorized`, not "not found"
    # (the test name says :account_id_not_found but the asserted code is
    # "unauthorized" — presumably to avoid leaking account existence).
    test "returns :account_id_not_found when id is not provided" do
      response =
        admin_user_request("/account.get_transaction_consumptions", %{
          "id" => "fake",
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      assert response == %{
               "success" => false,
               "version" => "1",
               "data" => %{
                 "messages" => nil,
                 "object" => "error",
                 "code" => "unauthorized",
                 "description" => "You are not allowed to perform the requested operation."
               }
             }
    end
    # All three seeded consumptions are visible when querying by account id.
    test "returns all the transaction_consumptions for an account", meta do
      response =
        admin_user_request("/account.get_transaction_consumptions", %{
          "id" => meta.account.id,
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      transfers = [
        meta.tc_1,
        meta.tc_2,
        meta.tc_3
      ]
      assert length(response["data"]["data"]) == 3
      # All transfers made during setup should exist in the response
      assert Enum.all?(transfers, fn transfer ->
               Enum.any?(response["data"]["data"], fn data ->
                 transfer.id == data["id"]
               end)
             end)
    end
    # Status filtering narrows the account's results to the pending records.
    test "returns all the transaction_consumptions for a specific status", meta do
      response =
        admin_user_request("/account.get_transaction_consumptions", %{
          "id" => meta.account.id,
          "sort_by" => "created_at",
          "sort_dir" => "asc",
          "search_terms" => %{
            "status" => "pending"
          }
        })
      assert response["data"]["data"] |> length() == 2
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_1.id,
               meta.tc_2.id
             ]
    end
    # First ascending page of two: the oldest two consumptions, in order.
    test "returns all transaction_consumptions sorted and paginated", meta do
      response =
        admin_user_request("/account.get_transaction_consumptions", %{
          "id" => meta.account.id,
          "sort_by" => "created_at",
          "sort_dir" => "asc",
          "per_page" => 2,
          "page" => 1
        })
      assert response["data"]["data"] |> length() == 2
      transaction_1 = Enum.at(response["data"]["data"], 0)
      transaction_2 = Enum.at(response["data"]["data"], 1)
      assert transaction_2["created_at"] > transaction_1["created_at"]
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_1.id,
               meta.tc_2.id
             ]
    end
  end
  # User-scoped listing endpoint: accepts `id`, `user_id` or `provider_user_id`
  # and returns only the consumptions belonging to that user.
  describe "/user.get_transaction_consumptions" do
    # Seeds one account-owned consumption (tc_1, must be excluded from
    # user-scoped results) and two user-owned consumptions (tc_2, tc_3).
    setup do
      user = get_test_user()
      account = Account.get_master_account()
      tc_1 = insert(:transaction_consumption, account_uuid: account.uuid, status: "pending")
      tc_2 = insert(:transaction_consumption, user_uuid: user.uuid, status: "pending")
      tc_3 = insert(:transaction_consumption, user_uuid: user.uuid, status: "confirmed")
      %{
        user: user,
        account: account,
        tc_1: tc_1,
        tc_2: tc_2,
        tc_3: tc_3
      }
    end
    # Omitting every user identifier yields a client:invalid_parameter error.
    test "returns :invalid_parameter when id or provider_user_id is not provided" do
      response =
        admin_user_request("/user.get_transaction_consumptions", %{
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      assert response == %{
               "data" => %{
                 "code" => "client:invalid_parameter",
                 "description" =>
                   "Invalid parameter provided. `user_id` or `provider_user_id` is required.",
                 "messages" => nil,
                 "object" => "error"
               },
               "success" => false,
               "version" => "1"
             }
    end
    # An unknown `id` yields the user:id_not_found error envelope.
    test "returns :id_not_found when id is not valid" do
      response =
        admin_user_request("/user.get_transaction_consumptions", %{
          "id" => "fake",
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      assert response == %{
               "success" => false,
               "version" => "1",
               "data" => %{
                 "messages" => nil,
                 "object" => "error",
                 "code" => "user:id_not_found",
                 "description" => "There is no user corresponding to the provided id."
               }
             }
    end
    # An unknown `provider_user_id` yields its own dedicated error code.
    test "returns :provider_user_id_not_found when provider_user_id is not valid" do
      response =
        admin_user_request("/user.get_transaction_consumptions", %{
          "provider_user_id" => "fake",
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      assert response == %{
               "success" => false,
               "version" => "1",
               "data" => %{
                 "messages" => nil,
                 "object" => "error",
                 "code" => "user:provider_user_id_not_found",
                 "description" =>
                   "There is no user corresponding to the provided provider_user_id."
               }
             }
    end
    # Lookup by `id`: only the two user-owned consumptions come back.
    test "returns all the transaction_consumptions for a user when given an id", meta do
      response =
        admin_user_request("/user.get_transaction_consumptions", %{
          "id" => meta.user.id,
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      assert length(response["data"]["data"]) == 2
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_2.id,
               meta.tc_3.id
             ]
    end
    # Lookup by `user_id`: same result set as lookup by `id`.
    test "returns all the transaction_consumptions for a user when given a user_id", meta do
      response =
        admin_user_request("/user.get_transaction_consumptions", %{
          "user_id" => meta.user.id,
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      assert length(response["data"]["data"]) == 2
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_2.id,
               meta.tc_3.id
             ]
    end
    # Lookup by `provider_user_id`: same result set as the other identifiers.
    test "returns all the transaction_consumptions for a user when given a provider_user_id",
         meta do
      response =
        admin_user_request("/user.get_transaction_consumptions", %{
          "provider_user_id" => meta.user.provider_user_id,
          "sort_by" => "created",
          "sort_dir" => "asc"
        })
      assert length(response["data"]["data"]) == 2
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_2.id,
               meta.tc_3.id
             ]
    end
    # Status filtering keeps only the user's single pending consumption.
    test "returns all the transaction_consumptions for a specific status", meta do
      response =
        admin_user_request("/user.get_transaction_consumptions", %{
          "user_id" => meta.user.id,
          "sort_by" => "created_at",
          "sort_dir" => "asc",
          "search_terms" => %{
            "status" => "pending"
          }
        })
      assert response["data"]["data"] |> length() == 1
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_2.id
             ]
    end
    # First ascending page of two: both user consumptions, oldest first.
    test "returns all transaction_consumptions sorted and paginated", meta do
      response =
        admin_user_request("/user.get_transaction_consumptions", %{
          "user_id" => meta.user.id,
          "sort_by" => "created_at",
          "sort_dir" => "asc",
          "per_page" => 2,
          "page" => 1
        })
      assert response["data"]["data"] |> length() == 2
      transaction_1 = Enum.at(response["data"]["data"], 0)
      transaction_2 = Enum.at(response["data"]["data"], 1)
      assert transaction_2["created_at"] > transaction_1["created_at"]
      assert Enum.map(response["data"]["data"], fn t ->
               t["id"]
             end) == [
               meta.tc_2.id,
               meta.tc_3.id
             ]
    end
  end
describe "/transaction_request.get_transaction_consumptions" do
# Seeds three consumptions for the /transaction_request.get_transaction_consumptions
# tests:
#   - tc_1: attached only to a standalone account (control record; must be
#     excluded when listing by the transaction request below)
#   - tc_2: pending consumption on the transaction request
#   - tc_3: confirmed consumption on the same transaction request
setup do
account = insert(:account)
transaction_request = insert(:transaction_request)
tc_1 = insert(:transaction_consumption, account_uuid: account.uuid, status: "pending")
tc_2 =
insert(
:transaction_consumption,
transaction_request_uuid: transaction_request.uuid,
status: "pending"
)
tc_3 =
insert(
:transaction_consumption,
transaction_request_uuid: transaction_request.uuid,
status: "confirmed"
)
# The returned map is merged into each test's `meta` context.
%{
transaction_request: transaction_request,
tc_1: tc_1,
tc_2: tc_2,
tc_3: tc_3
}
end
# Omitting the required formatted_transaction_request_id must yield a
# client:invalid_parameter error envelope.
test "returns :invalid_parameter when formatted_transaction_request_id is not provided" do
  response =
    admin_user_request("/transaction_request.get_transaction_consumptions", %{
      "sort_by" => "created",
      "sort_dir" => "asc"
    })

  expected_error = %{
    "code" => "client:invalid_parameter",
    "description" =>
      "Invalid parameter provided. `formatted_transaction_request_id` is required.",
    "messages" => nil,
    "object" => "error"
  }

  assert response == %{"data" => expected_error, "success" => false, "version" => "1"}
end
# An id that matches no transaction request is reported as :unauthorized
# (not "not found") so callers cannot probe for existing ids.
test "returns :unauthorized when formatted_transaction_request_id is not valid" do
  response =
    admin_user_request("/transaction_request.get_transaction_consumptions", %{
      "formatted_transaction_request_id" => "fake",
      "sort_by" => "created",
      "sort_dir" => "asc"
    })

  expected_error = %{
    "code" => "unauthorized",
    "messages" => nil,
    "object" => "error",
    "description" => "You are not allowed to perform the requested operation."
  }

  assert response == %{"success" => false, "version" => "1", "data" => expected_error}
end
# Only the consumptions attached to the request (tc_2, tc_3) are returned;
# tc_1 belongs to a separate account and is excluded.
# NOTE(review): sibling tests sort by "created_at"; confirm "created" here
# is intentional.
test "returns all the transaction_consumptions for a transaction_request", meta do
  params = %{
    "formatted_transaction_request_id" => meta.transaction_request.id,
    "sort_by" => "created",
    "sort_dir" => "asc"
  }

  response = admin_user_request("/transaction_request.get_transaction_consumptions", params)

  records = response["data"]["data"]
  assert length(records) == 2
  assert Enum.map(records, & &1["id"]) == [meta.tc_2.id, meta.tc_3.id]
end
# Filtering the request's consumptions by status "pending" leaves tc_2 only
# (tc_3 is confirmed).
test "returns all the transaction_consumptions for a specific status", meta do
  response =
    admin_user_request("/transaction_request.get_transaction_consumptions", %{
      "formatted_transaction_request_id" => meta.transaction_request.id,
      "sort_by" => "created_at",
      "sort_dir" => "asc",
      "search_terms" => %{"status" => "pending"}
    })

  records = response["data"]["data"]
  assert length(records) == 1
  assert Enum.map(records, & &1["id"]) == [meta.tc_2.id]
end
# Page 1 with per_page 2 returns tc_2 then tc_3 in ascending creation order.
test "returns all transaction_consumptions sorted and paginated", meta do
  response =
    admin_user_request("/transaction_request.get_transaction_consumptions", %{
      "formatted_transaction_request_id" => meta.transaction_request.id,
      "sort_by" => "created_at",
      "sort_dir" => "asc",
      "per_page" => 2,
      "page" => 1
    })

  records = response["data"]["data"]
  assert length(records) == 2

  first = Enum.at(records, 0)
  second = Enum.at(records, 1)

  # Ascending sort: the second record must be newer than the first.
  assert second["created_at"] > first["created_at"]
  assert Enum.map(records, & &1["id"]) == [meta.tc_2.id, meta.tc_3.id]
end
end
describe "/wallet.get_transaction_consumptions" do
# Seeds three consumptions for the /wallet.get_transaction_consumptions tests:
#   - tc_1: attached only to an account (control record; must be excluded
#     when listing by the wallet below)
#   - tc_2: pending consumption on the wallet
#   - tc_3: confirmed consumption on the same wallet
# The account is linked to the wallet's user so the admin can access it.
setup do
account = insert(:account)
wallet = insert(:wallet)
{:ok, _} = AccountUser.link(account.uuid, wallet.user_uuid)
tc_1 = insert(:transaction_consumption, account_uuid: account.uuid, status: "pending")
tc_2 =
insert(
:transaction_consumption,
wallet_address: wallet.address,
status: "pending"
)
tc_3 =
insert(
:transaction_consumption,
wallet_address: wallet.address,
status: "confirmed"
)
# The returned map is merged into each test's `meta` context.
%{
wallet: wallet,
tc_1: tc_1,
tc_2: tc_2,
tc_3: tc_3
}
end
# Omitting the required wallet address must yield a client:invalid_parameter
# error envelope.
test "returns :invalid_parameter when address is not provided" do
  response =
    admin_user_request("/wallet.get_transaction_consumptions", %{
      "sort_by" => "created",
      "sort_dir" => "asc"
    })

  expected_error = %{
    "code" => "client:invalid_parameter",
    "description" => "Invalid parameter provided. `address` is required.",
    "messages" => nil,
    "object" => "error"
  }

  assert response == %{"data" => expected_error, "success" => false, "version" => "1"}
end
# An address that matches no wallet is reported as :unauthorized (not
# "not found") so callers cannot probe for existing addresses.
# Fix: the test name previously said "is not provided", but an address IS
# provided here (a fake one) — the "not provided" case is the sibling test
# above. Renamed to match the pattern used by the transaction_request
# describe ("is not valid").
test "returns :unauthorized when address is not valid" do
  response =
    admin_user_request("/wallet.get_transaction_consumptions", %{
      "address" => "fake-0000-0000-0000",
      "sort_by" => "created",
      "sort_dir" => "asc"
    })

  assert response == %{
           "success" => false,
           "version" => "1",
           "data" => %{
             "code" => "unauthorized",
             "messages" => nil,
             "object" => "error",
             "description" => "You are not allowed to perform the requested operation."
           }
         }
end
# Only the consumptions attached to the wallet (tc_2, tc_3) are returned;
# tc_1 belongs to the account only and is excluded.
test "returns all the transaction_consumptions for a wallet", meta do
  params = %{
    "address" => meta.wallet.address,
    "sort_by" => "created",
    "sort_dir" => "asc"
  }

  response = admin_user_request("/wallet.get_transaction_consumptions", params)

  records = response["data"]["data"]
  assert length(records) == 2
  assert Enum.map(records, & &1["id"]) == [meta.tc_2.id, meta.tc_3.id]
end
# Filtering the wallet's consumptions by status "pending" leaves tc_2 only
# (tc_3 is confirmed).
test "returns all the transaction_consumptions for a specific status", meta do
  response =
    admin_user_request("/wallet.get_transaction_consumptions", %{
      "address" => meta.wallet.address,
      "sort_by" => "created_at",
      "sort_dir" => "asc",
      "search_terms" => %{"status" => "pending"}
    })

  records = response["data"]["data"]
  assert length(records) == 1
  assert Enum.map(records, & &1["id"]) == [meta.tc_2.id]
end
# Page 1 with per_page 2 returns tc_2 then tc_3 in ascending creation order.
test "returns all transaction_consumptions sorted and paginated", meta do
  response =
    admin_user_request("/wallet.get_transaction_consumptions", %{
      "address" => meta.wallet.address,
      "sort_by" => "created_at",
      "sort_dir" => "asc",
      "per_page" => 2,
      "page" => 1
    })

  records = response["data"]["data"]
  assert length(records) == 2

  first = Enum.at(records, 0)
  second = Enum.at(records, 1)

  # Ascending sort: the second record must be newer than the first.
  assert second["created_at"] > first["created_at"]
  assert Enum.map(records, & &1["id"]) == [meta.tc_2.id, meta.tc_3.id]
end
end
describe "/transaction_consumption.get" do
# Fetching an existing consumption by id succeeds and returns that record.
test "returns the transaction consumption" do
  consumption = insert(:transaction_consumption)

  response = admin_user_request("/transaction_consumption.get", %{id: consumption.id})

  assert response["success"] == true
  assert response["data"]["id"] == consumption.id
end
# An unknown consumption id yields a typed not-found error envelope.
test "returns an error when the consumption ID is not found" do
  response = admin_user_request("/transaction_consumption.get", %{id: "123"})

  expected_error = %{
    "code" => "transaction_consumption:transaction_consumption_not_found",
    "description" => "There is no transaction consumption corresponding to the provided ID.",
    "messages" => nil,
    "object" => "error"
  }

  assert response == %{"success" => false, "version" => "1", "data" => expected_error}
end
end
describe "/transaction_request.consume" do
# Happy path: an account consumes a "receive" request created by alice.
# The full response payload is asserted field-by-field, then the persisted
# transaction is checked directly in the database.
test "consumes the request and transfers the appropriate amount of tokens", meta do
# Alice requests to receive 100_000 tokens (in subunits).
transaction_request =
insert(
:transaction_request,
type: "receive",
token_uuid: meta.token.uuid,
user_uuid: meta.alice.uuid,
wallet: meta.alice_wallet,
amount: 100_000 * meta.token.subunit_to_unit
)
set_initial_balance(%{
address: meta.bob_wallet.address,
token: meta.token,
amount: 150_000
})
# amount is nil: the consumption inherits the amount from the request.
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: nil,
address: nil,
metadata: nil,
token_id: nil,
account_id: meta.account.id
})
# Reload the persisted consumption with its associations preloaded so the
# serialized response can be compared against it.
inserted_consumption = TransactionConsumption |> Repo.all() |> Enum.at(0)
{:ok, inserted_consumption} =
Orchestrator.one(inserted_consumption, TransactionConsumptionOverlay)
request = inserted_consumption.transaction_request
inserted_transaction = inserted_consumption.transaction
# `Date` below is presumably a project helper (nil-tolerant ISO-8601
# formatter), not Elixir's Date — TODO confirm the alias at the top of
# this file.
assert response == %{
"success" => true,
"version" => "1",
"data" => %{
"address" => meta.account_wallet.address,
"amount" => nil,
"estimated_consumption_amount" => 100_000 * meta.token.subunit_to_unit,
"estimated_request_amount" => 100_000 * meta.token.subunit_to_unit,
"finalized_request_amount" => 100_000 * meta.token.subunit_to_unit,
"finalized_consumption_amount" => 100_000 * meta.token.subunit_to_unit,
"correlation_id" => nil,
"id" => inserted_consumption.id,
"socket_topic" => "transaction_consumption:#{inserted_consumption.id}",
"idempotency_token" => "123",
"object" => "transaction_consumption",
"status" => "confirmed",
"token_id" => meta.token.id,
"token" => meta.token |> TokenSerializer.serialize() |> stringify_keys(),
"transaction_request_id" => transaction_request.id,
"transaction_request" =>
request |> TransactionRequestSerializer.serialize() |> stringify_keys(),
"transaction_id" => inserted_transaction.id,
"transaction" =>
inserted_transaction |> TransactionSerializer.serialize() |> stringify_keys(),
"user_id" => nil,
"user" => nil,
"account_id" => meta.account.id,
"account" => meta.account |> AccountSerializer.serialize() |> stringify_keys(),
"exchange_account" => nil,
"exchange_account_id" => nil,
"exchange_wallet" => nil,
"exchange_wallet_address" => nil,
"metadata" => %{},
"encrypted_metadata" => %{},
"expiration_date" => nil,
"created_at" => Date.to_iso8601(inserted_consumption.inserted_at),
"approved_at" => Date.to_iso8601(inserted_consumption.approved_at),
"rejected_at" => Date.to_iso8601(inserted_consumption.rejected_at),
"confirmed_at" => Date.to_iso8601(inserted_consumption.confirmed_at),
"failed_at" => Date.to_iso8601(inserted_consumption.failed_at),
"expired_at" => nil
}
}
# The ledger entry moves the requested amount from the consuming account's
# wallet into alice's wallet, same token on both sides (no exchange).
assert inserted_transaction.from_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.from_token_uuid == meta.token.uuid
assert inserted_transaction.to_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.to_token_uuid == meta.token.uuid
assert inserted_transaction.to == meta.alice_wallet.address
assert inserted_transaction.from == meta.account_wallet.address
assert inserted_transaction.local_ledger_uuid != nil
end
# Sending from a burn wallet is forbidden: the API must reject the
# consumption, and the same error must be persisted on the consumption row.
test "fails to consume when trying to send from a burn wallet", meta do
  burn_wallet = Account.get_default_burn_wallet(meta.account)

  transaction_request =
    insert(
      :transaction_request,
      type: "send",
      token_uuid: meta.token.uuid,
      wallet: burn_wallet,
      amount: 100_000 * meta.token.subunit_to_unit
    )

  response =
    admin_user_request("/transaction_request.consume", %{
      idempotency_token: "123",
      formatted_transaction_request_id: transaction_request.id,
      correlation_id: nil,
      amount: nil,
      address: nil,
      metadata: nil,
      token_id: nil,
      user_id: meta.bob.id
    })

  error_description =
    "Invalid parameter provided. `from` can't be the address of a burn wallet."

  assert response["success"] == false
  assert response["data"]["code"] == "client:invalid_parameter"
  assert response["data"]["description"] == error_description

  # The failed consumption is still recorded, carrying the error details.
  consumption = TransactionConsumption |> Repo.all() |> List.first()
  assert consumption.error_code == "client:invalid_parameter"
  assert consumption.error_description == error_description
end
# Same happy path as above, but the request has no amount and the
# consumption supplies it as a STRING ("10000000"), which the API must
# parse into an integer subunit amount.
test "consumes the request and transfers the appropriate amount of tokens with string",
meta do
# Open-amount request: the consumer decides how much to send.
transaction_request =
insert(
:transaction_request,
type: "receive",
token_uuid: meta.token.uuid,
user_uuid: meta.alice.uuid,
wallet: meta.alice_wallet,
amount: nil
)
set_initial_balance(%{
address: meta.bob_wallet.address,
token: meta.token,
amount: 100_000 * meta.token.subunit_to_unit
})
# "10000000" is expected to equal 100_000 * subunit_to_unit — TODO confirm
# the factory's subunit_to_unit is 100.
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: "10000000",
address: nil,
metadata: nil,
token_id: nil,
account_id: meta.account.id
})
# Reload the persisted consumption with associations for comparison.
inserted_consumption = TransactionConsumption |> Repo.all() |> Enum.at(0)
{:ok, inserted_consumption} =
Orchestrator.one(inserted_consumption, TransactionConsumptionOverlay)
request = inserted_consumption.transaction_request
inserted_transaction = inserted_consumption.transaction
# Unlike the inherited-amount test, "amount" is set here because the
# consumption itself provided it.
assert response == %{
"success" => true,
"version" => "1",
"data" => %{
"address" => meta.account_wallet.address,
"amount" => 100_000 * meta.token.subunit_to_unit,
"estimated_consumption_amount" => 100_000 * meta.token.subunit_to_unit,
"estimated_request_amount" => 100_000 * meta.token.subunit_to_unit,
"finalized_request_amount" => 100_000 * meta.token.subunit_to_unit,
"finalized_consumption_amount" => 100_000 * meta.token.subunit_to_unit,
"correlation_id" => nil,
"id" => inserted_consumption.id,
"socket_topic" => "transaction_consumption:#{inserted_consumption.id}",
"idempotency_token" => "123",
"object" => "transaction_consumption",
"status" => "confirmed",
"token_id" => meta.token.id,
"token" => meta.token |> TokenSerializer.serialize() |> stringify_keys(),
"transaction_request_id" => transaction_request.id,
"transaction_request" =>
request |> TransactionRequestSerializer.serialize() |> stringify_keys(),
"transaction_id" => inserted_transaction.id,
"transaction" =>
inserted_transaction |> TransactionSerializer.serialize() |> stringify_keys(),
"user_id" => nil,
"user" => nil,
"account_id" => meta.account.id,
"account" => meta.account |> AccountSerializer.serialize() |> stringify_keys(),
"exchange_account" => nil,
"exchange_account_id" => nil,
"exchange_wallet" => nil,
"exchange_wallet_address" => nil,
"metadata" => %{},
"encrypted_metadata" => %{},
"expiration_date" => nil,
"created_at" => Date.to_iso8601(inserted_consumption.inserted_at),
"approved_at" => Date.to_iso8601(inserted_consumption.approved_at),
"rejected_at" => Date.to_iso8601(inserted_consumption.rejected_at),
"confirmed_at" => Date.to_iso8601(inserted_consumption.confirmed_at),
"failed_at" => Date.to_iso8601(inserted_consumption.failed_at),
"expired_at" => nil
}
}
# Ledger entry: parsed string amount moved from the account wallet to alice.
assert inserted_transaction.from_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.from_token_uuid == meta.token.uuid
assert inserted_transaction.to_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.to_token_uuid == meta.token.uuid
assert inserted_transaction.to == meta.alice_wallet.address
assert inserted_transaction.from == meta.account_wallet.address
assert inserted_transaction.local_ledger_uuid != nil
end
# Exchange flow where the REQUEST carries the exchange details: alice sends
# meta.token, bob consumes in token_2 at a 1:2 rate, with meta.account's
# wallet acting as the exchange intermediary.
test "consumes the request and transfers with exchange details in request",
meta do
token_2 = insert(:token)
mint!(token_2)
# 1 meta.token -> 2 token_2
_pair = insert(:exchange_pair, from_token: meta.token, to_token: token_2, rate: 2)
transaction_request =
insert(
:transaction_request,
type: "send",
token_uuid: meta.token.uuid,
user_uuid: meta.alice.uuid,
wallet: meta.alice_wallet,
amount: 100_000 * meta.token.subunit_to_unit,
exchange_account_uuid: meta.account.uuid,
exchange_wallet_address: meta.account_wallet.address
)
set_initial_balance(%{
address: meta.alice_wallet.address,
token: meta.token,
amount: 150_000
})
# The consumption only picks the destination token; exchange details come
# from the request.
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: nil,
address: nil,
metadata: nil,
token_id: token_2.id,
user_id: meta.bob.id
})
assert response["success"] == true
inserted_consumption = TransactionConsumption |> Repo.all() |> Enum.at(0)
inserted_transaction = Repo.get(Transaction, inserted_consumption.transaction_uuid)
# Request side is in meta.token; consumption side is doubled in token_2.
assert response["data"]["amount"] == nil
assert response["data"]["finalized_request_amount"] == 100_000 * meta.token.subunit_to_unit
assert response["data"]["finalized_consumption_amount"] == 200_000 * token_2.subunit_to_unit
assert response["data"]["estimated_request_amount"] == 100_000 * meta.token.subunit_to_unit
assert response["data"]["estimated_consumption_amount"] == 200_000 * token_2.subunit_to_unit
assert response["data"]["token_id"] == token_2.id
assert response["data"]["address"] == meta.bob_wallet.address
assert response["data"]["user_id"] == meta.bob.id
assert response["data"]["transaction_request"]["amount"] ==
100_000 * meta.token.subunit_to_unit
assert response["data"]["transaction_request"]["token_id"] == meta.token.id
assert response["data"]["transaction_request"]["address"] == meta.alice_wallet.address
assert response["data"]["transaction_request"]["user_id"] == meta.alice.id
# The serialized transaction must embed the exchange pair it used.
assert response["data"]["transaction"] != nil
assert response["data"]["transaction"]["exchange"]["exchange_pair"]["to_token_id"] != nil
assert response["data"]["transaction"]["exchange"]["exchange_pair"]["from_token_id"] != nil
# Persisted transaction: 100k meta.token out of alice, 200k token_2 to bob.
assert inserted_transaction.from_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.from_token_uuid == meta.token.uuid
assert inserted_transaction.to_amount == 200_000 * token_2.subunit_to_unit
assert inserted_transaction.to_token_uuid == token_2.uuid
assert inserted_transaction.to == meta.bob_wallet.address
assert inserted_transaction.from == meta.alice_wallet.address
assert inserted_transaction.local_ledger_uuid != nil
# Final balances: alice debited in meta.token; bob credited in token_2 at 2x.
{:ok, b1} = BalanceFetcher.get(meta.token.id, meta.alice_wallet)
{:ok, b2} = BalanceFetcher.get(token_2.id, meta.alice_wallet)
assert Enum.at(b1.balances, 0).amount == (150_000 - 100_000) * meta.token.subunit_to_unit
assert Enum.at(b2.balances, 0).amount == 0
{:ok, b1} = BalanceFetcher.get(meta.token.id, meta.bob_wallet)
{:ok, b2} = BalanceFetcher.get(token_2.id, meta.bob_wallet)
assert Enum.at(b1.balances, 0).amount == 0
assert Enum.at(b2.balances, 0).amount == 100_000 * 2 * meta.token.subunit_to_unit
end
# Exchange flow where the CONSUMPTION supplies the exchange wallet (the
# request carries none): the exchange account is derived from the supplied
# wallet address.
# Fix: the last transaction_request assertion used the misspelled key
# "exchange_accout_id", which made the nil check vacuously true; corrected
# to "exchange_account_id".
test "consumes the request and exchange with exchange_wallet in consumption",
meta do
token_2 = insert(:token)
mint!(token_2)
# 1 meta.token -> 2 token_2
_pair = insert(:exchange_pair, from_token: meta.token, to_token: token_2, rate: 2)
transaction_request =
insert(
:transaction_request,
type: "send",
token_uuid: meta.token.uuid,
user_uuid: meta.alice.uuid,
wallet: meta.alice_wallet,
amount: 100_000 * meta.token.subunit_to_unit
)
set_initial_balance(%{
address: meta.alice_wallet.address,
token: meta.token,
amount: 150_000
})
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: nil,
address: nil,
metadata: nil,
token_id: token_2.id,
user_id: meta.bob.id,
exchange_wallet_address: meta.account_wallet.address
})
assert response["success"] == true
inserted_consumption = TransactionConsumption |> Repo.all() |> Enum.at(0)
inserted_transaction = Repo.get(Transaction, inserted_consumption.transaction_uuid)
assert response["data"]["amount"] == nil
assert response["data"]["finalized_request_amount"] == 100_000 * meta.token.subunit_to_unit
assert response["data"]["finalized_consumption_amount"] == 200_000 * token_2.subunit_to_unit
assert response["data"]["estimated_request_amount"] == 100_000 * meta.token.subunit_to_unit
assert response["data"]["estimated_consumption_amount"] == 200_000 * token_2.subunit_to_unit
assert response["data"]["token_id"] == token_2.id
# The exchange account is resolved from the supplied wallet address.
assert response["data"]["exchange_wallet_address"] == meta.account_wallet.address
assert response["data"]["exchange_account_id"] == meta.account.id
assert response["data"]["transaction_request"]["amount"] ==
100_000 * meta.token.subunit_to_unit
assert response["data"]["transaction_request"]["token_id"] == meta.token.id
assert response["data"]["transaction_request"]["address"] == meta.alice_wallet.address
assert response["data"]["transaction_request"]["user_id"] == meta.alice.id
# The request itself never carried exchange details.
assert response["data"]["transaction_request"]["exchange_wallet_address"] == nil
assert response["data"]["transaction_request"]["exchange_account_id"] == nil
# Persisted transaction: 100k meta.token out of alice, 200k token_2 to bob.
assert inserted_transaction.from_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.from_token_uuid == meta.token.uuid
assert inserted_transaction.to_amount == 200_000 * token_2.subunit_to_unit
assert inserted_transaction.to_token_uuid == token_2.uuid
assert inserted_transaction.to == meta.bob_wallet.address
assert inserted_transaction.from == meta.alice_wallet.address
assert inserted_transaction.local_ledger_uuid != nil
# Final balances: alice debited in meta.token; bob credited in token_2 at 2x.
{:ok, b1} = BalanceFetcher.get(meta.token.id, meta.alice_wallet)
{:ok, b2} = BalanceFetcher.get(token_2.id, meta.alice_wallet)
assert Enum.at(b1.balances, 0).amount == (150_000 - 100_000) * meta.token.subunit_to_unit
assert Enum.at(b2.balances, 0).amount == 0
{:ok, b1} = BalanceFetcher.get(meta.token.id, meta.bob_wallet)
{:ok, b2} = BalanceFetcher.get(token_2.id, meta.bob_wallet)
assert Enum.at(b1.balances, 0).amount == 0
assert Enum.at(b2.balances, 0).amount == 100_000 * 2 * meta.token.subunit_to_unit
end
# Exchange flow where the CONSUMPTION supplies the exchange account (the
# request carries none): the exchange wallet is derived from the account.
# Fix: the last transaction_request assertion used the misspelled key
# "exchange_accout_id", which made the nil check vacuously true; corrected
# to "exchange_account_id".
test "consumes the request and exchange with exchange_account in consumption",
meta do
token_2 = insert(:token)
mint!(token_2)
# 1 meta.token -> 2 token_2
_pair = insert(:exchange_pair, from_token: meta.token, to_token: token_2, rate: 2)
transaction_request =
insert(
:transaction_request,
type: "send",
token_uuid: meta.token.uuid,
user_uuid: meta.alice.uuid,
wallet: meta.alice_wallet,
amount: 100_000 * meta.token.subunit_to_unit
)
set_initial_balance(%{
address: meta.alice_wallet.address,
token: meta.token,
amount: 150_000
})
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: nil,
address: nil,
metadata: nil,
token_id: token_2.id,
user_id: meta.bob.id,
exchange_account_id: meta.account.id
})
assert response["success"] == true
inserted_consumption = TransactionConsumption |> Repo.all() |> Enum.at(0)
inserted_transaction = Repo.get(Transaction, inserted_consumption.transaction_uuid)
assert response["data"]["amount"] == nil
assert response["data"]["finalized_request_amount"] == 100_000 * meta.token.subunit_to_unit
assert response["data"]["finalized_consumption_amount"] == 200_000 * token_2.subunit_to_unit
assert response["data"]["estimated_request_amount"] == 100_000 * meta.token.subunit_to_unit
assert response["data"]["estimated_consumption_amount"] == 200_000 * token_2.subunit_to_unit
assert response["data"]["token_id"] == token_2.id
# The exchange wallet is resolved from the supplied account.
assert response["data"]["exchange_wallet_address"] == meta.account_wallet.address
assert response["data"]["exchange_account_id"] == meta.account.id
assert response["data"]["transaction_request"]["amount"] ==
100_000 * meta.token.subunit_to_unit
assert response["data"]["transaction_request"]["token_id"] == meta.token.id
assert response["data"]["transaction_request"]["address"] == meta.alice_wallet.address
assert response["data"]["transaction_request"]["user_id"] == meta.alice.id
# The request itself never carried exchange details.
assert response["data"]["transaction_request"]["exchange_wallet_address"] == nil
assert response["data"]["transaction_request"]["exchange_account_id"] == nil
# Persisted transaction: 100k meta.token out of alice, 200k token_2 to bob.
assert inserted_transaction.from_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.from_token_uuid == meta.token.uuid
assert inserted_transaction.to_amount == 200_000 * token_2.subunit_to_unit
assert inserted_transaction.to_token_uuid == token_2.uuid
assert inserted_transaction.to == meta.bob_wallet.address
assert inserted_transaction.from == meta.alice_wallet.address
assert inserted_transaction.local_ledger_uuid != nil
# Final balances: alice debited in meta.token; bob credited in token_2 at 2x.
{:ok, b1} = BalanceFetcher.get(meta.token.id, meta.alice_wallet)
{:ok, b2} = BalanceFetcher.get(token_2.id, meta.alice_wallet)
assert Enum.at(b1.balances, 0).amount == (150_000 - 100_000) * meta.token.subunit_to_unit
assert Enum.at(b2.balances, 0).amount == 0
{:ok, b1} = BalanceFetcher.get(meta.token.id, meta.bob_wallet)
{:ok, b2} = BalanceFetcher.get(token_2.id, meta.bob_wallet)
assert Enum.at(b1.balances, 0).amount == 0
assert Enum.at(b2.balances, 0).amount == 100_000 * 2 * meta.token.subunit_to_unit
end
# When the consumption repeats the SAME exchange account the request already
# specifies, the consumption is accepted (contrast with the conflicting-
# details test below, which must fail).
test "transfer and exchange if request and consumption specify the same exchange wallet address",
meta do
token_2 = insert(:token)
mint!(token_2)
# 1 meta.token -> 2 token_2
_pair = insert(:exchange_pair, from_token: meta.token, to_token: token_2, rate: 2)
# The request pins the exchange account and wallet up front.
transaction_request =
insert(
:transaction_request,
type: "send",
token_uuid: meta.token.uuid,
user_uuid: meta.alice.uuid,
wallet: meta.alice_wallet,
amount: 100_000 * meta.token.subunit_to_unit,
exchange_account_uuid: meta.account.uuid,
exchange_wallet_address: meta.account_wallet.address
)
set_initial_balance(%{
address: meta.alice_wallet.address,
token: meta.token,
amount: 150_000
})
# The consumption names the same exchange account — no conflict.
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: nil,
address: nil,
metadata: nil,
token_id: token_2.id,
user_id: meta.bob.id,
exchange_account_id: meta.account.id
})
assert response["success"] == true
inserted_consumption = TransactionConsumption |> Repo.all() |> Enum.at(0)
inserted_transaction = Repo.get(Transaction, inserted_consumption.transaction_uuid)
assert response["data"]["amount"] == nil
assert response["data"]["finalized_request_amount"] == 100_000 * meta.token.subunit_to_unit
assert response["data"]["finalized_consumption_amount"] == 200_000 * token_2.subunit_to_unit
assert response["data"]["estimated_request_amount"] == 100_000 * meta.token.subunit_to_unit
assert response["data"]["estimated_consumption_amount"] == 200_000 * token_2.subunit_to_unit
assert response["data"]["token_id"] == token_2.id
assert response["data"]["exchange_wallet_address"] == meta.account_wallet.address
assert response["data"]["exchange_account_id"] == meta.account.id
assert response["data"]["transaction_request"]["amount"] ==
100_000 * meta.token.subunit_to_unit
assert response["data"]["transaction_request"]["token_id"] == meta.token.id
assert response["data"]["transaction_request"]["address"] == meta.alice_wallet.address
assert response["data"]["transaction_request"]["user_id"] == meta.alice.id
# Here the request DOES expose its own exchange details in the response.
assert response["data"]["transaction_request"]["exchange_wallet_address"] ==
meta.account_wallet.address
assert response["data"]["transaction_request"]["exchange_account_id"] == meta.account.id
# Persisted transaction: 100k meta.token out of alice, 200k token_2 to bob.
assert inserted_transaction.from_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.from_token_uuid == meta.token.uuid
assert inserted_transaction.to_amount == 200_000 * token_2.subunit_to_unit
assert inserted_transaction.to_token_uuid == token_2.uuid
assert inserted_transaction.to == meta.bob_wallet.address
assert inserted_transaction.from == meta.alice_wallet.address
assert inserted_transaction.local_ledger_uuid != nil
# Final balances: alice debited in meta.token; bob credited in token_2 at 2x.
{:ok, b1} = BalanceFetcher.get(meta.token.id, meta.alice_wallet)
{:ok, b2} = BalanceFetcher.get(token_2.id, meta.alice_wallet)
assert Enum.at(b1.balances, 0).amount == (150_000 - 100_000) * meta.token.subunit_to_unit
assert Enum.at(b2.balances, 0).amount == 0
{:ok, b1} = BalanceFetcher.get(meta.token.id, meta.bob_wallet)
{:ok, b2} = BalanceFetcher.get(token_2.id, meta.bob_wallet)
assert Enum.at(b1.balances, 0).amount == 0
assert Enum.at(b2.balances, 0).amount == 100_000 * 2 * meta.token.subunit_to_unit
end
# A consumption may not override exchange details the request already pins
# down: supplying a DIFFERENT exchange account must be rejected.
test "fails to consume if exchange details are different and already specified in request",
     meta do
  {:ok, account_2} = :account |> params_for() |> Account.insert()
  token_2 = insert(:token)
  mint!(token_2)
  _pair = insert(:exchange_pair, from_token: meta.token, to_token: token_2, rate: 2)

  # The request pins meta.account as the exchange account.
  transaction_request =
    insert(
      :transaction_request,
      type: "send",
      token_uuid: meta.token.uuid,
      user_uuid: meta.alice.uuid,
      wallet: meta.alice_wallet,
      amount: 100_000 * meta.token.subunit_to_unit,
      exchange_account_uuid: meta.account.uuid,
      exchange_wallet_address: meta.account_wallet.address
    )

  set_initial_balance(%{
    address: meta.alice_wallet.address,
    token: meta.token,
    amount: 150_000
  })

  # The consumption tries to use a different exchange account (account_2).
  response =
    admin_user_request("/transaction_request.consume", %{
      idempotency_token: "123",
      formatted_transaction_request_id: transaction_request.id,
      correlation_id: nil,
      amount: nil,
      address: nil,
      metadata: nil,
      token_id: token_2.id,
      user_id: meta.bob.id,
      exchange_account_id: account_2.id
    })

  assert response["success"] == false
  assert response["data"]["code"] == "consumption:request_already_contains_exchange_wallet"

  assert response["data"]["description"] ==
           "The transaction request for the given consumption already specify an exchange account and/or wallet."
end
# If neither the request nor the consumption carries an amount, the API
# must reject the consumption with invalid_parameter.
test "fails to consume and return an error when amount is not specified", meta do
  open_request =
    insert(
      :transaction_request,
      type: "receive",
      token_uuid: meta.token.uuid,
      user_uuid: meta.alice.uuid,
      wallet: meta.alice_wallet,
      amount: nil
    )

  consume_attrs = %{
    idempotency_token: "123",
    formatted_transaction_request_id: open_request.id,
    correlation_id: nil,
    amount: nil,
    address: nil,
    metadata: nil,
    token_id: nil,
    account_id: meta.account.id
  }

  response = admin_user_request("/transaction_request.consume", consume_attrs)

  assert response["success"] == false
  assert response["data"]["code"] == "client:invalid_parameter"

  assert response["data"]["description"] ==
           "Invalid parameter provided. `amount` is required for transaction consumption."
end
# Amounts are ledger subunits and must be integers: a float is rejected.
test "fails to consume and return an error when amount is a decimal number", meta do
  open_request =
    insert(
      :transaction_request,
      type: "receive",
      token_uuid: meta.token.uuid,
      user_uuid: meta.alice.uuid,
      wallet: meta.alice_wallet,
      amount: nil
    )

  consume_attrs = %{
    idempotency_token: "123",
    formatted_transaction_request_id: open_request.id,
    correlation_id: nil,
    amount: 1.2365,
    address: nil,
    metadata: nil,
    token_id: nil,
    account_id: meta.account.id
  }

  response = admin_user_request("/transaction_request.consume", consume_attrs)

  assert response["success"] == false
  assert response["data"]["code"] == "client:invalid_parameter"

  assert response["data"]["description"] ==
           "Invalid parameter provided. `amount` is not an integer: 1.2365."
end
# The consuming account's wallet is never funded here, so the ledger must
# reject the debit. The transaction row is still persisted, carrying the
# structured insufficient-funds error data.
test "fails to consume and return an insufficient funds error", meta do
transaction_request =
insert(
:transaction_request,
type: "receive",
token_uuid: meta.token.uuid,
user_uuid: meta.alice.uuid,
wallet: meta.alice_wallet,
amount: 100_000 * meta.token.subunit_to_unit
)
# No set_initial_balance/1 call: the account wallet balance stays at 0.
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: nil,
address: nil,
metadata: nil,
token_id: nil,
account_id: meta.account.id
})
inserted_consumption = TransactionConsumption |> Repo.all() |> Enum.at(0)
inserted_transaction = Repo.get(Transaction, inserted_consumption.transaction_uuid)
# The human-readable description embeds wallet address, token and amounts.
assert response == %{
"success" => false,
"version" => "1",
"data" => %{
"object" => "error",
"messages" => nil,
"code" => "transaction:insufficient_funds",
"description" =>
"The specified wallet (#{meta.account_wallet.address}) does not contain enough funds. Available: 0 #{
meta.token.id
} - Attempted debit: 100000 #{meta.token.id}"
}
}
# The failed transaction is recorded with the attempted amounts...
assert inserted_transaction.from_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.from_token_uuid == meta.token.uuid
assert inserted_transaction.to_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.to_token_uuid == meta.token.uuid
assert inserted_transaction.to == meta.alice_wallet.address
assert inserted_transaction.from == meta.account_wallet.address
# ...and the ledger error is stored as structured data, not as text.
assert inserted_transaction.error_code == "insufficient_funds"
assert inserted_transaction.error_description == nil
assert inserted_transaction.error_data == %{
"address" => meta.account_wallet.address,
"amount_to_debit" => 100_000 * meta.token.subunit_to_unit,
"current_amount" => 0,
"token_id" => meta.token.id
}
end
# Disabling the token after the request is created must block consumption.
test "fails to consume when token is disabled", meta do
  transaction_request =
    insert(
      :transaction_request,
      type: "receive",
      token_uuid: meta.token.uuid,
      user_uuid: meta.alice.uuid,
      wallet: meta.alice_wallet,
      amount: 100_000 * meta.token.subunit_to_unit
    )

  # Disable the token between request creation and consumption.
  {:ok, disabled_token} = Token.enable_or_disable(meta.token, %{enabled: false})

  response =
    admin_user_request("/transaction_request.consume", %{
      idempotency_token: "123",
      formatted_transaction_request_id: transaction_request.id,
      correlation_id: nil,
      amount: nil,
      address: nil,
      metadata: nil,
      token_id: disabled_token.id,
      account_id: meta.account.id
    })

  assert response["success"] == false
  assert response["data"]["code"] == "token:disabled"
end
# Consuming into a disabled (secondary) wallet must be rejected.
test "fails to consume when wallet is disabled", meta do
  {:ok, secondary_wallet} =
    Wallet.insert_secondary_or_burn(%{
      "account_uuid" => meta.account.uuid,
      "name" => "MySecondary",
      "identifier" => "secondary"
    })

  {:ok, disabled_wallet} = Wallet.enable_or_disable(secondary_wallet, %{enabled: false})

  transaction_request =
    insert(
      :transaction_request,
      type: "receive",
      token_uuid: meta.token.uuid,
      user_uuid: meta.alice.uuid,
      wallet: meta.alice_wallet,
      amount: 100_000 * meta.token.subunit_to_unit
    )

  response =
    admin_user_request("/transaction_request.consume", %{
      idempotency_token: "123",
      formatted_transaction_request_id: transaction_request.id,
      correlation_id: nil,
      amount: nil,
      address: disabled_wallet.address,
      metadata: nil,
      token_id: nil,
      account_id: meta.account.id
    })

  assert response["success"] == false
  assert response["data"]["code"] == "wallet:disabled"
end
# Passing `embed: ["account"]` must preload and serialize the account
# association in the response payload.
test "returns with preload if `embed` attribute is given", meta do
  transaction_request =
    insert(
      :transaction_request,
      type: "receive",
      token_uuid: meta.token.uuid,
      user_uuid: meta.alice.uuid,
      wallet: meta.alice_wallet,
      amount: 100_000 * meta.token.subunit_to_unit
    )

  set_initial_balance(%{
    address: meta.bob_wallet.address,
    token: meta.token,
    amount: 150_000
  })

  response =
    admin_user_request("/transaction_request.consume", %{
      idempotency_token: "123",
      formatted_transaction_request_id: transaction_request.id,
      correlation_id: nil,
      amount: nil,
      address: nil,
      metadata: nil,
      token_id: nil,
      account_id: meta.account.id,
      embed: ["account"]
    })

  refute is_nil(response["data"]["account"])
end
test "returns same transaction request consumption when idempotency token is the same",
meta do
transaction_request =
insert(
:transaction_request,
type: "receive",
token_uuid: meta.token.uuid,
account_uuid: meta.account.uuid,
user_uuid: meta.alice.uuid,
wallet: meta.alice_wallet,
amount: 100_000 * meta.token.subunit_to_unit
)
set_initial_balance(%{
address: meta.bob_wallet.address,
token: meta.token,
amount: 150_000
})
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "1234",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: nil,
address: nil,
metadata: nil,
token_id: nil,
account_id: meta.account.id
})
inserted_consumption = TransactionConsumption |> Repo.all() |> Enum.at(0)
inserted_transaction = Repo.get(Transaction, inserted_consumption.transaction_uuid)
assert response["success"] == true
assert response["data"]["id"] == inserted_consumption.id
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "1234",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: nil,
address: nil,
metadata: nil,
token_id: nil,
account_id: meta.account.id
})
inserted_consumption_2 = TransactionConsumption |> Repo.all() |> Enum.at(0)
inserted_transaction_2 = Repo.get(Transaction, inserted_consumption.transaction_uuid)
assert response["success"] == true
assert response["data"]["id"] == inserted_consumption_2.id
assert inserted_consumption.uuid == inserted_consumption_2.uuid
assert inserted_transaction.uuid == inserted_transaction_2.uuid
end
test "returns idempotency error if header is not specified" do
response =
admin_user_request("/transaction_request.consume", %{
transaction_request_id: "123",
correlation_id: nil,
amount: nil,
address: nil,
metadata: nil,
token_id: nil
})
assert response == %{
"success" => false,
"version" => "1",
"data" => %{
"code" => "client:invalid_parameter",
"description" => "Invalid parameter provided.",
"messages" => nil,
"object" => "error"
}
}
end
test "sends socket confirmation when require_confirmation and approved", meta do
mint!(meta.token)
# Create a require_confirmation transaction request that will be consumed soon
transaction_request =
insert(
:transaction_request,
type: "send",
token_uuid: meta.token.uuid,
account_uuid: meta.account.uuid,
wallet: meta.account_wallet,
amount: nil,
require_confirmation: true
)
request_topic = "transaction_request:#{transaction_request.id}"
# Start listening to the channels for the transaction request created above
Endpoint.subscribe(request_topic)
# Making the consumption, since we made the request require_confirmation, it will
# create a pending consumption that will need to be confirmed
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: 100_000 * meta.token.subunit_to_unit,
metadata: nil,
token_id: nil,
provider_user_id: meta.bob.provider_user_id
})
consumption_id = response["data"]["id"]
assert response["success"] == true
assert response["data"]["status"] == "pending"
assert response["data"]["transaction_id"] == nil
# Retrieve what just got inserted
inserted_consumption = TransactionConsumption.get(response["data"]["id"])
# We check that we receive the confirmation request above in the
# transaction request channel
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_request",
topic: "transaction_request:" <> _,
payload:
%{
# Ignore content
}
}
# We need to know once the consumption has been approved, so let's
# listen to the channel for it
Endpoint.subscribe("transaction_consumption:#{consumption_id}")
# Confirm the consumption
response =
admin_user_request("/transaction_consumption.approve", %{
id: consumption_id
})
assert response["success"] == true
assert response["data"]["id"] == inserted_consumption.id
assert response["data"]["status"] == "confirmed"
assert response["data"]["approved_at"] != nil
assert response["data"]["confirmed_at"] != nil
# Check that a transaction was inserted
inserted_transaction = Repo.get_by(Transaction, id: response["data"]["transaction_id"])
assert inserted_transaction.from_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.from_token_uuid == meta.token.uuid
assert inserted_transaction.to_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.to_token_uuid == meta.token.uuid
assert inserted_transaction.to == meta.bob_wallet.address
assert inserted_transaction.from == meta.account_wallet.address
assert inserted_transaction.local_ledger_uuid != nil
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_finalized",
topic: "transaction_consumption:" <> _,
payload:
%{
# Ignore content
}
}
# Unsubscribe from all channels
Endpoint.unsubscribe("transaction_request:#{transaction_request.id}")
Endpoint.unsubscribe("transaction_consumption:#{consumption_id}")
end
test "sends socket confirmation when require_confirmation and approved between users", meta do
# bob = test_user
set_initial_balance(%{
address: meta.bob_wallet.address,
token: meta.token,
amount: 1_000_000 * meta.token.subunit_to_unit
})
# Create a require_confirmation transaction request that will be consumed soon
transaction_request =
insert(
:transaction_request,
type: "send",
token_uuid: meta.token.uuid,
account_uuid: meta.account.uuid,
user_uuid: meta.bob.uuid,
wallet: meta.bob_wallet,
amount: nil,
require_confirmation: true
)
request_topic = "transaction_request:#{transaction_request.id}"
# Start listening to the channels for the transaction request created above
Endpoint.subscribe(request_topic)
# Making the consumption, since we made the request require_confirmation, it will
# create a pending consumption that will need to be confirmed
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: 100_000 * meta.token.subunit_to_unit,
metadata: nil,
token_id: nil,
address: meta.alice_wallet.address
})
consumption_id = response["data"]["id"]
assert response["success"] == true
assert response["data"]["status"] == "pending"
assert response["data"]["transaction_id"] == nil
# Retrieve what just got inserted
inserted_consumption = TransactionConsumption.get(response["data"]["id"])
# We check that we receive the confirmation request above in the
# transaction request channel
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_request",
topic: "transaction_request:" <> _,
payload:
%{
# Ignore content
}
}
# We need to know once the consumption has been approved, so let's
# listen to the channel for it
Endpoint.subscribe("transaction_consumption:#{consumption_id}")
# Confirm the consumption
response =
admin_user_request("/transaction_consumption.approve", %{
id: consumption_id
})
assert response["success"] == true
assert response["data"]["id"] == inserted_consumption.id
assert response["data"]["status"] == "confirmed"
assert response["data"]["approved_at"] != nil
assert response["data"]["confirmed_at"] != nil
# Check that a transaction was inserted
inserted_transaction = Repo.get_by(Transaction, id: response["data"]["transaction_id"])
assert inserted_transaction.from_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.to_amount == 100_000 * meta.token.subunit_to_unit
assert inserted_transaction.from_token_uuid == meta.token.uuid
assert inserted_transaction.to_token_uuid == meta.token.uuid
assert inserted_transaction.to == meta.alice_wallet.address
assert inserted_transaction.from == meta.bob_wallet.address
assert inserted_transaction.local_ledger_uuid != nil
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_finalized",
topic: "transaction_consumption:" <> _,
payload:
%{
# Ignore content
}
}
# Unsubscribe from all channels
Endpoint.unsubscribe("transaction_request:#{transaction_request.id}")
Endpoint.unsubscribe("transaction_consumption:#{consumption_id}")
end
test "sends a websocket expiration event when a consumption expires", meta do
# bob = test_user
set_initial_balance(%{
address: meta.bob_wallet.address,
token: meta.token,
amount: 1_000_000 * meta.token.subunit_to_unit
})
# Create a require_confirmation transaction request that will be consumed soon
transaction_request =
insert(
:transaction_request,
type: "send",
token_uuid: meta.token.uuid,
account_uuid: meta.account.uuid,
wallet: meta.account_wallet,
amount: nil,
require_confirmation: true,
# The consumption will expire after 1 second.
consumption_lifetime: 1
)
request_topic = "transaction_request:#{transaction_request.id}"
# Start listening to the channels for the transaction request created above
Endpoint.subscribe(request_topic)
# Making the consumption, since we made the request require_confirmation, it will
# create a pending consumption that will need to be confirmed
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: 100_000 * meta.token.subunit_to_unit,
metadata: nil,
token_id: nil,
address: meta.alice_wallet.address
})
consumption_id = response["data"]["id"]
assert response["success"] == true
assert response["data"]["status"] == "pending"
# The consumption is still valid...
:timer.sleep(1000)
# And now it's not!
# We should receive a transaction_consumption_finalized event.
# Let's also listen to the consumption channel.
Endpoint.subscribe(response["data"]["socket_topic"])
# We trigger the CRON task
TransactionConsumptionScheduler.expire_all()
# And we should now receive a finalized failed consumption.
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_finalized",
topic: "transaction_request:" <> _,
payload: payload
}
# Ensure the websocket serializer can serialize the payload
{:socket_push, :text, encoded} =
WebsocketResponseSerializer.fastlane!(%Broadcast{
topic: "transaction_request:#{transaction_request.id}",
event: "transaction_consumption_finalized",
payload: payload
})
decoded = Poison.decode!(encoded)
assert decoded["success"] == false
assert decoded["error"]["code"] == "transaction_consumption:expired"
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_finalized",
topic: "transaction_consumption:" <> _,
payload: payload
}
# Ensure the websocket serializer can serialize the payload
{:socket_push, :text, encoded} =
WebsocketResponseSerializer.fastlane!(%Broadcast{
topic: "transaction_consumption:#{consumption_id}",
event: "transaction_consumption_finalized",
payload: payload
})
decoded = Poison.decode!(encoded)
assert decoded["success"] == false
assert decoded["error"]["code"] == "transaction_consumption:expired"
# If we try to approve it now, it will fail since it has already expired.
response =
admin_user_request("/transaction_consumption.approve", %{
id: consumption_id
})
assert response["success"] == false
assert response["data"]["code"] == "transaction_consumption:expired"
# Unsubscribe from all channels
Endpoint.unsubscribe("transaction_request:#{transaction_request.id}")
Endpoint.unsubscribe("transaction_consumption:#{consumption_id}")
end
test "sends an error when approved without enough funds", meta do
{:ok, _} = AccountUser.link(meta.account.uuid, meta.bob.uuid)
# Create a require_confirmation transaction request that will be consumed soon
transaction_request =
insert(
:transaction_request,
type: "send",
token_uuid: meta.token.uuid,
account_uuid: meta.account.uuid,
user_uuid: meta.bob.uuid,
wallet: meta.bob_wallet,
amount: nil,
require_confirmation: true
)
request_topic = "transaction_request:#{transaction_request.id}"
# Start listening to the channels for the transaction request created above
Endpoint.subscribe(request_topic)
# Making the consumption, since we made the request require_confirmation, it will
# create a pending consumption that will need to be confirmed
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: 100_000 * meta.token.subunit_to_unit,
metadata: nil,
token_id: nil,
address: meta.alice_wallet.address
})
consumption_id = response["data"]["id"]
assert response["success"] == true
assert response["data"]["status"] == "pending"
assert response["data"]["transaction_id"] == nil
# We check that we receive the confirmation request above in the
# transaction request channel
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_request",
topic: "transaction_request:" <> _,
payload: payload
}
# Ensure the websocket serializer can serialize the payload
{:socket_push, :text, encoded} =
WebsocketResponseSerializer.fastlane!(%Broadcast{
topic: "transaction_request:#{transaction_request.id}",
event: "transaction_consumption_request",
payload: payload
})
decoded = Poison.decode!(encoded)
assert decoded["success"] == true
# We need to know once the consumption has been approved, so let's
# listen to the channel for it
Endpoint.subscribe("transaction_consumption:#{consumption_id}")
# Confirm the consumption
response =
admin_user_request("/transaction_consumption.approve", %{
id: consumption_id
})
assert response["success"] == false
assert response["data"]["code"] == "transaction:insufficient_funds"
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_finalized",
topic: "transaction_consumption:" <> _,
payload: payload
}
{:socket_push, :text, encoded} =
WebsocketResponseSerializer.fastlane!(%Broadcast{
topic: "transaction_consumption:#{consumption_id}",
event: "transaction_consumption_finalized",
payload: payload
})
decoded = Poison.decode!(encoded)
assert decoded["success"] == false
assert decoded["error"]["code"] == "transaction:insufficient_funds"
assert "The specified wallet" <> _ = decoded["error"]["description"]
# Unsubscribe from all channels
Endpoint.unsubscribe("transaction_request:#{transaction_request.id}")
Endpoint.unsubscribe("transaction_consumption:#{consumption_id}")
end
test "sends socket confirmation when require_confirmation and rejected", meta do
mint!(meta.token)
# Create a require_confirmation transaction request that will be consumed soon
transaction_request =
insert(
:transaction_request,
type: "send",
token_uuid: meta.token.uuid,
account_uuid: meta.account.uuid,
wallet: meta.account_wallet,
amount: nil,
require_confirmation: true,
max_consumptions: 1
)
request_topic = "transaction_request:#{transaction_request.id}"
# Start listening to the channels for the transaction request created above
Endpoint.subscribe(request_topic)
# Making the consumption, since we made the request require_confirmation, it will
# create a pending consumption that will need to be confirmed
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "123",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: 100_000 * meta.token.subunit_to_unit,
metadata: nil,
token_id: nil,
provider_user_id: meta.bob.provider_user_id
})
consumption_id = response["data"]["id"]
assert response["success"] == true
assert response["data"]["status"] == "pending"
assert response["data"]["transaction_id"] == nil
# Retrieve what just got inserted
inserted_consumption = TransactionConsumption.get(response["data"]["id"])
# We check that we receive the confirmation request above in the
# transaction request channel
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_request",
topic: "transaction_request:" <> _,
payload:
%{
# Ignore content
}
}
# We need to know once the consumption has been approved, so let's
# listen to the channel for it
Endpoint.subscribe("transaction_consumption:#{consumption_id}")
# Confirm the consumption
response =
admin_user_request("/transaction_consumption.reject", %{
id: consumption_id
})
assert response["success"] == true
assert response["data"]["id"] == inserted_consumption.id
assert response["data"]["status"] == "rejected"
assert response["data"]["rejected_at"] != nil
assert response["data"]["approved_at"] == nil
assert response["data"]["confirmed_at"] == nil
# Check that a transaction was not inserted
assert response["data"]["transaction_id"] == nil
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_finalized",
topic: "transaction_consumption:" <> _,
payload:
%{
# Ignore content
}
}
# Check that we can consume for real now
#
# Making the consumption, since we made the request require_confirmation, it will
# create a pending consumption that will need to be confirmed
response =
admin_user_request("/transaction_request.consume", %{
idempotency_token: "1234",
formatted_transaction_request_id: transaction_request.id,
correlation_id: nil,
amount: 100_000 * meta.token.subunit_to_unit,
metadata: nil,
token_id: nil,
provider_user_id: meta.bob.provider_user_id
})
consumption_id = response["data"]["id"]
assert response["success"] == true
assert response["data"]["status"] == "pending"
assert response["data"]["transaction_id"] == nil
# Retrieve what just got inserted
inserted_consumption = TransactionConsumption.get(response["data"]["id"])
# We check that we receive the confirmation request above in the
# transaction request channel
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_request",
topic: "transaction_request:" <> _,
payload:
%{
# Ignore content
}
}
# We need to know once the consumption has been approved, so let's
# listen to the channel for it
Endpoint.subscribe("transaction_consumption:#{consumption_id}")
# Confirm the consumption
response =
admin_user_request("/transaction_consumption.approve", %{
id: consumption_id
})
assert response["success"] == true
assert response["data"]["id"] == inserted_consumption.id
assert response["data"]["status"] == "confirmed"
assert response["data"]["confirmed_at"] != nil
assert response["data"]["approved_at"] != nil
assert response["data"]["rejected_at"] == nil
# Check that a transaction was not inserted
assert response["data"]["transaction_id"] != nil
assert_receive %Phoenix.Socket.Broadcast{
event: "transaction_consumption_finalized",
topic: "transaction_consumption:" <> _,
payload:
%{
# Ignore content
}
}
# Unsubscribe from all channels
Endpoint.unsubscribe("transaction_request:#{transaction_request.id}")
Endpoint.unsubscribe("transaction_consumption:#{consumption_id}")
end
end
end
| 35.914273 | 120 | 0.605015 |
033e3ffee3f0330e923d2a716fbb099b92310438 | 175 | ex | Elixir | lib/pixie/encoders/response_disconnect_encoder.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie/encoders/response_disconnect_encoder.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie/encoders/response_disconnect_encoder.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | require Pixie.Response.Encoder
defimpl Poison.Encoder, for: Pixie.Response.Disconnect do
def encode response, _opts do
Pixie.Response.Encoder.encode response
end
end
| 21.875 | 57 | 0.794286 |
033e545eef332360f6951431c0679c35ac8b759a | 2,241 | ex | Elixir | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1__object_tracking_frame.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1__object_tracking_frame.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1__object_tracking_frame.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1_ObjectTrackingFrame do
  @moduledoc """
  Video frame level annotations for object detection and tracking. This field
  stores per frame location, time offset, and confidence.
  ## Attributes
  * `normalizedBoundingBox` (*type:* `GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1_NormalizedBoundingBox.t`, *default:* `nil`) - The normalized bounding box location of this object track for the frame.
  * `timeOffset` (*type:* `String.t`, *default:* `nil`) - The timestamp of the frame in microseconds.
  """
  # Generated model: `use GoogleApi.Gax.ModelBase` injects the struct definition
  # and the `field/1,2` macros used below to declare (de)serializable fields.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :normalizedBoundingBox =>
            GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1_NormalizedBoundingBox.t(),
          :timeOffset => String.t()
        }
  # Nested message field: decoded into the NormalizedBoundingBox model via `as:`.
  field(
    :normalizedBoundingBox,
    as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1_NormalizedBoundingBox
  )
  # Scalar field kept as a string (Duration values arrive as e.g. "1.5s").
  field(:timeOffset)
end
defimpl Poison.Decoder,
  for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1_ObjectTrackingFrame do
  alias GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1_ObjectTrackingFrame

  # Delegate JSON decoding to the decode/2 generated on the model module itself.
  def decode(value, options) do
    GoogleCloudVideointelligenceV1_ObjectTrackingFrame.decode(value, options)
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1_ObjectTrackingFrame do
  # All generated models share the same encoding logic in Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 36.737705 | 226 | 0.76573 |
033e5a8ccbac3524eef490ce5a947addf2eca446 | 1,640 | ex | Elixir | lib/desk_clock/faces/no_sync.ex | mtrudel/desk_clock | 9e6ea5ea58bbab12a7188b3c00237f2a8bf066a6 | [
"MIT"
] | 7 | 2020-07-30T23:58:52.000Z | 2022-01-24T19:10:11.000Z | lib/desk_clock/faces/no_sync.ex | mtrudel/desk_clock | 9e6ea5ea58bbab12a7188b3c00237f2a8bf066a6 | [
"MIT"
] | null | null | null | lib/desk_clock/faces/no_sync.ex | mtrudel/desk_clock | 9e6ea5ea58bbab12a7188b3c00237f2a8bf066a6 | [
"MIT"
] | 1 | 2020-12-22T15:28:16.000Z | 2020-12-22T15:28:16.000Z | defmodule DeskClock.Faces.NoSync do
@moduledoc """
A face that displays an 'NTP not synced message'
"""
@behaviour DeskClock.Face
alias ExPaint.{Color, Font}
@impl DeskClock.Face
def create(upper_zone, lower_zone) do
%{
label_font: Font.load("fixed7x14"),
upper_zone: upper_zone,
lower_zone: lower_zone,
drawn: false
}
end
@impl DeskClock.Face
def get_zone(:upper_zone, state) do
state[:upper_zone]
end
@impl DeskClock.Face
def get_zone(:lower_zone, state) do
state[:lower_zone]
end
@impl DeskClock.Face
def set_zone(_subface, _zone, state) do
# Don't change zones since there's no UI for users to interact with
state
end
@impl DeskClock.Face
def build_drawlist_for_time(_time, %{drawn: false} = state) do
{:ok, background} = ExPaint.create(256, 64)
ExPaint.filled_rect(background, {0, 0}, {256, 64}, Color.black())
text = draw_text("Not synchronized to NTP", state[:label_font])
{[{background, {0, 0}}, {text, {44, 20}}], %{state | drawn: true}}
end
@impl DeskClock.Face
def build_drawlist_for_time(_time, %{drawn: true} = state) do
{[], state}
end
  # Renders `text` in white on a fresh black image using `font`, starting at
  # `origin` (default {4, 0}). Returns the ExPaint image.
  #
  # NOTE(review): despite the old "next multiple of 4" wording, the width below is
  # padded to the next *strictly greater* multiple of 4 and then 4 more pixels are
  # added (e.g. 8 -> 16) — presumably 4-byte row alignment plus a right margin;
  # confirm before changing.
  defp draw_text(text, font, {origin_x, _originy} = origin \\ {4, 0}) do
    {glyph_width, height} = Font.size(font)
    width = glyph_width * String.length(text) + origin_x
    # Pad width past the next multiple of 4, plus a 4px margin (see NOTE above)
    width = 4 + width + (4 - rem(width, 4))
    {:ok, image} = ExPaint.create(width, height)
    ExPaint.filled_rect(image, {0, 0}, {width, height}, Color.black())
    ExPaint.text(image, origin, font, text, Color.white())
    image
  end
end
| 25.625 | 72 | 0.656098 |
033e5df764d24045af8c9259575906d254d73208 | 8,469 | ex | Elixir | clients/poly/lib/google_api/poly/v1/api/users.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/poly/lib/google_api/poly/v1/api/users.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/poly/lib/google_api/poly/v1/api/users.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Poly.V1.Api.Users do
  @moduledoc """
  API calls for all endpoints tagged `Users`.
  """
  alias GoogleApi.Poly.V1.Connection
  alias GoogleApi.Gax.{Request, Response}
  @doc """
  Lists assets authored by the given user. Only the value 'me', representing the currently-authenticated user, is supported. May include assets with an access level of PRIVATE or UNLISTED and assets which are All Rights Reserved for the currently-authenticated user.
  ## Parameters
  - connection (GoogleApi.Poly.V1.Connection): Connection to server
  - name (String.t): A valid user id. Currently, only the special value 'me', representing the currently-authenticated user is supported. To use 'me', you must pass an OAuth token with the request.
  - optional_params (KeywordList): [optional] Optional parameters
    - :$.xgafv (String.t): V1 error format.
    - :access_token (String.t): OAuth access token.
    - :alt (String.t): Data format for response.
    - :callback (String.t): JSONP
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    - :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
    - :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
    - :format (String.t): Return only assets with the matching format. Acceptable values are: `BLOCKS`, `FBX`, `GLTF`, `GLTF2`, `OBJ`, and `TILT`.
    - :orderBy (String.t): Specifies an ordering for assets. Acceptable values are: `BEST`, `NEWEST`, `OLDEST`. Defaults to `BEST`, which ranks assets based on a combination of popularity and other features.
    - :pageSize (integer()): The maximum number of assets to be returned. This value must be between `1` and `100`. Defaults to `20`.
    - :pageToken (String.t): Specifies a continuation token from a previous search whose results were split into multiple pages. To get the next page, submit the same request specifying the value from next_page_token.
    - :visibility (String.t): The visibility of the assets to be returned. Defaults to VISIBILITY_UNSPECIFIED which returns all assets.
  ## Returns
  {:ok, %GoogleApi.Poly.V1.Model.ListUserAssetsResponse{}} on success
  {:error, info} on failure
  """
  @spec poly_users_assets_list(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.Poly.V1.Model.ListUserAssetsResponse.t()} | {:error, Tesla.Env.t()}
  def poly_users_assets_list(connection, name, optional_params \\ [], opts \\ []) do
    # Every supported optional parameter for this endpoint is sent as a query value.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :upload_protocol => :query,
      :uploadType => :query,
      :format => :query,
      :orderBy => :query,
      :pageSize => :query,
      :pageToken => :query,
      :visibility => :query
    }
    # NOTE(review): `URI.encode_www_form/1` encodes a space as "+", which is only
    # correct for query strings, not path segments. Harmless while `name` is
    # effectively always "me"; confirm before reusing with arbitrary ids.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1/{+name}/assets", %{
        "name" => URI.encode_www_form(name)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
    connection
    |> Connection.execute(request)
    # Decode the JSON body into the generated response struct.
    |> Response.decode(opts ++ [struct: %GoogleApi.Poly.V1.Model.ListUserAssetsResponse{}])
  end
  @doc """
  Lists assets that the user has liked. Only the value 'me', representing the currently-authenticated user, is supported. May include assets with an access level of UNLISTED.
  ## Parameters
  - connection (GoogleApi.Poly.V1.Connection): Connection to server
  - name (String.t): A valid user id. Currently, only the special value 'me', representing the currently-authenticated user is supported. To use 'me', you must pass an OAuth token with the request.
  - optional_params (KeywordList): [optional] Optional parameters
    - :$.xgafv (String.t): V1 error format.
    - :access_token (String.t): OAuth access token.
    - :alt (String.t): Data format for response.
    - :callback (String.t): JSONP
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    - :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
    - :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
    - :format (String.t): Return only assets with the matching format. Acceptable values are: `BLOCKS`, `FBX`, `GLTF`, `GLTF2`, `OBJ`, `TILT`.
    - :orderBy (String.t): Specifies an ordering for assets. Acceptable values are: `BEST`, `NEWEST`, `OLDEST`, 'LIKED_TIME'. Defaults to `LIKED_TIME`, which ranks assets based on how recently they were liked.
    - :pageSize (integer()): The maximum number of assets to be returned. This value must be between `1` and `100`. Defaults to `20`.
    - :pageToken (String.t): Specifies a continuation token from a previous search whose results were split into multiple pages. To get the next page, submit the same request specifying the value from next_page_token.
  ## Returns
  {:ok, %GoogleApi.Poly.V1.Model.ListLikedAssetsResponse{}} on success
  {:error, info} on failure
  """
  @spec poly_users_likedassets_list(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.Poly.V1.Model.ListLikedAssetsResponse.t()} | {:error, Tesla.Env.t()}
  def poly_users_likedassets_list(connection, name, optional_params \\ [], opts \\ []) do
    # Same shape as poly_users_assets_list/4 but without the :visibility filter.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :upload_protocol => :query,
      :uploadType => :query,
      :format => :query,
      :orderBy => :query,
      :pageSize => :query,
      :pageToken => :query
    }
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1/{+name}/likedassets", %{
        "name" => URI.encode_www_form(name)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
    connection
    |> Connection.execute(request)
    # Decode the JSON body into the generated response struct.
    |> Response.decode(opts ++ [struct: %GoogleApi.Poly.V1.Model.ListLikedAssetsResponse{}])
  end
end
| 54.63871 | 274 | 0.686268 |
033e62d63e3b5b4ab52fcb23ea62b3093ea8737e | 441 | exs | Elixir | frontiers_site/test/frontiers_site_web/views/error_view_test.exs | wasmCloud/frontiers | 20eb6f4f7e59332341cd3973869758c670b54e2c | [
"Apache-2.0"
] | 1 | 2021-06-13T22:12:50.000Z | 2021-06-13T22:12:50.000Z | frontiers_site/test/frontiers_site_web/views/error_view_test.exs | wasmCloud/frontiers | 20eb6f4f7e59332341cd3973869758c670b54e2c | [
"Apache-2.0"
] | null | null | null | frontiers_site/test/frontiers_site_web/views/error_view_test.exs | wasmCloud/frontiers | 20eb6f4f7e59332341cd3973869758c670b54e2c | [
"Apache-2.0"
] | null | null | null | defmodule FrontiersSiteWeb.ErrorViewTest do
use FrontiersSiteWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
  # The 404 template renders as the bare status text.
  rendered = render_to_string(FrontiersSiteWeb.ErrorView, "404.html", [])

  assert rendered == "Not Found"
end
test "renders 500.html" do
  # The 500 template renders as the bare status text.
  rendered = render_to_string(FrontiersSiteWeb.ErrorView, "500.html", [])

  assert rendered == "Internal Server Error"
end
end
| 29.4 | 98 | 0.748299 |
033e6f7ddf86a0906ecdfc5af24bab531afd0ad5 | 703 | exs | Elixir | test/ex_rss_web/controllers/user_controller_test.exs | cruessler/exrss | 6ac17b7533d78460a1c34cabaae86fec317f460a | [
"MIT"
] | 4 | 2020-02-16T07:18:35.000Z | 2021-12-09T14:43:10.000Z | test/ex_rss_web/controllers/user_controller_test.exs | cruessler/exrss | 6ac17b7533d78460a1c34cabaae86fec317f460a | [
"MIT"
] | 27 | 2019-10-16T18:35:19.000Z | 2022-03-13T16:39:57.000Z | test/ex_rss_web/controllers/user_controller_test.exs | cruessler/exrss | 6ac17b7533d78460a1c34cabaae86fec317f460a | [
"MIT"
] | null | null | null | defmodule ExRssWeb.UserControllerTest do
use ExRssWeb.ConnCase
test "POST /users", %{conn: conn} do
  registration = [
    name: "New user",
    email: "[email protected]",
    password: "password",
    password_confirmation: "password"
  ]

  response = post(conn, "/users", registration: registration)

  # A successful registration redirects (HTTP 302).
  assert html_response(response, 302)
end
test "POST /users with confirmation not matching password", %{conn: conn} do
  registration = [
    name: "New user",
    email: "[email protected]",
    password: "password",
    password_confirmation: ""
  ]

  response = post(conn, "/users", registration: registration)

  # The form is re-rendered (HTTP 200) instead of redirecting.
  assert html_response(response, 200)
end
end
| 20.676471 | 78 | 0.58037 |
033ea63068b99c7ff8e5c96cd0f498e301f2b62a | 832 | exs | Elixir | mix.exs | kim-company/rambo | b66d61b656499dd31892e8cf5518f6da38b4b6a6 | [
"MIT"
] | 145 | 2019-08-30T19:19:16.000Z | 2022-03-30T15:58:49.000Z | mix.exs | kim-company/rambo | b66d61b656499dd31892e8cf5518f6da38b4b6a6 | [
"MIT"
] | 14 | 2020-02-14T16:07:38.000Z | 2022-03-17T08:04:31.000Z | mix.exs | kim-company/rambo | b66d61b656499dd31892e8cf5518f6da38b4b6a6 | [
"MIT"
] | 8 | 2019-09-06T08:45:41.000Z | 2021-07-29T11:34:12.000Z | defmodule Rambo.MixProject do
use Mix.Project
@version "0.3.4"
@repo_url "https://github.com/jayjun/rambo"
# Mix project definition; see https://hexdocs.pm/mix/Mix.Project.html for keys.
def project do
  # Hex package metadata.
  package = [
    exclude_patterns: ["priv/target"],
    licenses: ["MIT"],
    links: %{"GitHub" => @repo_url}
  ]

  # ExDoc configuration.
  docs = [
    source_ref: @version,
    source_url: @repo_url,
    main: "Rambo",
    api_reference: false,
    extra_section: []
  ]

  [
    app: :rambo,
    version: @version,
    elixir: "~> 1.9",
    name: "Rambo",
    description: "Run your command. Send input. Get output.",
    compilers: Mix.compilers() ++ [:rambo],
    deps: deps(),
    package: package,
    docs: docs
  ]
end
# OTP application configuration: no extra applications are started.
def application, do: [extra_applications: []]
# Dependencies: ex_doc is only needed to build documentation.
defp deps, do: [{:ex_doc, "~> 0.24", only: [:docs], runtime: false}]
end
| 19.348837 | 63 | 0.52524 |
033eb886cc094df63f689d586bf665cba8a9d687 | 253 | ex | Elixir | lib/horizons.ex | BCrawfordScott/horizons | 04ee6ba579517e6a35c1347de4be1bceea8e4b36 | [
"MIT"
] | null | null | null | lib/horizons.ex | BCrawfordScott/horizons | 04ee6ba579517e6a35c1347de4be1bceea8e4b36 | [
"MIT"
] | null | null | null | lib/horizons.ex | BCrawfordScott/horizons | 04ee6ba579517e6a35c1347de4be1bceea8e4b36 | [
"MIT"
] | null | null | null | defmodule Horizons do
@moduledoc """
Horizons keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 25.3 | 66 | 0.754941 |
033edc76c59adaa3c79941ab4ee70a8c2637407c | 336 | exs | Elixir | combo/phxexibee/test/phxexibee_web/live/page_live_test.exs | exineris/shp_challenge2021 | 2a5447af681259d0ea699b670cf079bd31315cd8 | [
"Apache-2.0"
] | null | null | null | combo/phxexibee/test/phxexibee_web/live/page_live_test.exs | exineris/shp_challenge2021 | 2a5447af681259d0ea699b670cf079bd31315cd8 | [
"Apache-2.0"
] | null | null | null | combo/phxexibee/test/phxexibee_web/live/page_live_test.exs | exineris/shp_challenge2021 | 2a5447af681259d0ea699b670cf079bd31315cd8 | [
"Apache-2.0"
] | null | null | null | defmodule PhxexibeeWeb.PageLiveTest do
use PhxexibeeWeb.ConnCase
import Phoenix.LiveViewTest
test "disconnected and connected render", %{conn: conn} do
  {:ok, view, html} = live(conn, "/")

  # Both the static (disconnected) HTML and the connected LiveView
  # render must contain the welcome copy.
  assert html =~ "Welcome to Phoenix!"
  assert render(view) =~ "Welcome to Phoenix!"
end
end
| 28 | 60 | 0.729167 |
033f04ce28182b6f3126f9fb7d1b72cd0d4e9b82 | 4,641 | ex | Elixir | clients/knowledge_graph_search/lib/google_api/knowledge_graph_search/v1/request_builder.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/knowledge_graph_search/lib/google_api/knowledge_graph_search/v1/request_builder.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/knowledge_graph_search/lib/google_api/knowledge_graph_search/v1/request_builder.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.KnowledgeGraphSearch.V1.RequestBuilder do
  @moduledoc """
  Helper functions for building Tesla requests.
  """

  # Matches `{var}` and `{+var}` path-template placeholders. A leading `+`
  # means the substituted value is URI-decoded (see `replacement_value/2`).
  @path_template_regex ~r/{(\+?[^}]+)}/i

  @doc """
  Specify the request method when building a request.

  ## Parameters

  - request (Map) - Collected request options
  - m (String) - Request method

  ## Returns

  Map
  """
  @spec method(map(), String.t()) :: map()
  def method(request, m) do
    Map.put_new(request, :method, m)
  end

  @doc """
  Specify the request URL when building a request.

  ## Parameters

  - request (Map) - Collected request options
  - u (String) - Request URL, possibly containing `{var}` placeholders
  - replacements (Map) - Values substituted into the placeholders

  ## Returns

  Map
  """
  # NOTE: the original spec used `Map.t` for the third argument; `map()` is
  # the standard built-in type.
  @spec url(map(), String.t(), map()) :: map()
  def url(request, u, replacements) do
    url(request, replace_path_template_vars(u, replacements))
  end

  def url(request, u) do
    Map.put_new(request, :url, u)
  end

  # Expands every `{var}` / `{+var}` placeholder in `u` using `replacements`.
  def replace_path_template_vars(u, replacements) do
    Regex.replace(@path_template_regex, u, fn _, var -> replacement_value(var, replacements) end)
  end

  # `{+var}` (reserved expansion): decode the value so callers that pass a
  # pre-encoded value are not double-encoded.
  defp replacement_value("+" <> name, replacements) do
    URI.decode(replacement_value(name, replacements))
  end

  # Missing replacements expand to the empty string.
  defp replacement_value(name, replacements) do
    Map.get(replacements, name, "")
  end

  @doc """
  Add optional parameters to the request.

  ## Parameters

  - request (Map) - Collected request options
  - definitions (Map) - Map of parameter name to parameter location.
  - options (KeywordList) - The provided optional parameters

  ## Returns

  Map
  """
  # Spec fix: the definitions map has atom keys/values — the original spec
  # (`%{optional(:atom) => :atom}`) only described the literal atom `:atom`.
  @spec add_optional_params(map(), %{optional(atom()) => atom()}, keyword()) :: map()
  def add_optional_params(request, _, []), do: request

  def add_optional_params(request, definitions, [{key, value} | tail]) do
    case definitions do
      %{^key => location} ->
        request
        |> add_param(location, key, value)
        |> add_optional_params(definitions, tail)

      _ ->
        # Options without a definition are silently skipped.
        add_optional_params(request, definitions, tail)
    end
  end

  @doc """
  Add optional parameters to the request.

  ## Parameters

  - request (Map) - Collected request options
  - location (atom) - Where to put the parameter
  - key (atom) - The name of the parameter
  - value (any) - The value of the parameter

  ## Returns

  Map
  """
  # Spec fix: `:atom` literals replaced by the `atom()` type.
  @spec add_param(map(), atom(), atom(), any()) :: map()
  # The whole body at once: stored verbatim.
  def add_param(request, :body, :body, value), do: Map.put(request, :body, value)

  # A named body part: JSON-encoded into a multipart field.
  def add_param(request, :body, key, value) do
    request
    |> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
    |> Map.update!(
      :body,
      &Tesla.Multipart.add_field(
        &1,
        key,
        Poison.encode!(value),
        headers: [{:"Content-Type", "application/json"}]
      )
    )
  end

  # A file upload: attached as a multipart file part.
  def add_param(request, :file, name, path) do
    request
    |> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
    |> Map.update!(:body, &Tesla.Multipart.add_file(&1, path, name: name))
  end

  # Form data: accumulated into a map under :body.
  def add_param(request, :form, name, value) do
    request
    |> Map.update(:body, %{name => value}, &Map.put(&1, name, value))
  end

  # Any other location (e.g. :query): appended to a keyword list.
  def add_param(request, location, key, value) do
    Map.update(request, location, [{key, value}], &(&1 ++ [{key, value}]))
  end

  @doc """
  Handle the response for a Tesla request.

  ## Parameters

  - env (Tesla.Env) - The response object
  - struct - The shape of the struct to deserialize into

  ## Returns

  {:ok, struct} on success
  {:error, info} on failure
  """
  @spec decode(Tesla.Env.t()) :: {:ok, struct()} | {:error, Tesla.Env.t()}
  def decode(%Tesla.Env{status: 200, body: body}), do: Poison.decode(body)
  def decode(response), do: {:error, response}

  # Spec fix: the second argument also accepts `false`, which skips
  # deserialization and returns the raw env.
  @spec decode(Tesla.Env.t(), false | struct()) ::
          {:ok, Tesla.Env.t() | struct()} | {:error, Tesla.Env.t()}
  def decode(%Tesla.Env{status: 200} = env, false), do: {:ok, env}

  def decode(%Tesla.Env{status: 200, body: body}, struct) do
    Poison.decode(body, as: struct)
  end

  def decode(response, _struct), do: {:error, response}
end
| 28.826087 | 137 | 0.67033 |
033f3c32aa96b34923447bcf400d8cc119f06ff6 | 1,758 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/track_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/track_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/track_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AndroidEnterprise.V1.Model.TrackInfo do
  @moduledoc """
  Id to name association of a track.

  ## Attributes

  * `trackAlias` (*type:* `String.t`, *default:* `nil`) - A modifiable name for a track. This is the visible name in the play developer console.
  * `trackId` (*type:* `String.t`, *default:* `nil`) - Unmodifiable, unique track identifier. This identifier is the releaseTrackId in the url of the play developer console page that displays the track information.
  """

  # ModelBase supplies the `field/1` macro plus the encode/decode helpers
  # used by the Poison protocol implementations for this model.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :trackAlias => String.t(),
          :trackId => String.t()
        }

  # Declare the JSON fields carried by this model struct.
  field(:trackAlias)
  field(:trackId)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidEnterprise.V1.Model.TrackInfo do
  # Delegate to the model module's decode helper.
  def decode(value, options),
    do: GoogleApi.AndroidEnterprise.V1.Model.TrackInfo.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AndroidEnterprise.V1.Model.TrackInfo do
  # All generated models share ModelBase's generic encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.16 | 216 | 0.733788 |
033f53d2c29f81756d78cd73fb37ced52cd137a6 | 1,387 | ex | Elixir | lib/teiserver_web/controllers/report/infolog_controller.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 6 | 2021-02-08T10:42:53.000Z | 2021-04-25T12:12:03.000Z | lib/teiserver_web/controllers/report/infolog_controller.ex | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/teiserver_web/controllers/report/infolog_controller.ex | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule TeiserverWeb.Report.InfologController do
use CentralWeb, :controller
alias Teiserver.Telemetry
plug(AssignPlug,
sidemenu_active: ["teiserver"]
)
plug Bodyguard.Plug.Authorize,
policy: Teiserver.Telemetry.Infolog,
action: {Phoenix.Controller, :action_name},
user: {Central.Account.AuthLib, :current_user}
plug(:add_breadcrumb, name: 'Teiserver', url: '/teiserver')
plug(:add_breadcrumb, name: 'Reports', url: '/teiserver/reports')
plug(:add_breadcrumb, name: 'Infologs', url: '/teiserver/reports/infolog')
@spec index(Plug.Conn.t(), map) :: Plug.Conn.t()
def index(conn, _params) do
  # No filters: list everything, newest first, with the owning user preloaded.
  opts = [search: [], preload: [:user], order_by: "Newest first"]

  render(assign(conn, :infologs, Telemetry.list_infologs(opts)), "index.html")
end
@spec show(Plug.Conn.t(), map) :: Plug.Conn.t()
def show(conn, %{"id" => id}) do
  conn
  |> assign(:infolog, Telemetry.get_infolog(id, preload: [:user]))
  |> render("show.html")
end
@spec download(Plug.Conn.t(), map) :: Plug.Conn.t()
def download(conn, %{"id" => id}) do
  infolog = Telemetry.get_infolog(id)
  filename = "infolog_#{infolog.id}.log"

  # Send the raw log contents as a plain-text attachment.
  conn
  |> put_resp_content_type("text/plain")
  |> put_resp_header("content-disposition", "attachment; filename=\"#{filename}\"")
  |> send_resp(200, infolog.contents)
end
end
| 27.74 | 99 | 0.658976 |
033fa0dd8f3915b9eaf26485975900fe2b27db8c | 65 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_relational_operation_parsing_test_case/DecimalFloat.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_relational_operation_parsing_test_case/DecimalFloat.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_relational_operation_parsing_test_case/DecimalFloat.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | 1.2e-3 < 4.5e+6
1.2e-3 > 4.5e+6
1.2e-3 <= 4.5e+6
1.2e-3 >= 4.5e+6 | 16.25 | 16 | 0.492308 |
033fb1897557de9ca49f0ce01ba2274cefce17ba | 1,576 | ex | Elixir | clients/manufacturers/lib/google_api/manufacturers/v1/model/destination_status.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/manufacturers/lib/google_api/manufacturers/v1/model/destination_status.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/manufacturers/lib/google_api/manufacturers/v1/model/destination_status.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Manufacturers.V1.Model.DestinationStatus do
  @moduledoc """
  The destination status.

  ## Attributes

  * `destination` (*type:* `String.t`, *default:* `nil`) - The name of the destination.
  * `status` (*type:* `String.t`, *default:* `nil`) - The status of the destination.
  """

  # ModelBase supplies the `field/1` macro plus encode/decode helpers used by
  # the Poison protocol implementations for this model.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :destination => String.t(),
          :status => String.t()
        }

  # Declare the JSON fields carried by this model struct.
  field(:destination)
  field(:status)
end
defimpl Poison.Decoder, for: GoogleApi.Manufacturers.V1.Model.DestinationStatus do
  # Delegate to the model module's decode helper.
  def decode(value, options),
    do: GoogleApi.Manufacturers.V1.Model.DestinationStatus.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Manufacturers.V1.Model.DestinationStatus do
  # All generated models share ModelBase's generic encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 31.52 | 89 | 0.725888 |
033fd4f112d42691870b0c4069ce7810be957cf0 | 220 | ex | Elixir | apps/core/lib/core/api/behaviours/declaration_request_creator_behaviour.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/core/lib/core/api/behaviours/declaration_request_creator_behaviour.ex | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/core/lib/core/api/behaviours/declaration_request_creator_behaviour.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule Core.DeclarationRequests.API.CreatorBehaviour do
@moduledoc false

# Contract for fetching the next declaration-request sequence number.
# Implementations return the raw %Postgrex.Result{}; callers read the number
# out of its `rows` field.
@callback sql_get_sequence_number() ::
            {:ok, %Postgrex.Result{rows: [[sequence :: integer]]}} | {:error, reason :: term}
end
| 31.428571 | 95 | 0.686364 |
034083751a79efb78a242f38ee7ea5e776884f04 | 1,961 | ex | Elixir | lib/policr_mini/counter.ex | gchengyu/policr-mini | 5acd7d6609fcaea2dbd7276fa01ca334ef9f6e6a | [
"MIT"
] | null | null | null | lib/policr_mini/counter.ex | gchengyu/policr-mini | 5acd7d6609fcaea2dbd7276fa01ca334ef9f6e6a | [
"MIT"
] | null | null | null | lib/policr_mini/counter.ex | gchengyu/policr-mini | 5acd7d6609fcaea2dbd7276fa01ca334ef9f6e6a | [
"MIT"
] | null | null | null | defmodule PolicrMini.Counter do
@moduledoc """
Counter cache implementation.

Totals are kept in a RAM-based Mnesia table and seeded from the database
when the process starts (see `start_link/1` and `init/1`).
"""
use GenServer
import PolicrMini.Helper
alias PolicrMini.VerificationBusiness
alias :mnesia, as: Mnesia
def start_link(_opts) do
  # Seed the cache with the current totals from the database so the counters
  # start from accurate values after a restart.
  initial_state = %{
    verification_total: VerificationBusiness.find_total(),
    verification_passed_total: VerificationBusiness.find_total(status: :passed),
    verification_timeout_total: VerificationBusiness.find_total(status: :timeout)
  }

  GenServer.start_link(__MODULE__, initial_state, name: __MODULE__)
end
@impl true
def init(state) do
  # Pull the three pre-computed totals out of the seed state built in
  # start_link/1.
  %{
    verification_total: verification_total,
    verification_passed_total: verification_passed_total,
    verification_timeout_total: verification_timeout_total
  } = state

  # Boot Mnesia; returns the node list the RAM table is replicated to
  # (helper imported from PolicrMini.Helper).
  node_list = init_mnesia!()

  table_result =
    Mnesia.create_table(Counter,
      attributes: [:key, :value],
      ram_copies: node_list
    )

  # Validates the create_table result (helper imported from PolicrMini.Helper).
  check_mnesia_created_table!(table_result)
  Mnesia.wait_for_tables([Counter], 2000)

  # Write the seed values. Dirty writes are used throughout this module.
  Mnesia.dirty_write({Counter, :verification_total, verification_total})
  Mnesia.dirty_write({Counter, :verification_passed_total, verification_passed_total})
  Mnesia.dirty_write({Counter, :verification_timeout_total, verification_timeout_total})

  {:ok, state}
end
@type key :: :verification_total | :verification_passed_total | :verification_timeout_total

@doc """
Reads the current value of a counter (`-1` when the key has never been written).
"""
@spec get(key) :: integer
def get(key), do: GenServer.call(__MODULE__, {:get_value, key})

@doc """
Asynchronously bumps a counter by one.
"""
@spec increment(key) :: :ok
def increment(key), do: GenServer.cast(__MODULE__, {:increment, key})
@impl true
def handle_call({:get_value, key}, _from, state) do
  # Read the counter record directly; a missing key maps to -1.
  reply =
    case Mnesia.dirty_read(Counter, key) do
      [{Counter, _key, stored}] -> stored
      [] -> -1
    end

  {:reply, reply, state}
end
@impl true
def handle_cast({:increment, key}, state) do
  # dirty_update_counter bumps the stored integer by one in place.
  Mnesia.dirty_update_counter(Counter, key, 1)

  {:noreply, state}
end
end
| 25.141026 | 93 | 0.702703 |
0340b04ddc1caad29a99cbfbd19f0d14facf3788 | 1,008 | ex | Elixir | lib/licensir/guesser.ex | bonfire-networks/licensir | 969a1beeadb7b79fb757a4b49cfd24d553fde024 | [
"MIT"
] | null | null | null | lib/licensir/guesser.ex | bonfire-networks/licensir | 969a1beeadb7b79fb757a4b49cfd24d553fde024 | [
"MIT"
] | null | null | null | lib/licensir/guesser.ex | bonfire-networks/licensir | 969a1beeadb7b79fb757a4b49cfd24d553fde024 | [
"MIT"
] | null | null | null | defmodule Licensir.Guesser do
@moduledoc """
A module that determines a dependency's license based on different sources gathered.
"""
alias Licensir.{License, NamingVariants}
@doc """
Guess the license based on the available license data.

Accepts a single `%License{}` (or a list of them) and fills in the
`:license` field with the conclusion.
"""
def guess(licenses) when is_list(licenses), do: Enum.map(licenses, &guess/1)

def guess(%License{} = license) do
  # Normalize both sources to canonical naming before comparing them.
  from_hex = NamingVariants.normalize(license.hex_metadata)
  from_file = NamingVariants.normalize(license.file)

  Map.put(license, :license, guess(from_hex, from_file))
end
# Combine the normalized hex-metadata licenses (a list) and the LICENSE-file
# license into a single conclusion string.
#
# Normalization first: an empty list or empty string carries no information,
# so fold both down to `nil` before deciding. (The original code only
# normalized `[]` when the file was also `nil`, so `guess([], "MIT")` fell
# through to the join clause and produced `"; MIT"` — fixed here.)
defp guess([], file), do: guess(nil, file)
defp guess(hex, ""), do: guess(hex, nil)
# Neither source had anything to say.
defp guess(nil, nil), do: "Undefined"
# Only the file (or only hex metadata) is present: report it as-is.
defp guess(nil, file), do: file
defp guess(hex, nil), do: Enum.join(hex, "; ")
# Both sources agree on a single license: no need to repeat it.
defp guess([single], single), do: single
# Multiple / conflicting findings: list them all.
defp guess(hex, file), do: Enum.join(hex, "; ") <> "; " <> file
end
| 32.516129 | 86 | 0.685516 |
0340b49eff7b5803a7666bd74284d1acda237c05 | 1,767 | ex | Elixir | clients/content/lib/google_api/content/v2/model/shippingsettings_list_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/shippingsettings_list_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/shippingsettings_list_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.ShippingsettingsListResponse do
  @moduledoc """
  Response payload for listing shipping settings.

  ## Attributes

  - kind (String): Identifies what kind of resource this is. Value: the fixed string \"content#shippingsettingsListResponse\". Defaults to: `null`.
  - nextPageToken (String): The token for the retrieval of the next page of shipping settings. Defaults to: `null`.
  - resources (List[ShippingSettings]): Defaults to: `null`.
  """

  # Plain struct; (de)serialization is handled via Poison protocol
  # implementations rather than by this module.
  defstruct [
    :"kind",
    :"nextPageToken",
    :"resources"
  ]
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.ShippingsettingsListResponse do
  import GoogleApi.Content.V2.Deserializer

  # Deserialize the nested `resources` list into ShippingSettings structs.
  def decode(value, options) do
    deserialize(value, :"resources", :list, GoogleApi.Content.V2.Model.ShippingSettings, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.ShippingsettingsListResponse do
  # Delegate to the shared serializer (`serialize_non_nil`).
  def encode(value, options),
    do: GoogleApi.Content.V2.Deserializer.serialize_non_nil(value, options)
end
| 33.980769 | 157 | 0.752122 |
0340c88d9fae169ac1fc26f70be8026101ad3f03 | 15,120 | ex | Elixir | lib/phoenix_live_view/diff.ex | yosatak/phoenix_live_view | d6b45321919c3faa09af0fb90dfe3d4b9831f155 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/diff.ex | yosatak/phoenix_live_view | d6b45321919c3faa09af0fb90dfe3d4b9831f155 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/diff.ex | yosatak/phoenix_live_view | d6b45321919c3faa09af0fb90dfe3d4b9831f155 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.Diff do
# The diff engine is responsible for tracking the rendering state.
# Given that components are part of said state, they are also
# handled here.
@moduledoc false
alias Phoenix.LiveView.{Utils, Rendered, Comprehension, Component}
@components :c
@static :s
@dynamics :d
@doc """
Returns a fresh component state: `{ids_to_state, cids_to_id, uuids}`.
"""
def new_components(uuids \\ 0) do
  # Component state indexed by id, plus the cid -> id index; the uuid
  # counter is carried over so cids stay unique across resets.
  ids_to_state = %{}
  cids_to_id = %{}

  {ids_to_state, cids_to_id, uuids}
end
@doc """
Returns a fresh fingerprint state: `{root_fingerprint, children}`.
"""
def new_fingerprints, do: {nil, %{}}
@doc """
Converts a diff into iodata.

Only full render diffs are accepted.
"""
def to_iodata(map) do
  # The component table may be absent when the diff contains no components.
  components = Map.get(map, @components, %{})

  to_iodata(map, components)
end
# Comprehension diff: render each dynamics row against the shared statics.
defp to_iodata(%{@dynamics => dynamics, @static => static}, components) do
  for dynamic <- dynamics do
    comprehension_to_iodata(static, dynamic, components)
  end
end

# Regular rendered diff: interleave the static chunks with the dynamics
# stored under integer keys in `parts`.
defp to_iodata(%{@static => static} = parts, components) do
  parts_to_iodata(static, parts, 0, components)
end

# A bare integer is a component id (cid): resolve it through the component
# diff table and render that.
defp to_iodata(int, components) when is_integer(int) do
  to_iodata(Map.fetch!(components, int), components)
end

# Binaries are literal template chunks and pass through untouched.
defp to_iodata(binary, _components) when is_binary(binary) do
  binary
end
# Zips N+1 static chunks with N dynamic entries looked up by index in `parts`.
#
# NOTE: the single-element clause must come first — a one-element list would
# also match `[head | tail]` (with tail == []), which would then look up a
# non-existent dynamic.
defp parts_to_iodata([last], _parts, _counter, _components) do
  [last]
end

defp parts_to_iodata([head | tail], parts, counter, components) do
  [
    head,
    to_iodata(Map.fetch!(parts, counter), components)
    | parts_to_iodata(tail, parts, counter + 1, components)
  ]
end
# Interleaves the statics of a comprehension with one row of dynamics.
# There is always exactly one more static chunk than dynamics, hence the
# final single-static / empty-dynamics clause.
defp comprehension_to_iodata([static_head | static_tail], [dyn_head | dyn_tail], components) do
  [
    static_head,
    to_iodata(dyn_head, components)
    | comprehension_to_iodata(static_tail, dyn_tail, components)
  ]
end

defp comprehension_to_iodata([static_head], [], _components) do
  [static_head]
end
@doc """
Renders a diff for the rendered struct in regards to the given socket.
"""
# Root fingerprint changed: the whole tree is stale, so reset both the
# fingerprint tree and the component state (keeping the uuid counter so cids
# stay unique) and render again from scratch.
def render(
      %{fingerprints: {expected, _}} = socket,
      %Rendered{fingerprint: actual} = rendered,
      {_, _, uuids}
    )
    when expected != nil and expected != actual do
  render(%{socket | fingerprints: new_fingerprints()}, rendered, new_components(uuids))
end

def render(%{fingerprints: prints} = socket, %Rendered{} = rendered, components) do
  # Walk the rendered tree, collecting the diff plus any components that
  # still have to be rendered separately.
  {diff, prints, pending_components, components} =
    traverse(socket, rendered, prints, %{}, components)

  {component_diffs, components} =
    render_pending_components(socket, pending_components, %{}, %{}, components)

  socket = %{socket | fingerprints: prints}

  # Only ship the page title when it changed on this render.
  diff =
    if Utils.changed?(socket, :page_title) do
      Map.put(diff, :title, socket.assigns.page_title)
    else
      diff
    end

  # Omit the component key entirely when no component produced a diff.
  if map_size(component_diffs) == 0 do
    {socket, diff, components}
  else
    {socket, Map.put(diff, @components, component_diffs), components}
  end
end
@doc """
Execute the `fun` with the component `cid` with the given `socket` as template.

It will store the result under the `cid` key in the `component_diffs` map.

It returns the updated `component_diffs` and the updated `components` or
`:error` if the component cid does not exist.
"""
def with_component(socket, cid, component_diffs, components, fun) when is_integer(cid) do
  {id_to_components, cid_to_ids, _} = components

  case cid_to_ids do
    # Known cid: its id is a `{component, _}` tuple whose first element is
    # the component itself.
    %{^cid => {component, _} = id} ->
      # Recover the component's stored assigns/private/fingerprints.
      {^cid, assigns, private, fingerprints} = Map.fetch!(id_to_components, id)

      # Run the caller's function on a socket configured for this component;
      # `fun` returns the updated component socket plus an extra result.
      {component_socket, extra} =
        socket
        |> configure_socket_for_component(assigns, private, fingerprints)
        |> fun.(component)

      # Render this component, then any components it scheduled in turn.
      {pending_components, component_diffs, components} =
        render_component(component_socket, id, cid, false, %{}, component_diffs, components)

      {component_diffs, components} =
        render_pending_components(socket, pending_components, %{}, component_diffs, components)

      {%{@components => component_diffs}, components, extra}

    # Any other map shape: the cid is unknown.
    %{} ->
      :error
  end
end
@doc """
Sends an update to a component.

The component is preloaded before the update callback is invoked.

If the component identified by `{module, id}` exists, returns
`{diff, updated_components}` (the diff is stored under the component's cid);
otherwise `:noop` is returned.

## Example

    {diff, new_components} = Diff.update_component(socket, state.components, update)
"""
def update_component(socket, components, {module, id, updated_assigns}) do
  with {:ok, cid} <- fetch_cid(module, id, components) do
    preloaded_assigns = maybe_call_preload!(module, updated_assigns)

    updater = fn component_socket, component ->
      {Utils.maybe_call_update!(component_socket, component, preloaded_assigns), :noop}
    end

    {diff, new_components, :noop} = with_component(socket, cid, %{}, components, updater)
    {diff, new_components}
  else
    :error -> :noop
  end
end
@doc """
Removes the component identified by `cid` from the component state.
"""
def delete_component(cid, {id_to_components, cid_to_ids, uuids}) do
  # Drop the cid from the index first, then drop the state it pointed at.
  {component_id, remaining_cids} = Map.pop(cid_to_ids, cid)

  {Map.delete(id_to_components, component_id), remaining_cids, uuids}
end
@doc """
Mounts the component and converts it into a rendered struct.
"""
def component_to_rendered(socket, component, assigns) when is_map(assigns) do
  mounted_socket = mount_component(socket, component)
  preloaded_assigns = maybe_call_preload!(component, assigns)
  updated_socket = Utils.maybe_call_update!(mounted_socket, component, preloaded_assigns)

  Utils.to_rendered(updated_socket, component)
end
  ## Traversal
  #
  # traverse/5 walks a Rendered/Component/Comprehension tree and returns
  # `{diff, fingerprint_tree, pending_components, components}`.

  # Rendered whose fingerprint matches the stored one: only the dynamic
  # parts are diffed. NOTE(review): the boolean passed to `dynamic` appears
  # to toggle change tracking (true when the client already has the statics)
  # -- confirm against the Rendered struct's contract.
  defp traverse(
         socket,
         %Rendered{fingerprint: fingerprint, dynamic: dynamic},
         {fingerprint, children},
         pending_components,
         components
       ) do
    {_counter, diff, children, pending_components, components} =
      traverse_dynamic(socket, dynamic.(true), children, pending_components, components)
    {diff, {fingerprint, children}, pending_components, components}
  end

  # Fingerprint mismatch (or first render): emit everything, including the
  # static parts, and rebuild the children fingerprint tree from scratch.
  defp traverse(
         socket,
         %Rendered{fingerprint: fingerprint, static: static, dynamic: dynamic},
         _,
         pending_components,
         components
       ) do
    {_counter, diff, children, pending_components, components} =
      traverse_dynamic(socket, dynamic.(false), %{}, pending_components, components)
    {Map.put(diff, @static, static), {fingerprint, children}, pending_components, components}
  end

  # Stateless component (no id): expand it to a rendered tree and traverse
  # that in place.
  defp traverse(
         socket,
         %Component{id: nil, component: component, assigns: assigns},
         fingerprints_tree,
         pending_components,
         components
       ) do
    rendered = component_to_rendered(socket, component, assigns)
    traverse(socket, rendered, fingerprints_tree, pending_components, components)
  end

  # Stateful component: register/queue it and emit only its cid; the actual
  # rendering happens later in render_pending_components/5.
  defp traverse(
         socket,
         %Component{} = component,
         fingerprints_tree,
         pending_components,
         components
       ) do
    {cid, pending_components, components} =
      traverse_component(socket, component, pending_components, components)
    {cid, fingerprints_tree, pending_components, components}
  end

  # Comprehension whose fingerprint matches: only the per-row dynamics are
  # emitted.
  defp traverse(
         socket,
         %Comprehension{dynamics: dynamics, fingerprint: fingerprint},
         fingerprint,
         pending_components,
         components
       ) do
    {dynamics, {pending_components, components}} =
      comprehension_to_iodata(socket, dynamics, pending_components, components)
    {%{@dynamics => dynamics}, fingerprint, pending_components, components}
  end

  # Comprehension mismatch (or first render): emit the dynamics plus the
  # shared static parts.
  defp traverse(
         socket,
         %Comprehension{static: static, dynamics: dynamics, fingerprint: fingerprint},
         _,
         pending_components,
         components
       ) do
    {dynamics, {pending_components, components}} =
      comprehension_to_iodata(socket, dynamics, pending_components, components)
    {%{@dynamics => dynamics, @static => static}, fingerprint, pending_components, components}
  end

  # `nil` entry: nothing changed, nothing to emit.
  defp traverse(_socket, nil, fingerprint_tree, pending_components, components) do
    {nil, fingerprint_tree, pending_components, components}
  end

  # Plain iodata leaf: serialize to a binary; leaves carry no fingerprint.
  defp traverse(_socket, iodata, _, pending_components, components) do
    {IO.iodata_to_binary(iodata), nil, pending_components, components}
  end
  # Folds over the list of dynamic parts, keyed by positional index. Each
  # part is traversed; a non-nil serialized form is stored in the diff under
  # its index, and its child fingerprint tree is stored (or removed) under
  # the same index for use by the next render.
  defp traverse_dynamic(socket, dynamic, children, pending_components, components) do
    Enum.reduce(dynamic, {0, %{}, children, pending_components, components}, fn
      entry, {counter, diff, children, pending_components, components} ->
        {serialized, child_fingerprint, pending_components, components} =
          traverse(socket, entry, Map.get(children, counter), pending_components, components)
        # `nil` means "unchanged": the index is omitted from the diff.
        diff =
          if serialized do
            Map.put(diff, counter, serialized)
          else
            diff
          end
        children =
          if child_fingerprint do
            Map.put(children, counter, child_fingerprint)
          else
            Map.delete(children, counter)
          end
        {counter + 1, diff, children, pending_components, components}
    end)
  end
  # Renders every row of a comprehension. Each cell is traversed with a
  # fresh `{nil, %{}}` fingerprint tree and the resulting tree is discarded,
  # so comprehension rows are always fully rendered.
  defp comprehension_to_iodata(socket, dynamics, pending_components, components) do
    Enum.map_reduce(dynamics, {pending_components, components}, fn list, acc ->
      Enum.map_reduce(list, acc, fn rendered, {pending_components, components} ->
        {diff, _, pending_components, components} =
          traverse(socket, rendered, {nil, %{}}, pending_components, components)
        {diff, {pending_components, components}}
      end)
    end)
  end
  ## Stateful components helpers

  # Registers the component (assigning a cid on first sight) and queues a
  # `{id, new?, assigns}` entry under its module so render_pending_components/5
  # can render it. Entries are prepended here and reversed at render time.
  defp traverse_component(
         socket,
         %Component{id: id, assigns: assigns, component: component},
         pending_components,
         components
       ) do
    {cid, new?, components} = ensure_component(socket, {component, id}, components)
    entry = {id, new?, assigns}
    pending_components = Map.update(pending_components, component, [entry], &[entry | &1])
    {cid, pending_components, components}
  end
  # Looks up `{component, id}` in the components state. On first sight the
  # component is mounted, assigned the next cid (taken from the `uuids`
  # counter) and stored. Returns `{cid, new?, components}`.
  defp ensure_component(socket, {component, _} = id, {id_to_components, cid_to_ids, uuids}) do
    case id_to_components do
      %{^id => {cid, _assigns, _private, _component_prints}} ->
        {cid, false, {id_to_components, cid_to_ids, uuids}}
      %{} ->
        cid = uuids
        socket = mount_component(socket, component)
        id_to_components = Map.put(id_to_components, id, dump_component(socket, cid))
        cid_to_ids = Map.put(cid_to_ids, cid, id)
        {cid, true, {id_to_components, cid_to_ids, uuids + 1}}
    end
  end
  # Builds a pristine component socket: empty assigns and fingerprints,
  # private data restricted to :conn_session, and an empty flash. Then
  # invokes the component's optional mount callback.
  defp mount_component(socket, component) do
    socket =
      configure_socket_for_component(
        socket,
        %{},
        Map.take(socket.private, [:conn_session]),
        new_fingerprints()
      )
      |> Utils.assign(:flash, %{})
    Utils.maybe_call_mount!(socket, component, [socket])
  end
defp configure_socket_for_component(socket, assigns, private, prints) do
%{
socket
| assigns: assigns,
private: private,
fingerprints: prints,
changed: %{}
}
end
defp dump_component(socket, cid) do
{cid, socket.assigns, socket.private, socket.fingerprints}
end
  ## Component rendering

  # Base case: recursion stops once no components are left to render.
  defp render_pending_components(_, pending_components, _seen_ids, component_diffs, components)
       when map_size(pending_components) == 0 do
    {component_diffs, components}
  end

  # Renders every queued component entry, then recurses because rendering a
  # component can enqueue further (nested) components. `seen_ids` guards
  # against the same {component, id} being rendered twice.
  defp render_pending_components(
         socket,
         pending_components,
         seen_ids,
         component_diffs,
         components
       ) do
    {id_to_components, _, _} = components
    acc = {{%{}, component_diffs, components}, seen_ids}
    {{pending_components, component_diffs, components}, seen_ids} =
      Enum.reduce(pending_components, acc, fn {component, entries}, acc ->
        # Entries were prepended when queued, so reverse to process them in
        # submission order; run the optional preload over the whole batch.
        entries = maybe_preload_components(component, Enum.reverse(entries))
        Enum.reduce(entries, acc, fn {id, new?, new_assigns}, {triplet, seen_ids} ->
          {pending_components, component_diffs, components} = triplet
          id = {component, id}
          %{^id => {cid, assigns, private, component_prints}} = id_to_components
          if Map.has_key?(seen_ids, id) do
            raise "found duplicate ID #{inspect(elem(id, 1))} " <>
                    "for component #{inspect(elem(id, 0))} when rendering template"
          end
          # Rebuild the component socket, apply the update callback with the
          # freshly queued assigns, and render the result.
          triplet =
            socket
            |> configure_socket_for_component(assigns, private, component_prints)
            |> Utils.maybe_call_update!(component, new_assigns)
            |> render_component(id, cid, new?, pending_components, component_diffs, components)
          {triplet, Map.put(seen_ids, id, true)}
        end)
      end)
    render_pending_components(socket, pending_components, seen_ids, component_diffs, components)
  end
defp maybe_preload_components(component, entries) do
if function_exported?(component, :preload, 1) do
list_of_assigns = Enum.map(entries, fn {_id, _new?, new_assigns} -> new_assigns end)
result = component.preload(list_of_assigns)
zip_preloads(result, entries, component, result)
else
entries
end
end
defp maybe_call_preload!(module, assigns) do
if function_exported?(module, :preload, 1) do
[new_assigns] = module.preload([assigns])
new_assigns
else
assigns
end
end
defp zip_preloads([new_assigns | assigns], [{id, new?, _} | entries], component, preloaded)
when is_map(new_assigns) do
[{id, new?, new_assigns} | zip_preloads(assigns, entries, component, preloaded)]
end
defp zip_preloads([], [], _component, _preloaded) do
[]
end
defp zip_preloads(_, _, component, preloaded) do
raise ArgumentError,
"expected #{inspect(component)}.preload/1 to return a list of maps of the same length " <>
"as the list of assigns given, got: #{inspect(preloaded)}"
end
  # Renders a single component socket into `component_diffs` under `cid`.
  # Rendering is skipped when the component is neither new nor changed; in
  # every case the latest socket state is re-dumped into `id_to_components`.
  defp render_component(socket, id, cid, new?, pending_components, component_diffs, components) do
    {component, _} = id
    {socket, pending_components, component_diffs, {id_to_components, cid_to_ids, uuids}} =
      if new? or Utils.changed?(socket) do
        rendered = Utils.to_rendered(socket, component)
        {diff, component_prints, pending_components, components} =
          traverse(socket, rendered, socket.fingerprints, pending_components, components)
        # Store the new fingerprints and reset change tracking now that the
        # latest state has been diffed.
        socket = Utils.clear_changed(%{socket | fingerprints: component_prints})
        {socket, pending_components, Map.put(component_diffs, cid, diff), components}
      else
        {socket, pending_components, component_diffs, components}
      end
    id_to_components = Map.put(id_to_components, id, dump_component(socket, cid))
    {pending_components, component_diffs, {id_to_components, cid_to_ids, uuids}}
  end
defp fetch_cid(module, id, {id_to_components, _cid_to_ids, _} = _components) do
case Map.fetch(id_to_components, {module, id}) do
{:ok, {cid, _, _, _}} -> {:ok, cid}
:error -> :error
end
end
end
| 31.565762 | 100 | 0.665013 |
0340da7e11098b1eabecb86266cc5ab0349dc2ec | 2,000 | exs | Elixir | config/dev.exs | lulabad/murky | 8cf7b24c7d36c3b035da05cc289dac957e95e3d7 | [
"MIT"
] | 6 | 2020-08-17T18:49:15.000Z | 2021-11-21T08:22:06.000Z | config/dev.exs | lulabad/murky | 8cf7b24c7d36c3b035da05cc289dac957e95e3d7 | [
"MIT"
] | 5 | 2020-09-22T16:46:38.000Z | 2021-12-19T18:04:19.000Z | config/dev.exs | lulabad/murky | 8cf7b24c7d36c3b035da05cc289dac957e95e3d7 | [
"MIT"
] | 2 | 2021-11-22T20:12:11.000Z | 2021-12-19T18:12:01.000Z | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :murky, MurkyWeb.Endpoint,
http: [port: 5000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch",
"--watch-options-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
config :murky,
storage_path: System.get_env("STORAGE_PATH") || "wiki/"
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :murky, MurkyWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/murky_web/(live|views)/.*(ex)$",
~r"lib/murky_web/templates/.*(eex)$"
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 27.777778 | 68 | 0.68 |
0340e239dc8acf58c4a81a330ffbd054dfdaa9a4 | 744 | exs | Elixir | chapter2/exlivery/mix.exs | mCodex/rocketseat-ignite-elixir | bdb48db778c36b2325c75a41b4d6f7ef77b03cf5 | [
"MIT"
] | 1 | 2021-07-23T19:48:27.000Z | 2021-07-23T19:48:27.000Z | chapter2/exlivery/mix.exs | mCodex/rocketseat-ignite-elixir | bdb48db778c36b2325c75a41b4d6f7ef77b03cf5 | [
"MIT"
] | null | null | null | chapter2/exlivery/mix.exs | mCodex/rocketseat-ignite-elixir | bdb48db778c36b2325c75a41b4d6f7ef77b03cf5 | [
"MIT"
] | null | null | null | defmodule Exlivery.MixProject do
use Mix.Project
def project do
[
app: :exlivery,
version: "0.1.0",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
elixirc_paths: elixirc_paths(Mix.env()),
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:decimal, "~> 2.0"},
{:ex_machina, "~> 2.7.0"},
{:elixir_uuid, "~> 1.2"}
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
end
| 21.257143 | 62 | 0.561828 |
0340e698515d518804b2a6bbd5d2cce2ffd5532c | 769 | exs | Elixir | ex_ske/mix.exs | ohr486/chat-app-proto | 95484ea9252d3399d9b09412525de13dcd17497a | [
"MIT"
] | null | null | null | ex_ske/mix.exs | ohr486/chat-app-proto | 95484ea9252d3399d9b09412525de13dcd17497a | [
"MIT"
] | null | null | null | ex_ske/mix.exs | ohr486/chat-app-proto | 95484ea9252d3399d9b09412525de13dcd17497a | [
"MIT"
] | null | null | null | defmodule Chat.Mixfile do
use Mix.Project
def project do
[app: :chat,
version: "0.1.0",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger],
mod: {Chat.Application, []}]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[]
end
end
| 21.971429 | 79 | 0.617685 |
0340e8e3ebdaa24a975042cae137fc5cadc9b89d | 284 | ex | Elixir | lib/zaryn/p2p/message/get_balance.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | 1 | 2021-07-06T19:47:14.000Z | 2021-07-06T19:47:14.000Z | lib/zaryn/p2p/message/get_balance.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | lib/zaryn/p2p/message/get_balance.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | defmodule Zaryn.P2P.Message.GetBalance do
@moduledoc """
Represents a message to request the balance of a transaction
"""
@enforce_keys [:address]
defstruct [:address]
alias Zaryn.Crypto
@type t :: %__MODULE__{
address: Crypto.versioned_hash()
}
end
| 20.285714 | 62 | 0.676056 |
0340eb672c6e268db6e4f53af96b2837401292ff | 462 | ex | Elixir | backend/apps/students_crm_v2_web/lib/students_crm_v2_web/views/error_view.ex | KyivKrishnaAcademy/students_crm_v2 | e0ad9b3c5e52dfef5ab8f9179f3c593f935786e6 | [
"MIT"
] | null | null | null | backend/apps/students_crm_v2_web/lib/students_crm_v2_web/views/error_view.ex | KyivKrishnaAcademy/students_crm_v2 | e0ad9b3c5e52dfef5ab8f9179f3c593f935786e6 | [
"MIT"
] | 50 | 2018-07-29T09:17:35.000Z | 2019-02-26T05:23:34.000Z | backend/apps/students_crm_v2_web/lib/students_crm_v2_web/views/error_view.ex | KyivKrishnaAcademy/students_crm_v2 | e0ad9b3c5e52dfef5ab8f9179f3c593f935786e6 | [
"MIT"
] | null | null | null | defmodule StudentsCrmV2Web.ErrorView do
use StudentsCrmV2Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Internal server error"
end
def render("401.json", _assigns) do
%{error: :unauthorized}
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render("500.html", assigns)
end
end
| 21 | 47 | 0.69697 |
0340feadcf4928150fb101cebe413e1efd8397f7 | 30 | ex | Elixir | lib/ex_acn/sdt.ex | willpenington/eacn | 774f873c28cb070a2f8313bf7557cb81d1f10bd9 | [
"MIT"
] | 1 | 2016-08-28T21:58:57.000Z | 2016-08-28T21:58:57.000Z | lib/ex_acn/sdt.ex | willpenington/eacn | 774f873c28cb070a2f8313bf7557cb81d1f10bd9 | [
"MIT"
] | null | null | null | lib/ex_acn/sdt.ex | willpenington/eacn | 774f873c28cb070a2f8313bf7557cb81d1f10bd9 | [
"MIT"
] | null | null | null | defmodule ExACN.SDT do
end
| 7.5 | 22 | 0.733333 |
03410cb31d4f7c424fcbcf818bc8afc0209c6218 | 1,418 | ex | Elixir | server/apps/boardr_api/lib/boardr_api/endpoint.ex | AlphaHydrae/boardr | 98eed02801f88c065a24bf13051c5cf96270a5f7 | [
"MIT"
] | 1 | 2021-04-08T17:26:27.000Z | 2021-04-08T17:26:27.000Z | server/apps/boardr_api/lib/boardr_api/endpoint.ex | AlphaHydrae/boardr | 98eed02801f88c065a24bf13051c5cf96270a5f7 | [
"MIT"
] | 1 | 2022-02-13T05:50:46.000Z | 2022-02-13T05:50:46.000Z | server/apps/boardr_api/lib/boardr_api/endpoint.ex | AlphaHydrae/boardr | 98eed02801f88c065a24bf13051c5cf96270a5f7 | [
"MIT"
] | null | null | null | defmodule BoardrApi.Endpoint do
use Phoenix.Endpoint, otp_app: :boardr
alias Boardr.Config
def init(:supervisor, config) do
with {:ok, port} <-
Config.get_required_env(
"BOARDR_PORT",
:port_missing,
System.get_env("PORT", "4000")
),
{:ok, valid_port} <-
Config.parse_port(port, :port_invalid),
base_url = System.get_env("BOARDR_BASE_URL", "http://localhost:#{valid_port}"),
{:ok, base_url_options} <-
Config.parse_http_url(base_url, :base_url_invalid) do
{
:ok,
Keyword.merge(
config,
http: [
port: valid_port
],
url: base_url_options |> Map.take([:host, :path, :port, :scheme]) |> Map.to_list()
)
}
end
end
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
plug(Phoenix.CodeReloader)
end
plug(Plug.RequestId)
plug(Plug.Telemetry, event_prefix: [:phoenix, :endpoint])
plug(Plug.Head)
plug(Corsica, allow_headers: ["authorization", "content-type"], origins: {__MODULE__, :allow_origin})
plug(BoardrApi.Router)
def allow_origin(origin) do
allowed_origins =
:boardr
|> Application.fetch_env!(__MODULE__)
|> Keyword.fetch!(:allowed_origins)
origin in allowed_origins
end
end
| 26.754717 | 103 | 0.61213 |
03410fb41fb216bac1a99dd08b89371812d58843 | 405 | ex | Elixir | apps/api_web/lib/api_web/views/service_view.ex | losvedir/api | a9bacff911704f53fac238c0eb23b7d6d6d3b70e | [
"MIT"
] | null | null | null | apps/api_web/lib/api_web/views/service_view.ex | losvedir/api | a9bacff911704f53fac238c0eb23b7d6d6d3b70e | [
"MIT"
] | null | null | null | apps/api_web/lib/api_web/views/service_view.ex | losvedir/api | a9bacff911704f53fac238c0eb23b7d6d6d3b70e | [
"MIT"
] | null | null | null | defmodule ApiWeb.ServiceView do
use ApiWeb.Web, :api_view
location("/services/:id")
attributes([
:start_date,
:end_date,
:valid_days,
:description,
:schedule_name,
:schedule_type,
:schedule_typicality,
:rating_start_date,
:rating_end_date,
:rating_description,
:added_dates,
:added_dates_notes,
:removed_dates,
:removed_dates_notes
])
end
| 17.608696 | 31 | 0.674074 |
03412aedb446aa334d589c3f7cc30b181b4dad9a | 514 | ex | Elixir | apps/chat/lib/chat/room/supervisor.ex | leohahn/chat_server | 77ef971e6c09d0051541fdc44ec5cef5dd82b82b | [
"Unlicense"
] | 1 | 2016-08-17T09:09:44.000Z | 2016-08-17T09:09:44.000Z | apps/chat/lib/chat/room/supervisor.ex | leohahn/chat_server | 77ef971e6c09d0051541fdc44ec5cef5dd82b82b | [
"Unlicense"
] | null | null | null | apps/chat/lib/chat/room/supervisor.ex | leohahn/chat_server | 77ef971e6c09d0051541fdc44ec5cef5dd82b82b | [
"Unlicense"
] | null | null | null | defmodule Chat.Room.Supervisor do
use Supervisor
@name Chat.Room.Supervisor
def start_link do
Supervisor.start_link(__MODULE__, :ok, name: @name)
end
@doc """
Creates a new room, with `admin` as the first user.
"""
def start_room(chat_name, admin, admin_pid) do
Supervisor.start_child(@name, [chat_name, admin, admin_pid])
end
def init(:ok) do
children = [
worker(Chat.Room, [], restart: :temporary)
]
supervise(children, strategy: :simple_one_for_one)
end
end
| 20.56 | 64 | 0.680934 |
034146fe3b24c0c1db96276be0e885e9991b6e37 | 243 | ex | Elixir | lib/xler/native.ex | thiamsantos/xler | 6f3ccbb536c42971a0f98dfd6f13970d32983d89 | [
"MIT"
] | 4 | 2020-09-04T14:57:22.000Z | 2021-09-03T13:34:08.000Z | lib/xler/native.ex | thiamsantos/xler | 6f3ccbb536c42971a0f98dfd6f13970d32983d89 | [
"MIT"
] | 20 | 2020-07-18T10:38:05.000Z | 2022-03-24T04:03:57.000Z | lib/xler/native.ex | thiamsantos/xler | 6f3ccbb536c42971a0f98dfd6f13970d32983d89 | [
"MIT"
] | 3 | 2020-10-20T12:38:48.000Z | 2022-01-25T05:33:32.000Z | defmodule Xler.Native do
use Rustler, otp_app: :xler, crate: :xler_native
@moduledoc false
def parse(_filename, _worksheet), do: error()
def worksheets(_filename), do: error()
defp error, do: :erlang.nif_error(:nif_not_loaded)
end
| 24.3 | 52 | 0.73251 |
03414c7392d10f1a2d11162eb161fb1252169323 | 25,516 | exs | Elixir | spec/assertions/dates_times/be_close_to_spec.exs | bblaszkow06/espec | 4d9819ca5c68c6eb70276c7d9c9630ded01ba778 | [
"Apache-2.0"
] | null | null | null | spec/assertions/dates_times/be_close_to_spec.exs | bblaszkow06/espec | 4d9819ca5c68c6eb70276c7d9c9630ded01ba778 | [
"Apache-2.0"
] | null | null | null | spec/assertions/dates_times/be_close_to_spec.exs | bblaszkow06/espec | 4d9819ca5c68c6eb70276c7d9c9630ded01ba778 | [
"Apache-2.0"
] | null | null | null | defmodule ESpec.Assertions.DatesTimes.BeCloseToSpec do
use ESpec, async: true
describe "ESpec.Assertions.BeCloseTo" do
context "Success with Date with a granularity of years" do
it "checks success with `to`" do
message = expect(~D[2017-08-07]).to(be_close_to(~D[2018-08-07], {:years, 1}))
expect(message)
|> to(eq "`~D[2017-08-07]` is close to `~D[2018-08-07]` with delta `{:years, 1}`.")
end
it "checks success with `not_to`" do
message = expect(~D[2017-08-07]).to_not(be_close_to(~D[2020-08-07], {:years, 2}))
expect(message)
|> to(eq "`~D[2017-08-07]` is not close to `~D[2020-08-07]` with delta `{:years, 2}`.")
end
it do: expect(~D[2017-08-07]).to(be_close_to(~D[2020-08-07], {:years, 3}))
end
context "Success with Date with a granularity of months" do
it "checks success with `to`" do
message = expect(~D[2017-08-07]).to(be_close_to(~D[2017-09-07], {:months, 1}))
expect(message)
|> to(eq "`~D[2017-08-07]` is close to `~D[2017-09-07]` with delta `{:months, 1}`.")
end
it "checks success with `not_to`" do
message = expect(~D[2017-08-07]).to_not(be_close_to(~D[2020-08-07], {:months, 2}))
expect(message)
|> to(eq "`~D[2017-08-07]` is not close to `~D[2020-08-07]` with delta `{:months, 2}`.")
end
it do: expect(~D[2017-08-07]).to(be_close_to(~D[2017-01-07], {:months, 7}))
end
context "Success with Date with a granularity of weeks" do
it "checks success with `to`" do
message = expect(~D[2017-08-07]).to(be_close_to(~D[2017-08-14], {:weeks, 1}))
expect(message)
|> to(eq "`~D[2017-08-07]` is close to `~D[2017-08-14]` with delta `{:weeks, 1}`.")
end
it "checks success with `not_to`" do
message = expect(~D[2017-08-07]).to_not(be_close_to(~D[2020-08-07], {:weeks, 2}))
expect(message)
|> to(eq "`~D[2017-08-07]` is not close to `~D[2020-08-07]` with delta `{:weeks, 2}`.")
end
it do: expect(~D[2017-08-07]).to(be_close_to(~D[2017-08-14], {:weeks, 1}))
end
context "Success with Date with a granularity of days" do
it "checks success with `to`" do
message = expect(~D[2017-08-07]).to(be_close_to(~D[2017-08-06], {:days, 1}))
expect(message)
|> to(eq "`~D[2017-08-07]` is close to `~D[2017-08-06]` with delta `{:days, 1}`.")
end
it "checks success with `not_to`" do
message = expect(~D[2017-08-07]).to_not(be_close_to(~D[2017-08-19], {:days, 1}))
expect(message)
|> to(eq "`~D[2017-08-07]` is not close to `~D[2017-08-19]` with delta `{:days, 1}`.")
end
it do: expect(~D[2017-08-07]).to(be_close_to(~D[2017-10-07], {:days, 61}))
end
context "Errors with Date" do
context "with `to`" do
before do
{:shared,
expectation: fn ->
expect(~D[2017-08-07]).to(be_close_to(~D[2050-08-19], {:years, 3}))
end,
message:
"Expected `~D[2017-08-07]` to be close to `~D[2050-08-19]` with delta `{:years, 3}`, but it isn't. The actual delta is {:years, 33}."}
end
it_behaves_like(CheckErrorSharedSpec)
end
context "with `not_to`" do
before do
{:shared,
expectation: fn ->
expect(~D[2017-08-07]).to_not(be_close_to(~D[2017-10-07], {:months, 1}))
end,
message:
"Expected `~D[2017-08-07]` not to be close to `~D[2017-10-07]` with delta `{:months, 1}`, but it is. The actual delta is {:months, 2}"}
end
it_behaves_like(CheckErrorSharedSpec)
end
end
context "Success with NaiveDateTime with a granularity of years" do
it "checks success with `to`" do
message =
expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2018-08-07 01:10:10], {:years, 1}))
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is close to `~N[2018-08-07 01:10:10]` with delta `{:years, 1}`."
)
end
it "checks success with `not_to`" do
message =
expect(~N[2017-08-07 01:10:10]).to_not(
be_close_to(~N[2020-08-07 01:10:10], {:years, 2})
)
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is not close to `~N[2020-08-07 01:10:10]` with delta `{:years, 2}`."
)
end
it do: expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2020-08-07 01:10:10], {:years, 3}))
end
context "Success with NaiveDateTime with a granularity of months" do
it "checks success with `to`" do
message =
expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-09-07 01:10:10], {:months, 1}))
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is close to `~N[2017-09-07 01:10:10]` with delta `{:months, 1}`."
)
end
it "checks success with `not_to`" do
message =
expect(~N[2017-08-07 01:10:10]).to_not(
be_close_to(~N[2020-08-07 01:10:10], {:months, 2})
)
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is not close to `~N[2020-08-07 01:10:10]` with delta `{:months, 2}`."
)
end
it do:
expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-01-07 01:10:10], {:months, 7}))
end
context "Success with NaiveDateTime with a granularity of weeks" do
it "checks success with `to`" do
message =
expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-08-14 01:10:10], {:weeks, 1}))
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is close to `~N[2017-08-14 01:10:10]` with delta `{:weeks, 1}`."
)
end
it "checks success with `not_to`" do
message =
expect(~N[2017-08-07 01:10:10]).to_not(
be_close_to(~N[2020-08-07 01:10:10], {:weeks, 2})
)
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is not close to `~N[2020-08-07 01:10:10]` with delta `{:weeks, 2}`."
)
end
it do: expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-08-14 01:10:10], {:weeks, 1}))
end
context "Success with NaiveDateTime with a granularity of days" do
it "checks success with `to`" do
message =
expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-08-06 01:10:10], {:days, 1}))
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is close to `~N[2017-08-06 01:10:10]` with delta `{:days, 1}`."
)
end
it "checks success with `not_to`" do
message =
expect(~N[2017-08-07 01:10:10]).to_not(be_close_to(~N[2017-08-19 01:10:10], {:days, 1}))
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is not close to `~N[2017-08-19 01:10:10]` with delta `{:days, 1}`."
)
end
it do: expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-10-07 01:10:10], {:days, 61}))
end
context "Success with NaiveDateTime with a granularity of hours" do
it "checks success with `to`" do
message =
expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-08-07 02:10:10], {:hours, 1}))
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is close to `~N[2017-08-07 02:10:10]` with delta `{:hours, 1}`."
)
end
it "checks success with `not_to`" do
message =
expect(~N[2017-08-07 01:10:10]).to_not(
be_close_to(~N[2017-08-19 01:10:10], {:hours, 1})
)
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is not close to `~N[2017-08-19 01:10:10]` with delta `{:hours, 1}`."
)
end
it do: expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-08-07 02:10:10], {:hours, 1}))
end
context "Success with NaiveDateTime with a granularity of minutes" do
it "checks success with `to`" do
message =
expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-08-07 01:50:10], {:minutes, 40}))
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is close to `~N[2017-08-07 01:50:10]` with delta `{:minutes, 40}`."
)
end
it "checks success with `not_to`" do
message =
expect(~N[2017-08-07 01:10:10]).to_not(
be_close_to(~N[2017-08-07 01:51:10], {:minutes, 40})
)
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is not close to `~N[2017-08-07 01:51:10]` with delta `{:minutes, 40}`."
)
end
it do:
expect(~N[2017-08-07 01:10:10]).to(
be_close_to(~N[2017-08-07 01:50:10], {:minutes, 40})
)
end
context "Success with NaiveDateTime with a granularity of seconds" do
it "checks success with `to`" do
message =
expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-08-07 01:10:11], {:seconds, 1}))
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is close to `~N[2017-08-07 01:10:11]` with delta `{:seconds, 1}`."
)
end
it "checks success with `not_to`" do
message =
expect(~N[2017-08-07 01:10:10]).to_not(
be_close_to(~N[2017-08-07 01:10:12], {:seconds, 1})
)
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10]` is not close to `~N[2017-08-07 01:10:12]` with delta `{:seconds, 1}`."
)
end
it do:
expect(~N[2017-08-07 01:10:10]).to(be_close_to(~N[2017-08-07 01:10:11], {:seconds, 1}))
end
context "Success with NaiveDateTime with a granularity of microseconds" do
it "checks success with `to`" do
message =
expect(~N[2017-08-07 01:10:10.000001]).to(
be_close_to(~N[2017-08-07 01:10:10.000003], {:microseconds, 2})
)
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10.000001]` is close to `~N[2017-08-07 01:10:10.000003]` with delta `{:microseconds, 2}`."
)
end
it "checks success with `not_to`" do
message =
expect(~N[2017-08-07 01:10:10.000001]).to_not(
be_close_to(~N[2017-08-07 01:10:10.000003], {:microseconds, 1})
)
expect(message)
|> to(
eq "`~N[2017-08-07 01:10:10.000001]` is not close to `~N[2017-08-07 01:10:10.000003]` with delta `{:microseconds, 1}`."
)
end
it do:
expect(~N[2017-08-07 01:10:10.000001]).to(
be_close_to(~N[2017-08-07 01:10:10.000003], {:microseconds, 2})
)
end
context "Errors with NaiveDateTime" do
context "with `to`" do
before do
{:shared,
expectation: fn ->
expect(~N[2017-08-07 01:10:10]).to(
be_close_to(~N[2017-08-07 01:10:15], {:seconds, 3})
)
end,
message:
"Expected `~N[2017-08-07 01:10:10]` to be close to `~N[2017-08-07 01:10:15]` with delta `{:seconds, 3}`, but it isn't. The actual delta is {:seconds, 5}."}
end
it_behaves_like(CheckErrorSharedSpec)
end
context "with `not_to`" do
before do
{:shared,
expectation: fn ->
expect(~N[2017-08-07 01:10:10]).to_not(
be_close_to(~N[2017-08-07 01:10:15], {:seconds, 5})
)
end,
message:
"Expected `~N[2017-08-07 01:10:10]` not to be close to `~N[2017-08-07 01:10:15]` with delta `{:seconds, 5}`, but it isn't. The actual delta is {:seconds, 5}."}
end
it_behaves_like(CheckErrorSharedSpec)
end
end
context "Success with Time with a granularity of hours" do
it "checks success with `to`" do
message = expect(~T[01:10:10]).to(be_close_to(~T[02:10:10], {:hours, 1}))
expect(message)
|> to(eq "`~T[01:10:10]` is close to `~T[02:10:10]` with delta `{:hours, 1}`.")
end
it "checks success with `not_to`" do
message = expect(~T[01:10:10]).to_not(be_close_to(~T[03:10:10], {:hours, 1}))
expect(message)
|> to(eq "`~T[01:10:10]` is not close to `~T[03:10:10]` with delta `{:hours, 1}`.")
end
it do: expect(~T[01:10:10]).to(be_close_to(~T[02:10:10], {:hours, 1}))
end
context "Success with Time with a granularity of minutes" do
it "checks success with `to`" do
message = expect(~T[01:10:10]).to(be_close_to(~T[01:50:10], {:minutes, 40}))
expect(message)
|> to(eq "`~T[01:10:10]` is close to `~T[01:50:10]` with delta `{:minutes, 40}`.")
end
it "checks success with `not_to`" do
message = expect(~T[01:10:10]).to_not(be_close_to(~T[01:51:10], {:minutes, 40}))
expect(message)
|> to(eq "`~T[01:10:10]` is not close to `~T[01:51:10]` with delta `{:minutes, 40}`.")
end
it do: expect(~T[01:10:10]).to(be_close_to(~T[01:50:10], {:minutes, 40}))
end
context "Success with Time with a granularity of seconds" do
it "checks success with `to`" do
message = expect(~T[01:10:10]).to(be_close_to(~T[01:10:11], {:seconds, 1}))
expect(message)
|> to(eq "`~T[01:10:10]` is close to `~T[01:10:11]` with delta `{:seconds, 1}`.")
end
it "checks success with `not_to`" do
message = expect(~T[01:10:10]).to_not(be_close_to(~T[01:10:12], {:seconds, 1}))
expect(message)
|> to(eq "`~T[01:10:10]` is not close to `~T[01:10:12]` with delta `{:seconds, 1}`.")
end
it do: expect(~T[01:10:10]).to(be_close_to(~T[01:10:11], {:seconds, 1}))
end
context "Success with Time with a granularity of microseconds" do
it "checks success with `to`" do
message =
expect(~T[01:10:10.000001]).to(be_close_to(~T[01:10:10.000002], {:microseconds, 1}))
expect(message)
|> to(
eq "`~T[01:10:10.000001]` is close to `~T[01:10:10.000002]` with delta `{:microseconds, 1}`."
)
end
it "checks success with `not_to`" do
message =
expect(~T[01:10:10.000001]).to_not(be_close_to(~T[01:10:11.000002], {:microseconds, 1}))
expect(message)
|> to(
eq "`~T[01:10:10.000001]` is not close to `~T[01:10:11.000002]` with delta `{:microseconds, 1}`."
)
end
it do: expect(~T[01:10:10.000001]).to(be_close_to(~T[01:10:10.000002], {:microseconds, 1}))
end
context "Errors with Time" do
context "with `to`" do
before do
{:shared,
expectation: fn ->
expect(~T[01:10:10.000001]).to(be_close_to(~T[01:10:10.000006], {:microseconds, 3}))
end,
message:
"Expected `~T[01:10:10.000001]` to be close to `~T[01:10:10.000006]` with delta `{:microseconds, 3}`, but it isn't. The actual delta is {:microseconds, 5}."}
end
it_behaves_like(CheckErrorSharedSpec)
end
context "with `not_to`" do
before do
{:shared,
expectation: fn ->
expect(~T[01:10:10.000001]).to_not(
be_close_to(~T[01:10:10.000006], {:microseconds, 5})
)
end,
message:
"Expected `~T[01:10:10.000001]` not to be close to `~T[01:10:10.000006]` with delta `{:microseconds, 5}`, but it isn't. The actual delta is {:microseconds, 5}."}
end
it_behaves_like(CheckErrorSharedSpec)
end
end
# Shared fixtures for the DateTime contexts below: a base UTC instant and one
# exactly three years later.
let :datetime1, do: DateTime.Extension.from_naive!(~N[2017-08-07 01:10:10.000001], "Etc/UTC")
let :datetime3, do: DateTime.Extension.from_naive!(~N[2020-08-07 01:10:10.000001], "Etc/UTC")

# Each context below overrides :datetime2 with an instant offset from
# :datetime1 by exactly the granularity under test.
context "Success with DateTime with a granularity of years" do
  let :datetime2,
    do: DateTime.Extension.from_naive!(~N[2018-08-07 01:10:10.000001], "Etc/UTC")

  it "checks success with `to`" do
    message = expect(datetime1()).to(be_close_to(datetime2(), {:years, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is close to `#{inspect(datetime2())}` with delta `{:years, 1}`."
    )
  end

  it "checks success with `not_to`" do
    message = expect(datetime1()).to_not(be_close_to(datetime3(), {:years, 2}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is not close to `#{inspect(datetime3())}` with delta `{:years, 2}`."
    )
  end

  it do: expect(datetime1()).to(be_close_to(datetime3(), {:years, 3}))
end

context "Success with DateTime with a granularity of months" do
  let :datetime2,
    do: DateTime.Extension.from_naive!(~N[2017-09-07 01:10:10.000001], "Etc/UTC")

  it "checks success with `to`" do
    message = expect(datetime1()).to(be_close_to(datetime2(), {:months, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is close to `#{inspect(datetime2())}` with delta `{:months, 1}`."
    )
  end

  it "checks success with `not_to`" do
    message = expect(datetime1()).to_not(be_close_to(datetime3(), {:months, 2}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is not close to `#{inspect(datetime3())}` with delta `{:months, 2}`."
    )
  end

  it do: expect(datetime1()).to(be_close_to(datetime2(), {:months, 1}))
end

context "Success with DateTime with a granularity of weeks" do
  let :datetime2,
    do: DateTime.Extension.from_naive!(~N[2017-08-14 01:10:10.000001], "Etc/UTC")

  it "checks success with `to`" do
    message = expect(datetime1()).to(be_close_to(datetime2(), {:weeks, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is close to `#{inspect(datetime2())}` with delta `{:weeks, 1}`."
    )
  end

  it "checks success with `not_to`" do
    message = expect(datetime1()).to_not(be_close_to(datetime3(), {:weeks, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is not close to `#{inspect(datetime3())}` with delta `{:weeks, 1}`."
    )
  end

  it do: expect(datetime1()).to(be_close_to(datetime2(), {:weeks, 1}))
end

context "Success with DateTime with a granularity of days" do
  let :datetime2,
    do: DateTime.Extension.from_naive!(~N[2017-08-06 01:10:10.000001], "Etc/UTC")

  # Two months after datetime1; used to exercise a multi-day delta below.
  let :datetime4,
    do: DateTime.Extension.from_naive!(~N[2017-10-07 01:10:10.000001], "Etc/UTC")

  it "checks success with `to`" do
    message = expect(datetime1()).to(be_close_to(datetime2(), {:days, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is close to `#{inspect(datetime2())}` with delta `{:days, 1}`."
    )
  end

  it "checks success with `not_to`" do
    message = expect(datetime1()).to_not(be_close_to(datetime3(), {:days, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is not close to `#{inspect(datetime3())}` with delta `{:days, 1}`."
    )
  end

  it do: expect(datetime1()).to(be_close_to(datetime4(), {:days, 61}))
end

context "Success with DateTime with a granularity of hours" do
  let :datetime2,
    do: DateTime.Extension.from_naive!(~N[2017-08-07 02:10:10.000001], "Etc/UTC")

  it "checks success with `to`" do
    message = expect(datetime1()).to(be_close_to(datetime2(), {:hours, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is close to `#{inspect(datetime2())}` with delta `{:hours, 1}`."
    )
  end

  it "checks success with `not_to`" do
    message = expect(datetime1()).to_not(be_close_to(datetime3(), {:hours, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is not close to `#{inspect(datetime3())}` with delta `{:hours, 1}`."
    )
  end

  it do: expect(datetime1()).to(be_close_to(datetime2(), {:hours, 1}))
end

context "Success with DateTime with a granularity of minutes" do
  let :datetime2,
    do: DateTime.Extension.from_naive!(~N[2017-08-07 01:50:10.000001], "Etc/UTC")

  it "checks success with `to`" do
    message = expect(datetime1()).to(be_close_to(datetime2(), {:minutes, 40}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is close to `#{inspect(datetime2())}` with delta `{:minutes, 40}`."
    )
  end

  it "checks success with `not_to`" do
    message = expect(datetime1()).to_not(be_close_to(datetime2(), {:minutes, 39}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is not close to `#{inspect(datetime2())}` with delta `{:minutes, 39}`."
    )
  end

  it do: expect(datetime1()).to(be_close_to(datetime2(), {:minutes, 40}))
end

context "Success with DateTime with a granularity of seconds" do
  let :datetime2,
    do: DateTime.Extension.from_naive!(~N[2017-08-07 01:10:12.000001], "Etc/UTC")

  it "checks success with `to`" do
    message = expect(datetime1()).to(be_close_to(datetime2(), {:seconds, 2}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is close to `#{inspect(datetime2())}` with delta `{:seconds, 2}`."
    )
  end

  it "checks success with `not_to`" do
    message = expect(datetime1()).to_not(be_close_to(datetime2(), {:seconds, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is not close to `#{inspect(datetime2())}` with delta `{:seconds, 1}`."
    )
  end

  it do: expect(datetime1()).to(be_close_to(datetime2(), {:seconds, 2}))
end

context "Success with DateTime with a granularity of microseconds" do
  let :datetime2,
    do: DateTime.Extension.from_naive!(~N[2017-08-07 01:10:10.000003], "Etc/UTC")

  it "checks success with `to`" do
    message = expect(datetime1()).to(be_close_to(datetime2(), {:microseconds, 2}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is close to `#{inspect(datetime2())}` with delta `{:microseconds, 2}`."
    )
  end

  it "checks success with `not_to`" do
    message = expect(datetime1()).to_not(be_close_to(datetime2(), {:microseconds, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime1())}` is not close to `#{inspect(datetime2())}` with delta `{:microseconds, 1}`."
    )
  end

  it do: expect(datetime1()).to(be_close_to(datetime2(), {:microseconds, 2}))
end
# Two struct-literal DateTimes representing the same wall-clock comparison
# across time zones (PST vs EST, both with a 1h std offset).
context "Success with DateTime with utc and std offsets to represent time zone differences" do
  let :datetime_pst,
    do: %DateTime{
      year: 2017,
      month: 3,
      day: 15,
      hour: 1,
      minute: 30,
      second: 30,
      microsecond: {1, 6},
      std_offset: 1 * 3600,
      utc_offset: -8 * 3600,
      zone_abbr: "PST",
      time_zone: "America/Los_Angeles"
    }

  let :datetime_est,
    do: %DateTime{
      year: 2017,
      month: 3,
      day: 15,
      hour: 6,
      minute: 30,
      second: 30,
      microsecond: {1, 6},
      std_offset: 1 * 3600,
      utc_offset: -5 * 3600,
      zone_abbr: "EST",
      time_zone: "America/New_York"
    }

  it "checks success with `to`" do
    message = expect(datetime_pst()).to(be_close_to(datetime_est(), {:hours, 2}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime_pst())}` is close to `#{inspect(datetime_est())}` with delta `{:hours, 2}`."
    )
  end

  it "checks success with `not_to`" do
    message = expect(datetime_pst()).to_not(be_close_to(datetime_est(), {:hours, 1}))
    expect(message)
    |> to(
      eq "`#{inspect(datetime_pst())}` is not close to `#{inspect(datetime_est())}` with delta `{:hours, 1}`."
    )
  end

  it do: expect(datetime_pst()).to(be_close_to(datetime_est(), {:hours, 2}))
end

# Failure cases for DateTime: `before` stores the failing expectation and the
# exact error message; CheckErrorSharedSpec verifies the raised error matches.
context "Errors with DateTime" do
  let :datetime1,
    do: DateTime.Extension.from_naive!(~N[2017-08-07 01:10:12.000001], "Etc/UTC")

  let :datetime2,
    do: DateTime.Extension.from_naive!(~N[2017-08-07 01:10:12.000006], "Etc/UTC")

  context "with `to`" do
    before do
      {:shared,
       expectation: fn ->
         expect(datetime1()).to(be_close_to(datetime2(), {:microseconds, 3}))
       end,
       message:
         "Expected `#{inspect(datetime1())}` to be close to `#{inspect(datetime2())}` with delta `{:microseconds, 3}`, but it isn't. The actual delta is {:microseconds, 5}."}
    end

    it_behaves_like(CheckErrorSharedSpec)
  end

  context "with `not_to`" do
    before do
      {:shared,
       expectation: fn ->
         expect(datetime1()).to_not(be_close_to(datetime2(), {:microseconds, 5}))
       end,
       message:
         "Expected `#{inspect(datetime1())}` not to be close to `#{inspect(datetime2())}` with delta `{:microseconds, 5}`, but it isn't. The actual delta is {:microseconds, 5}."}
    end

    it_behaves_like(CheckErrorSharedSpec)
  end
end
end
end
| 33.840849 | 182 | 0.548793 |
0341597716272af73154739d3904cf2b0285e435 | 361 | ex | Elixir | lib/hive/application.ex | hive-fleet/hive-state | 12478d8540cc93863237f6456ade0de68c56501e | [
"Apache-2.0"
] | 5 | 2020-05-05T17:10:44.000Z | 2021-03-24T20:37:48.000Z | lib/hive/application.ex | hive-fleet/hive-state | 12478d8540cc93863237f6456ade0de68c56501e | [
"Apache-2.0"
] | 10 | 2020-05-09T22:06:07.000Z | 2020-05-09T22:10:14.000Z | lib/hive/application.ex | hive-fleet/hive-state | 12478d8540cc93863237f6456ade0de68c56501e | [
"Apache-2.0"
] | 2 | 2020-05-06T14:47:33.000Z | 2021-06-11T21:12:29.000Z | defmodule Hive.Application do
@moduledoc false
use Application
def start(_type, _args) do
children = [
{Hive.VehicleSupervisor, []},
{Registry, keys: :unique, name: Hive.VehicleRegistry},
{Hive.TelemetryStore, []}
]
opts = [strategy: :one_for_one, name: Hive.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 22.5625 | 60 | 0.66759 |
034179bde281eada0b536b24d073a0eb1fcd88de | 12,761 | exs | Elixir | test/elixir/test/design_docs_test.exs | frapa/couchdb | 6c28960f0fe2eec06aca7d58fd73f3c7cdbe1112 | [
"Apache-2.0"
] | null | null | null | test/elixir/test/design_docs_test.exs | frapa/couchdb | 6c28960f0fe2eec06aca7d58fd73f3c7cdbe1112 | [
"Apache-2.0"
] | null | null | null | test/elixir/test/design_docs_test.exs | frapa/couchdb | 6c28960f0fe2eec06aca7d58fd73f3c7cdbe1112 | [
"Apache-2.0"
] | null | null | null | defmodule DesignDocsTest do
use CouchTestCase
@moduletag :design_docs
@moduletag kind: :single_node
  # Fixture design document installed by setup_all. It exercises CommonJS
  # module resolution (`whatever`), map/reduce views, and show functions.
  @design_doc %{
    _id: "_design/test",
    language: "javascript",
    autoupdate: false,
    # CommonJS modules referenced by the shows/views below via require().
    whatever: %{
      stringzone: "exports.string = 'plankton';",
      commonjs: %{
        whynot: """
        exports.test = require('../stringzone');
        exports.foo = require('whatever/stringzone');
        """,
        upper: """
        exports.testing = require('./whynot').test.string.toUpperCase()+
        module.id+require('./whynot').foo.string
        """,
        circular_one: "require('./circular_two'); exports.name = 'One';",
        circular_two: "require('./circular_one'); exports.name = 'Two';"
      },
      # paths relative to parent
      idtest1: %{
        a: %{
          b: %{d: "module.exports = require('../c/e').id;"},
          c: %{e: "exports.id = module.id;"}
        }
      },
      # multiple paths relative to parent
      idtest2: %{
        a: %{
          b: %{d: "module.exports = require('../../a/c/e').id;"},
          c: %{e: "exports.id = module.id;"}
        }
      },
      # paths relative to module
      idtest3: %{
        a: %{
          b: "module.exports = require('./c/d').id;",
          c: %{
            d: "module.exports = require('./e');",
            e: "exports.id = module.id;"
          }
        }
      },
      # paths relative to module and parent
      idtest4: %{
        a: %{
          b: "module.exports = require('../a/./c/d').id;",
          c: %{
            d: "module.exports = require('./e');",
            e: "exports.id = module.id;"
          }
        }
      },
      # paths relative to root
      idtest5: %{
        a: "module.exports = require('whatever/idtest5/b').id;",
        b: "exports.id = module.id;"
      }
    },
    # Map/reduce views used throughout the tests below.
    views: %{
      all_docs_twice: %{
        map: """
        function(doc) {
          emit(doc.integer, null);
          emit(doc.integer, null);
        }
        """
      },
      no_docs: %{
        map: """
        function(doc) {}
        """
      },
      single_doc: %{
        map: """
        function(doc) {
          if (doc._id === "1") {
            emit(1, null);
          }
        }
        """
      },
      summate: %{
        map: """
        function(doc) {
          emit(doc.integer, doc.integer);
        }
        """,
        reduce: """
        function(keys, values) {
          return sum(values);
        }
        """
      },
      # Exact copy of `summate`; used to check shared-index reduce behavior.
      summate2: %{
        map: """
        function(doc) {
          emit(doc.integer, doc.integer);
        }
        """,
        reduce: """
        function(keys, values) {
          return sum(values);
        }
        """
      },
      huge_src_and_results: %{
        map: """
        function(doc) {
          if (doc._id === "1") {
            emit("#{String.duplicate("a", 16)}", null);
          }
        }
        """,
        reduce: """
        function(keys, values) {
          return "#{String.duplicate("a", 16)}";
        }
        """
      },
      lib: %{
        baz: "exports.baz = 'bam';",
        foo: %{
          foo: "exports.foo = 'bar';",
          boom: "exports.boom = 'ok';",
          zoom: "exports.zoom = 'yeah';"
        }
      },
      commonjs: %{
        map: """
        function(doc) {
          emit(null, require('views/lib/foo/boom').boom);
        }
        """
      }
    },
    # Show functions; the idtestN shows return the resolved CommonJS module id.
    shows: %{
      simple: """
      function() {
        return 'ok';
      }
      """,
      requirey: """
      function() {
        var lib = require('whatever/commonjs/upper');
        return lib.testing;
      }
      """,
      circular: """
      function() {
        var lib = require('whatever/commonjs/upper');
        return JSON.stringify(this);
      }
      """,
      circular_require: """
      function() {
        return require('whatever/commonjs/circular_one').name;
      }
      """,
      idtest1: """
      function() {
        return require('whatever/idtest1/a/b/d');
      }
      """,
      idtest2: """
      function() {
        return require('whatever/idtest2/a/b/d');
      }
      """,
      idtest3: """
      function() {
        return require('whatever/idtest3/a/b');
      }
      """,
      idtest4: """
      function() {
        return require('whatever/idtest4/a/b');
      }
      """,
      idtest5: """
      function() {
        return require('whatever/idtest5/a');
      }
      """
    }
  }
  # Module-wide setup: creates a fresh database, registers its deletion for
  # cleanup, and seeds it with the design document plus one empty document.
  # Exposes the database name to every test as context[:db_name].
  setup_all do
    db_name = random_db_name()
    {:ok, _} = create_db(db_name)
    on_exit(fn -> delete_db(db_name) end)

    {:ok, _} = create_doc(db_name, @design_doc)
    {:ok, _} = create_doc(db_name, %{})

    {:ok, [db_name: db_name]}
  end
  # The same design doc saved into two fresh databases must end up with the
  # same deterministic _rev.
  test "consistent _rev for design docs", context do
    resp = Couch.get("/#{context[:db_name]}/_design/test")
    assert resp.status_code == 200
    first_db_rev = resp.body["_rev"]

    second_db_name = random_db_name()
    create_db(second_db_name)
    {:ok, resp2} = create_doc(second_db_name, @design_doc)
    assert first_db_rev == resp2.body["rev"]
  end

  @tag :pending # HTTP 410
  # Show functions can require() CommonJS modules defined in the design doc.
  test "commonjs require", context do
    db_name = context[:db_name]

    resp = Couch.get("/#{db_name}/_design/test/_show/requirey")
    assert resp.status_code == 200
    assert resp.body == "PLANKTONwhatever/commonjs/upperplankton"

    resp = Couch.get("/#{db_name}/_design/test/_show/circular")
    assert resp.status_code == 200

    # The `circular` show returns JSON.stringify(this); decode and spot-check.
    result =
      resp.body
      |> IO.iodata_to_binary()
      |> :jiffy.decode([:return_maps])

    assert result["language"] == "javascript"
  end

  @tag :pending # HTTP 410
  # Mutually-requiring CommonJS modules must still resolve.
  test "circular commonjs dependencies", context do
    db_name = context[:db_name]

    resp = Couch.get("/#{db_name}/_design/test/_show/circular_require")
    assert resp.status_code == 200
    assert resp.body == "One"
  end

  @tag :pending # HTTP 410
  # module.id resolution for the various relative-path require() styles
  # defined under @design_doc's idtest1..idtest5.
  test "module id values are as expected", context do
    db_name = context[:db_name]

    check_id_value(db_name, "idtest1", "whatever/idtest1/a/c/e")
    check_id_value(db_name, "idtest2", "whatever/idtest2/a/c/e")
    check_id_value(db_name, "idtest3", "whatever/idtest3/a/c/e")
    check_id_value(db_name, "idtest4", "whatever/idtest4/a/c/e")
    check_id_value(db_name, "idtest5", "whatever/idtest5/b")
  end
defp check_id_value(db_name, id, expected) do
resp = Couch.get("/#{db_name}/_design/test/_show/#{id}")
assert resp.status_code == 200
assert resp.body == expected
end
  @tag :pending # No compact_running key
  @tag :with_db
  # _info for a design doc keeps a stable signature/size across repeated reads
  # after a stale view query.
  test "that we get correct design doc info back", context do
    db_name = context[:db_name]
    {:ok, _} = create_doc(db_name, @design_doc)

    resp = Couch.get("/#{db_name}/_design/test/_info")
    prev_view_sig = resp.body["view_index"]["signature"]
    prev_view_size = resp.body["view_index"]["sizes"]["file"]

    num_docs = 500
    bulk_save(db_name, make_docs(1..(num_docs + 1)))

    # stale=ok must not trigger an index rebuild.
    Couch.get("/#{db_name}/_design/test/_view/summate", query: [stale: "ok"])

    for _x <- 0..1 do
      resp = Couch.get("/#{db_name}/_design/test/_info")
      assert resp.body["name"] == "test"
      assert resp.body["view_index"]["sizes"]["file"] == prev_view_size
      assert resp.body["view_index"]["compact_running"] == false
      assert resp.body["view_index"]["signature"] == prev_view_sig
    end
  end

  # Map functions can require() modules under views/lib.
  test "commonjs in map functions", context do
    db_name = context[:db_name]

    resp = Couch.get("/#{db_name}/_design/test/_view/commonjs", query: [limit: 1])
    assert resp.status_code == 200
    assert Enum.at(resp.body["rows"], 0)["value"] == "ok"
  end

  # A key range spanning the _design namespace returns exactly the one ddoc.
  test "_all_docs view returns correctly with keys", context do
    db_name = context[:db_name]

    resp =
      Couch.get("/#{db_name}/_all_docs",
        query: [startkey: :jiffy.encode("_design"), endkey: :jiffy.encode("_design0")]
      )

    assert length(resp.body["rows"]) == 1
  end

  @tag :with_db
  # Seeds 2*num_docs docs and runs the double-emit view checks twice
  # (second pass reads the already-built index).
  test "all_docs_twice", context do
    db_name = context[:db_name]
    {:ok, _} = create_doc(db_name, @design_doc)

    num_docs = 500
    bulk_save(db_name, make_docs(1..(2 * num_docs)))

    for _x <- 0..1 do
      test_all_docs_twice(db_name, num_docs)
    end
  end
defp test_all_docs_twice(db_name, num_docs) do
resp = Couch.get("/#{db_name}/_design/test/_view/all_docs_twice")
assert resp.status_code == 200
rows = resp.body["rows"]
for x <- 0..num_docs do
assert Map.get(Enum.at(rows, 2 * x), "key") == x + 1
assert Map.get(Enum.at(rows, 2 * x + 1), "key") == x + 1
end
resp = Couch.get("/#{db_name}/_design/test/_view/no_docs")
assert resp.body["total_rows"] == 0
resp = Couch.get("/#{db_name}/_design/test/_view/single_doc")
assert resp.body["total_rows"] == 1
end
  @tag :with_db
  # A design doc without a "language" field defaults to JavaScript.
  test "language not specified, Javascript is implied", context do
    db_name = context[:db_name]
    bulk_save(db_name, make_docs(1..2))

    design_doc_2 = %{
      _id: "_design/test2",
      views: %{
        single_doc: %{
          map: """
          function(doc) {
            if (doc._id === "1") {
              emit(1, null);
            }
          }
          """
        }
      }
    }

    {:ok, _} = create_doc(db_name, design_doc_2)

    resp = Couch.get("/#{db_name}/_design/test2/_view/single_doc")
    assert resp.status_code == 200
    assert length(resp.body["rows"]) == 1
  end

  @tag :with_db
  # Reduce results over startkey/endkey ranges of the `summate` view must
  # match the closed-form sum computed by summate/1.
  test "startkey and endkey", context do
    db_name = context[:db_name]
    {:ok, _} = create_doc(db_name, @design_doc)

    num_docs = 500
    bulk_save(db_name, make_docs(1..(2 * num_docs)))

    # Whole range: sum of 1..(2 * num_docs).
    resp = Couch.get("/#{db_name}/_design/test/_view/summate")
    assert Enum.at(resp.body["rows"], 0)["value"] == summate(num_docs * 2)

    resp =
      Couch.get("/#{db_name}/_design/test/_view/summate",
        query: [startkey: 4, endkey: 4]
      )

    assert Enum.at(resp.body["rows"], 0)["value"] == 4

    resp =
      Couch.get("/#{db_name}/_design/test/_view/summate",
        query: [startkey: 4, endkey: 5]
      )

    assert Enum.at(resp.body["rows"], 0)["value"] == 9

    resp =
      Couch.get("/#{db_name}/_design/test/_view/summate",
        query: [startkey: 4, endkey: 6]
      )

    assert Enum.at(resp.body["rows"], 0)["value"] == 15

    # test start_key and end_key aliases
    resp =
      Couch.get("/#{db_name}/_design/test/_view/summate",
        query: [start_key: 4, end_key: 6]
      )

    assert Enum.at(resp.body["rows"], 0)["value"] == 15

    # Verify that a shared index (view def is an exact copy of "summate")
    # does not confuse the reduce stage
    resp =
      Couch.get("/#{db_name}/_design/test/_view/summate2",
        query: [startkey: 4, endkey: 6]
      )

    assert Enum.at(resp.body["rows"], 0)["value"] == 15

    # Shrinking ranges [x*30, num_docs - x*30]: reduce equals the difference
    # of the two closed-form partial sums.
    for x <- 0..Integer.floor_div(num_docs, 60) do
      resp =
        Couch.get("/#{db_name}/_design/test/_view/summate",
          query: [startkey: x * 30, endkey: num_docs - x * 30]
        )

      assert Enum.at(resp.body["rows"], 0)["value"] ==
               summate(num_docs - x * 30) - summate(x * 30 - 1)
    end
  end
defp summate(n) do
(n + 1) * (n / 2)
end
  @tag :with_db
  # Deleting a design doc removes both the doc and its views (404s).
  test "design doc deletion", context do
    db_name = context[:db_name]
    {:ok, resp} = create_doc(db_name, @design_doc)

    del_resp =
      Couch.delete("/#{db_name}/#{resp.body["id"]}", query: [rev: resp.body["rev"]])

    assert del_resp.status_code == 200

    resp = Couch.get("/#{db_name}/#{resp.body["id"]}")
    assert resp.status_code == 404

    resp = Couch.get("/#{db_name}/_design/test/_view/no_docs")
    assert resp.status_code == 404
  end

  @tag :with_db
  # validate_doc_update rejects even values while the ddoc exists, and stops
  # applying once the ddoc is deleted.
  test "validate doc update", context do
    db_name = context[:db_name]

    # COUCHDB-1227 - if a design document is deleted, by adding a "_deleted"
    # field with the boolean value true, its validate_doc_update functions
    # should no longer have effect.
    ddoc = %{
      _id: "_design/test",
      language: "javascript",
      validate_doc_update: """
      function(newDoc, oldDoc, userCtx, secObj) {
        if (newDoc.value % 2 == 0) {
          throw({forbidden: "dont like even numbers"});
        }
        return true;
      }
      """
    }

    {:ok, resp_ddoc} = create_doc(db_name, ddoc)

    # While the validator is installed, an even value is forbidden.
    resp =
      Couch.post("/#{db_name}",
        body: %{_id: "doc1", value: 4}
      )

    assert resp.status_code == 403
    assert resp.body["reason"] == "dont like even numbers"

    # Delete the ddoc by posting it back with _deleted: true.
    ddoc_resp = Couch.get("/#{db_name}/#{resp_ddoc.body["id"]}")

    ddoc =
      ddoc_resp.body
      |> Map.put("_deleted", true)

    del_resp =
      Couch.post("/#{db_name}",
        body: ddoc
      )

    assert del_resp.status_code in [201, 202]

    # The previously forbidden doc now saves.
    {:ok, _} = create_doc(db_name, %{_id: "doc1", value: 4})
  end
end
| 26.31134 | 86 | 0.543218 |
03419b394ff15631de7148c504649fda75395878 | 3,034 | exs | Elixir | test/number_insight/advanced_test.exs | ksherman/nexmo-elixir | b907abced1f7eb297819cd5b9256e04e6f06bc96 | [
"MIT"
] | 4 | 2020-01-31T03:34:25.000Z | 2022-02-25T07:05:30.000Z | test/number_insight/advanced_test.exs | ksherman/nexmo-elixir | b907abced1f7eb297819cd5b9256e04e6f06bc96 | [
"MIT"
] | 10 | 2019-08-29T06:58:02.000Z | 2020-01-27T13:26:05.000Z | test/number_insight/advanced_test.exs | ksherman/nexmo-elixir | b907abced1f7eb297819cd5b9256e04e6f06bc96 | [
"MIT"
] | 2 | 2020-03-23T03:02:13.000Z | 2021-09-03T23:18:45.000Z | defmodule Nexmo.NumberInsight.AdvancedTest do
use ExUnit.Case
setup do
api_key = "a123456"
api_secret = "b123456"
# setup test responses
valid_response = %{
"status" => 0,
"status_message" => "Success",
"request_id" => "aaaaaaaa-bbbb-cccc-dddd-0123456789ab",
"international_format_number" => "447700900000",
"national_format_number" => "07700 900000",
"country_code" => "GB",
"country_code_iso3" => "GBR",
"country_name" => "United Kingdom",
"country_prefix" => "44",
"request_price" => "0.04000000",
"refund_price" => "0.01500000",
"remaining_balance" => "1.23456789",
"current_carrier" => %{
"network_code" => "12345",
"name" => "Acme Inc",
"country" => "GB",
"network_type" => "mobile"
},
"original_carrier" => %{
"network_code" => "12345",
"name" => "Acme Inc",
"country" => "GB",
"network_type" => "mobile"
},
"ported" => "not_ported",
"roaming" => %{
"status" => "roaming",
"roaming_country_code" => "US",
"roaming_network_code" => 12345,
"roaming_network_name" => "Acme Inc"
},
"caller_identity" => %{
"caller_type" => "consumer",
"caller_name" => "John Smith",
"first_name" => "John",
"last_name" => "Smith"
},
"lookup_outcome" => "0",
"lookup_outcome_message" => "Success",
"valid_number" => "valid",
"reachable" => "reachable",
"ip" => %{
"address" => "123.0.0.255",
"ip_match_level" => "country",
"ip_country" => "GB",
"ip_city" => "London"
},
"ip_warnings" => "unknown"
}
# setup bypass
bypass = Bypass.open()
orig_endpoint = System.get_env "NUMBER_INSIGHT_API_ENDPOINT"
bypass_url = "http://localhost:#{bypass.port}"
System.put_env "NUMBER_INSIGHT_API_ENDPOINT", bypass_url
orig_api_key = System.get_env "NEXMO_API_KEY"
System.put_env "NEXMO_API_KEY", api_key
orig_api_secret = System.get_env "NEXMO_API_SECRET"
System.put_env "NEXMO_API_SECRET", api_secret
on_exit fn ->
System.put_env "NEXMO_API_KEY", orig_api_key
System.put_env "NEXMO_API_SECRET", orig_api_secret
System.put_env "NUMBER_INSIGHT_API_ENDPOINT", orig_endpoint
end
{:ok, %{
api_key: api_key,
api_secret: api_secret,
bypass: bypass,
valid_response: valid_response
}}
end
  # Drives Nexmo.NumberInsight.advanced/1 against the Bypass stub and checks
  # both the outgoing request (path, query string, method) and that the
  # decoded response body round-trips the canned payload.
  test "sends valid request to Nexmo", %{
    bypass: bypass,
    valid_response: valid_response
  } do
    Bypass.expect bypass, fn conn ->
      assert "/advanced/json" == conn.request_path
      # api_key/api_secret come from the env vars exported in setup.
      assert "api_key=a123456&api_secret=b123456&number=447700900000" == conn.query_string
      assert "GET" == conn.method
      Plug.Conn.send_resp(conn, 200, Poison.encode!(valid_response))
    end

    response = Nexmo.NumberInsight.advanced(%{number: "447700900000"})
    assert valid_response == elem(response, 1).body
  end
end | 31.604167 | 90 | 0.597231 |
0341cad5c8c77a6040179e89086e23976de05d5b | 778 | ex | Elixir | test/support/channel_case.ex | bmbferreira/slack-quiet | 3bd08f102e1236189fd81580ded8b6548ddfc2cd | [
"MIT"
] | 5 | 2018-01-15T19:51:37.000Z | 2018-01-31T18:30:01.000Z | test/support/channel_case.ex | bmbferreira/slack-quiet | 3bd08f102e1236189fd81580ded8b6548ddfc2cd | [
"MIT"
] | null | null | null | test/support/channel_case.ex | bmbferreira/slack-quiet | 3bd08f102e1236189fd81580ded8b6548ddfc2cd | [
"MIT"
] | 1 | 2018-06-25T19:16:39.000Z | 2018-06-25T19:16:39.000Z | defmodule SlackQuietWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by channel tests.

  Such tests rely on `Phoenix.ChannelTest` and also import other
  functionality to make it easier to build common data structures.

  Note: this case template configures no per-test state of its own — the
  `setup` callback below is a no-op — so channel tests may opt into
  `async: true` as long as they avoid shared global state themselves.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest

      # The default endpoint for testing
      @endpoint SlackQuietWeb.Endpoint
    end
  end

  # No shared setup is needed; kept so the template has a setup hook to extend.
  setup _tags do
    :ok
  end
end
| 24.3125 | 58 | 0.730077 |
0341f6621438cdb7bb028f7e3d80d961c952beb7 | 4,127 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/org_chart_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/org_chart_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/sheets/lib/google_api/sheets/v4/model/org_chart_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.OrgChartSpec do
@moduledoc """
An org chart. Org charts require a unique set of labels in labels and may optionally include parent_labels and tooltips. parent_labels contain, for each node, the label identifying the parent node. tooltips contain, for each node, an optional tooltip. For example, to describe an OrgChart with Alice as the CEO, Bob as the President (reporting to Alice) and Cathy as VP of Sales (also reporting to Alice), have labels contain "Alice", "Bob", "Cathy", parent_labels contain "", "Alice", "Alice" and tooltips contain "CEO", "President", "VP Sales".
## Attributes
* `labels` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the labels for all the nodes in the chart. Labels must be unique.
* `nodeColor` (*type:* `GoogleApi.Sheets.V4.Model.Color.t`, *default:* `nil`) - The color of the org chart nodes.
* `nodeColorStyle` (*type:* `GoogleApi.Sheets.V4.Model.ColorStyle.t`, *default:* `nil`) - The color of the org chart nodes. If node_color is also set, this field takes precedence.
* `nodeSize` (*type:* `String.t`, *default:* `nil`) - The size of the org chart nodes.
* `parentLabels` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the label of the parent for the corresponding node. A blank value indicates that the node has no parent and is a top-level node. This field is optional.
* `selectedNodeColor` (*type:* `GoogleApi.Sheets.V4.Model.Color.t`, *default:* `nil`) - The color of the selected org chart nodes.
* `selectedNodeColorStyle` (*type:* `GoogleApi.Sheets.V4.Model.ColorStyle.t`, *default:* `nil`) - The color of the selected org chart nodes. If selected_node_color is also set, this field takes precedence.
* `tooltips` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the tooltip for the corresponding node. A blank value results in no tooltip being displayed for the node. This field is optional.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:labels => GoogleApi.Sheets.V4.Model.ChartData.t(),
:nodeColor => GoogleApi.Sheets.V4.Model.Color.t(),
:nodeColorStyle => GoogleApi.Sheets.V4.Model.ColorStyle.t(),
:nodeSize => String.t(),
:parentLabels => GoogleApi.Sheets.V4.Model.ChartData.t(),
:selectedNodeColor => GoogleApi.Sheets.V4.Model.Color.t(),
:selectedNodeColorStyle => GoogleApi.Sheets.V4.Model.ColorStyle.t(),
:tooltips => GoogleApi.Sheets.V4.Model.ChartData.t()
}
field(:labels, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:nodeColor, as: GoogleApi.Sheets.V4.Model.Color)
field(:nodeColorStyle, as: GoogleApi.Sheets.V4.Model.ColorStyle)
field(:nodeSize)
field(:parentLabels, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:selectedNodeColor, as: GoogleApi.Sheets.V4.Model.Color)
field(:selectedNodeColorStyle, as: GoogleApi.Sheets.V4.Model.ColorStyle)
field(:tooltips, as: GoogleApi.Sheets.V4.Model.ChartData)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.OrgChartSpec do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.OrgChartSpec.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.OrgChartSpec do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 60.691176 | 548 | 0.727405 |
0341fa056ad0a1b07326fa1d68a8af62343a40e0 | 2,189 | exs | Elixir | config/prod.exs | mreishus/demon_spirit_umbrella | 1ab9161427361ac7d35132fce2aed36923896a4b | [
"MIT"
] | 12 | 2019-09-17T13:47:57.000Z | 2021-12-11T17:11:11.000Z | config/prod.exs | mreishus/demon_spirit_umbrella | 1ab9161427361ac7d35132fce2aed36923896a4b | [
"MIT"
] | 181 | 2019-10-15T01:21:44.000Z | 2021-08-31T19:26:54.000Z | config/prod.exs | mreishus/demon_spirit_umbrella | 1ab9161427361ac7d35132fce2aed36923896a4b | [
"MIT"
] | 2 | 2020-07-11T02:18:46.000Z | 2021-05-31T10:46:39.000Z | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :demon_spirit_web, DemonSpiritWeb.Endpoint,
url: [host: "demonspirit.xyz", port: 80],
code_reloader: false,
cache_static_manifest: "priv/static/cache_manifest.json",
check_origin: [
"//demonspirit.xyz",
"//example.com",
"//localhost",
"//172.22.2.30:31229",
"//172.22.2.31:31229",
"//172.22.2.32:31229",
"//172.22.2.33:31229"
]
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :demon_spirit_web, DemonSpiritWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# :inet6,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :demon_spirit_web, DemonSpiritWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Do not print debug messages in production
config :logger, level: :info
# No longer using prod.secret.exs - Letting
# the release system check releases.exs at runtime
| 33.676923 | 66 | 0.696208 |
034205a0be9b3c55012aef25a59d671bc6392a7b | 6,801 | ex | Elixir | lib/mix/pow.ex | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | 1 | 2021-06-25T10:36:01.000Z | 2021-06-25T10:36:01.000Z | lib/mix/pow.ex | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | null | null | null | lib/mix/pow.ex | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | 1 | 2020-07-13T01:11:17.000Z | 2020-07-13T01:11:17.000Z | defmodule Mix.Pow do
@moduledoc """
Utilities module for mix tasks.
"""
alias Mix.{Dep, Project}
@doc """
Raises an exception if the project is an umbrella app.
"""
@spec no_umbrella!(binary()) :: :ok | no_return
def no_umbrella!(task) do
if Project.umbrella?() do
Mix.raise("mix #{task} can only be run inside an application directory")
end
:ok
end
# TODO: Remove by 1.1.0
@doc false
@deprecated "Use `ensure_ecto!` or `ensure_phoenix!` instead"
@spec ensure_dep!(binary(), atom(), OptionParser.argv()) :: :ok | no_return
def ensure_dep!(task, dep, _args) do
fetch_deps()
|> top_level_dep_in_deps?(dep)
|> case do
true ->
:ok
false ->
Mix.raise("mix #{task} can only be run inside an application directory that has #{inspect dep} as dependency")
end
end
@doc """
Raises an exception if application doesn't have Ecto as dependency.
"""
@spec ensure_ecto!(binary(), OptionParser.argv()) :: :ok | no_return
def ensure_ecto!(task, _args) do
deps = fetch_deps()
cond do
top_level_dep_in_deps?(deps, :ecto) -> :ok
top_level_dep_in_deps?(deps, :ecto_sql) -> :ok
true -> Mix.raise("mix #{task} can only be run inside an application directory that has :ecto or :ecto_sql as dependency")
end
end
defp top_level_dep_in_deps?(deps, dep) do
Enum.any?(deps, fn
%Mix.Dep{app: ^dep, top_level: true} -> true
_any -> false
end)
end
# TODO: Remove by 1.1.0 and only support Elixir 1.7
defp fetch_deps do
System.version()
|> Version.match?("~> 1.6.0")
|> case do
true -> apply(Dep, :loaded, [[]])
false -> apply(Dep, :load_on_environment, [[]])
end
end
@doc """
Raises an exception if application doesn't have Phoenix as dependency.
"""
@spec ensure_phoenix!(binary(), OptionParser.argv()) :: :ok | no_return
def ensure_phoenix!(task, _args) do
case top_level_dep_in_deps?(fetch_deps(), :phoenix) do
true -> :ok
false -> Mix.raise("mix #{task} can only be run inside an application directory that has :phoenix as dependency")
end
end
@doc """
Parses argument options into a map.
"""
@spec parse_options(OptionParser.argv(), Keyword.t(), Keyword.t()) :: {map(), OptionParser.argv(), OptionParser.errors()}
def parse_options(args, switches, default_opts) do
{opts, parsed, invalid} = OptionParser.parse(args, switches: switches)
default_opts = to_map(default_opts)
opts = to_map(opts)
config =
default_opts
|> Map.merge(opts)
|> context_app_to_atom()
{config, parsed, invalid}
end
defp to_map(keyword) do
Enum.reduce(keyword, %{}, fn {key, value}, map ->
case Map.get(map, key) do
nil ->
Map.put(map, key, value)
existing_value ->
value = List.wrap(existing_value) ++ [value]
Map.put(map, key, value)
end
end)
end
defp context_app_to_atom(%{context_app: context_app} = config),
do: Map.put(config, :context_app, String.to_atom(context_app))
defp context_app_to_atom(config),
do: config
@doc """
Parses arguments into schema name and schema plural.
"""
@spec schema_options_from_args([binary()]) :: map()
def schema_options_from_args(_opts \\ [])
def schema_options_from_args([schema, plural | _rest]), do: %{schema_name: schema, schema_plural: plural}
def schema_options_from_args(_any), do: %{schema_name: "Users.User", schema_plural: "users"}
@doc false
@spec validate_schema_args!([binary()], binary()) :: map() | no_return()
def validate_schema_args!([schema, plural | _rest] = args, task) do
cond do
not schema_valid?(schema) ->
raise_invalid_schema_args_error!("Expected the schema argument, #{inspect schema}, to be a valid module name", task)
not plural_valid?(plural) ->
raise_invalid_schema_args_error!("Expected the plural argument, #{inspect plural}, to be all lowercase using snake_case convention", task)
true ->
schema_options_from_args(args)
end
end
def validate_schema_args!([_schema | _rest], task) do
raise_invalid_schema_args_error!("Invalid arguments", task)
end
def validate_schema_args!([], _task), do: schema_options_from_args()
defp schema_valid?(schema), do: schema =~ ~r/^[A-Z]\w*(\.[A-Z]\w*)*$/
defp plural_valid?(plural), do: plural =~ ~r/^[a-z\_]*$/
@spec raise_invalid_schema_args_error!(binary(), binary()) :: no_return()
defp raise_invalid_schema_args_error!(msg, task) do
Mix.raise("""
#{msg}
mix #{task} accepts both a module name and the plural of the resource:
mix #{task} Users.User users
""")
end
# TODO: Remove by 1.1.0
@doc false
@deprecated "Please use `Pow.Phoenix.parse_structure/1` instead"
@spec context_app :: atom() | no_return
def context_app do
this_app = otp_app()
this_app
|> Application.get_env(:generators, [])
|> Keyword.get(:context_app)
|> case do
nil -> this_app
false -> Mix.raise("No context_app configured for current application")
{app, _path} -> app
app -> app
end
end
@doc false
@spec otp_app :: atom() | no_return
def otp_app do
Keyword.fetch!(Mix.Project.config(), :app)
end
# TODO: Remove by 1.1.0
@doc false
@deprecated "Use `app_base/1` instead"
@spec context_base(atom()) :: atom()
def context_base(app), do: app_base(app)
@doc """
Fetches the context base module for the app.
"""
@spec app_base(atom()) :: atom()
def app_base(app) do
case Application.get_env(app, :namespace, app) do
^app ->
app
|> to_string()
|> Macro.camelize()
|> List.wrap()
|> Module.concat()
mod ->
mod
end
end
@doc """
Fetches the library path for the context app.
"""
@spec context_lib_path(atom(), Path.t()) :: Path.t()
def context_lib_path(ctx_app, rel_path) do
context_app_path(ctx_app, Path.join(["lib", to_string(ctx_app), rel_path]))
end
defp context_app_path(ctx_app, rel_path) when is_atom(ctx_app) do
this_app = otp_app()
if ctx_app == this_app do
rel_path
else
app_path =
case Application.get_env(this_app, :generators)[:context_app] do
{^ctx_app, path} -> Path.relative_to_cwd(path)
_ -> mix_app_path(ctx_app, this_app)
end
Path.join(app_path, rel_path)
end
end
defp mix_app_path(app, this_otp_app) do
case Mix.Project.deps_paths() do
%{^app => path} ->
Path.relative_to_cwd(path)
_deps ->
Mix.raise("No directory for context_app #{inspect(app)} found in #{this_otp_app}'s deps.")
end
end
end
| 29.314655 | 146 | 0.635201 |
03420f46c899ef33bdc6ce99373224ca40a6220f | 11,031 | ex | Elixir | debian/osvr-rendermanager/debian/manpage.xml.ex | fkmclane/OSVR-Packaging | 0cacf55e35752d22f21b67610c103e63e75bbca5 | [
"MIT"
] | null | null | null | debian/osvr-rendermanager/debian/manpage.xml.ex | fkmclane/OSVR-Packaging | 0cacf55e35752d22f21b67610c103e63e75bbca5 | [
"MIT"
] | null | null | null | debian/osvr-rendermanager/debian/manpage.xml.ex | fkmclane/OSVR-Packaging | 0cacf55e35752d22f21b67610c103e63e75bbca5 | [
"MIT"
] | null | null | null | <?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE refentry PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd" [
<!--
`xsltproc -''-nonet \
-''-param man.charmap.use.subset "0" \
-''-param make.year.ranges "1" \
-''-param make.single.year.ranges "1" \
/usr/share/xml/docbook/stylesheet/docbook-xsl/manpages/docbook.xsl \
manpage.xml'
A manual page <package>.<section> will be generated. You may view the
manual page with: nroff -man <package>.<section> | less'. A typical entry
in a Makefile or Makefile.am is:
DB2MAN = /usr/share/sgml/docbook/stylesheet/xsl/docbook-xsl/manpages/docbook.xsl
XP = xsltproc -''-nonet -''-param man.charmap.use.subset "0"
manpage.1: manpage.xml
$(XP) $(DB2MAN) $<
The xsltproc binary is found in the xsltproc package. The XSL files are in
docbook-xsl. A description of the parameters you can use can be found in the
docbook-xsl-doc-* packages. Please remember that if you create the nroff
version in one of the debian/rules file targets (such as build), you will need
to include xsltproc and docbook-xsl in your Build-Depends control field.
Alternatively use the xmlto command/package. That will also automatically
pull in xsltproc and docbook-xsl.
Notes for using docbook2x: docbook2x-man does not automatically create the
AUTHOR(S) and COPYRIGHT sections. In this case, please add them manually as
<refsect1> ... </refsect1>.
To disable the automatic creation of the AUTHOR(S) and COPYRIGHT sections
read /usr/share/doc/docbook-xsl/doc/manpages/authors.html. This file can be
found in the docbook-xsl-doc-html package.
Validation can be done using: `xmllint -''-noout -''-valid manpage.xml`
General documentation about man-pages and man-page-formatting:
man(1), man(7), http://www.tldp.org/HOWTO/Man-Page/
-->
<!-- Fill in your name for FIRSTNAME and SURNAME. -->
<!ENTITY dhfirstname "FIRSTNAME">
<!ENTITY dhsurname "SURNAME">
<!-- dhusername could also be set to "&dhfirstname; &dhsurname;". -->
<!ENTITY dhusername "Chris">
<!ENTITY dhemail "chris@unknown">
<!-- SECTION should be 1-8, maybe w/ subsection other parameters are
allowed: see man(7), man(1) and
http://www.tldp.org/HOWTO/Man-Page/q2.html. -->
<!ENTITY dhsection "SECTION">
<!-- TITLE should be something like "User commands" or similar (see
http://www.tldp.org/HOWTO/Man-Page/q2.html). -->
<!ENTITY dhtitle "osvr-rendermanager User Manual">
<!ENTITY dhucpackage "Osvr-rendermanager">
<!ENTITY dhpackage "osvr-rendermanager">
]>
<refentry>
<refentryinfo>
<title>&dhtitle;</title>
<productname>&dhpackage;</productname>
<authorgroup>
<author>
<firstname>&dhfirstname;</firstname>
<surname>&dhsurname;</surname>
<contrib>Wrote this manpage for the Debian system.</contrib>
<address>
<email>&dhemail;</email>
</address>
</author>
</authorgroup>
<copyright>
<year>2007</year>
<holder>&dhusername;</holder>
</copyright>
<legalnotice>
<para>This manual page was written for the Debian system
(and may be used by others).</para>
<para>Permission is granted to copy, distribute and/or modify this
document under the terms of the GNU General Public License,
Version 2 or (at your option) any later version published by
the Free Software Foundation.</para>
<para>On Debian systems, the complete text of the GNU General Public
License can be found in
<filename>/usr/share/common-licenses/GPL</filename>.</para>
</legalnotice>
</refentryinfo>
<refmeta>
<refentrytitle>&dhucpackage;</refentrytitle>
<manvolnum>&dhsection;</manvolnum>
</refmeta>
<refnamediv>
<refname>&dhpackage;</refname>
<refpurpose>program to do something</refpurpose>
</refnamediv>
<refsynopsisdiv>
<cmdsynopsis>
<command>&dhpackage;</command>
<!-- These are several examples, how syntaxes could look -->
<arg choice="plain"><option>-e <replaceable>this</replaceable></option></arg>
<arg choice="opt"><option>--example=<parameter>that</parameter></option></arg>
<arg choice="opt">
<group choice="req">
<arg choice="plain"><option>-e</option></arg>
<arg choice="plain"><option>--example</option></arg>
</group>
<replaceable class="option">this</replaceable>
</arg>
<arg choice="opt">
<group choice="req">
<arg choice="plain"><option>-e</option></arg>
<arg choice="plain"><option>--example</option></arg>
</group>
<group choice="req">
<arg choice="plain"><replaceable>this</replaceable></arg>
<arg choice="plain"><replaceable>that</replaceable></arg>
</group>
</arg>
</cmdsynopsis>
<cmdsynopsis>
<command>&dhpackage;</command>
<!-- Normally the help and version options make the programs stop
right after outputting the requested information. -->
<group choice="opt">
<arg choice="plain">
<group choice="req">
<arg choice="plain"><option>-h</option></arg>
<arg choice="plain"><option>--help</option></arg>
</group>
</arg>
<arg choice="plain">
<group choice="req">
<arg choice="plain"><option>-v</option></arg>
<arg choice="plain"><option>--version</option></arg>
</group>
</arg>
</group>
</cmdsynopsis>
</refsynopsisdiv>
<refsect1 id="description">
<title>DESCRIPTION</title>
<para>This manual page documents briefly the
<command>&dhpackage;</command> and <command>bar</command>
commands.</para>
<para>This manual page was written for the Debian distribution
because the original program does not have a manual page.
Instead, it has documentation in the GNU <citerefentry>
<refentrytitle>info</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry> format; see below.</para>
<para><command>&dhpackage;</command> is a program that...</para>
</refsect1>
<refsect1 id="options">
<title>OPTIONS</title>
<para>The program follows the usual GNU command line syntax,
with long options starting with two dashes (`-'). A summary of
options is included below. For a complete description, see the
<citerefentry>
<refentrytitle>info</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry> files.</para>
<variablelist>
<!-- Use the variablelist.term.separator and the
variablelist.term.break.after parameters to
control the term elements. -->
<varlistentry>
<term><option>-e <replaceable>this</replaceable></option></term>
<term><option>--example=<replaceable>that</replaceable></option></term>
<listitem>
<para>Does this and that.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-h</option></term>
<term><option>--help</option></term>
<listitem>
<para>Show summary of options.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-v</option></term>
<term><option>--version</option></term>
<listitem>
<para>Show version of program.</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1 id="files">
<title>FILES</title>
<variablelist>
<varlistentry>
<term><filename>/etc/foo.conf</filename></term>
<listitem>
<para>The system-wide configuration file to control the
behaviour of <application>&dhpackage;</application>. See
<citerefentry>
<refentrytitle>foo.conf</refentrytitle>
<manvolnum>5</manvolnum>
</citerefentry> for further details.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><filename>${HOME}/.foo.conf</filename></term>
<listitem>
<para>The per-user configuration file to control the
behaviour of <application>&dhpackage;</application>. See
<citerefentry>
<refentrytitle>foo.conf</refentrytitle>
<manvolnum>5</manvolnum>
</citerefentry> for further details.</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1 id="environment">
<title>ENVIRONMENT</title>
<variablelist>
<varlistentry>
<term><envar>FOO_CONF</envar></term>
<listitem>
<para>If used, the defined file is used as configuration
file (see also <xref linkend="files"/>).</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1 id="diagnostics">
<title>DIAGNOSTICS</title>
<para>The following diagnostics may be issued
on <filename class="devicefile">stderr</filename>:</para>
<variablelist>
<varlistentry>
<term><errortext>Bad configuration file. Exiting.</errortext></term>
<listitem>
<para>The configuration file seems to contain a broken configuration
line. Use the <option>--verbose</option> option, to get more info.
</para>
</listitem>
</varlistentry>
</variablelist>
<para><command>&dhpackage;</command> provides some return codes, that can
be used in scripts:</para>
<segmentedlist>
<segtitle>Code</segtitle>
<segtitle>Diagnostic</segtitle>
<seglistitem>
<seg><errorcode>0</errorcode></seg>
<seg>Program exited successfully.</seg>
</seglistitem>
<seglistitem>
<seg><errorcode>1</errorcode></seg>
<seg>The configuration file seems to be broken.</seg>
</seglistitem>
</segmentedlist>
</refsect1>
<refsect1 id="bugs">
<!-- Or use this section to tell about upstream BTS. -->
<title>BUGS</title>
<para>The program is currently limited to only work
with the <package>foobar</package> library.</para>
<para>The upstreams <acronym>BTS</acronym> can be found
at <ulink url="http://bugzilla.foo.tld"/>.</para>
</refsect1>
<refsect1 id="see_also">
<title>SEE ALSO</title>
<!-- In alpabetical order. -->
<para><citerefentry>
<refentrytitle>bar</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry>, <citerefentry>
<refentrytitle>baz</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry>, <citerefentry>
<refentrytitle>foo.conf</refentrytitle>
<manvolnum>5</manvolnum>
</citerefentry></para>
<para>The programs are documented fully by <citetitle>The Rise and
Fall of a Fooish Bar</citetitle> available via the <citerefentry>
<refentrytitle>info</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry> system.</para>
</refsect1>
</refentry>
| 37.777397 | 84 | 0.632853 |
034216a0c0f477076a318b5d787c203da4dbb1a3 | 1,482 | exs | Elixir | priv/repo/migrations/002_create_user.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | priv/repo/migrations/002_create_user.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | priv/repo/migrations/002_create_user.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJet.Repo.Migrations.CreateUser do
use Ecto.Migration
def change do
create table(:users, primary_key: false) do
add :id, :binary_id, primary_key: true
add :account_id, references(:accounts, type: :binary_id, on_delete: :delete_all)
add :default_account_id, references(:accounts, type: :binary_id), null: false
add :status, :string
add :username, :string, null: false
add :email, :string
add :phone_number, :string
add :encrypted_password, :string
add :name, :string
add :first_name, :string
add :last_name, :string
add :auth_method, :string, null: false
add :tfa_code, :string
add :tfa_code_expires_at, :utc_datetime
add :email_verification_token, :string
add :email_verification_token_expires_at, :utc_datetime
add :email_verified, :boolean, null: false
add :email_verified_at, :utc_datetime
add :password_reset_token, :string
add :password_reset_token_expires_at, :utc_datetime
add :password_updated_at, :utc_datetime
timestamps()
end
create unique_index(:users, [:email], where: "account_id IS NULL")
create unique_index(:users, [:username], where: "account_id IS NULL")
create unique_index(:users, [:password_reset_token])
create unique_index(:users, [:email_verification_token])
create unique_index(:users, [:account_id, :username])
create index(:users, [:account_id, :status])
end
end
| 33.681818 | 86 | 0.694332 |
034221675fbffe614a4555c82325534479a0a994 | 76 | ex | Elixir | lib/working_hours/repo.ex | n2o/WorkingHours | 07987bf4dab62c227928273c18d57cd92f55ed07 | [
"MIT"
] | null | null | null | lib/working_hours/repo.ex | n2o/WorkingHours | 07987bf4dab62c227928273c18d57cd92f55ed07 | [
"MIT"
] | null | null | null | lib/working_hours/repo.ex | n2o/WorkingHours | 07987bf4dab62c227928273c18d57cd92f55ed07 | [
"MIT"
] | null | null | null | defmodule WorkingHours.Repo do
use Ecto.Repo, otp_app: :working_hours
end
| 19 | 40 | 0.802632 |
034230a8e4fd8167e1593e07448649db359f4654 | 979 | exs | Elixir | test/static_qr/static_qr_test.exs | anfly0/exBankID | 8e62f4250b533543a6b4b423b0c2fb3ec262dc7b | [
"MIT"
] | 15 | 2020-08-05T11:20:03.000Z | 2021-09-22T15:20:41.000Z | test/static_qr/static_qr_test.exs | anfly0/exBankID | 8e62f4250b533543a6b4b423b0c2fb3ec262dc7b | [
"MIT"
] | 9 | 2020-08-10T14:42:27.000Z | 2021-06-14T14:45:05.000Z | test/static_qr/static_qr_test.exs | anfly0/exBankID | 8e62f4250b533543a6b4b423b0c2fb3ec262dc7b | [
"MIT"
] | 2 | 2020-08-10T13:42:15.000Z | 2020-10-03T04:10:06.000Z | defmodule Test.Static.Qr do
use ExUnit.Case, async: true
test "Static qr-code from auth response" do
response = %ExBankID.Auth.Response{
orderRef: "131daac9-16c6-4618-beb0-365768f37288",
autoStartToken: "7c40b5c9-fa74-49cf-b98c-bfe651f9a7c6",
qrStartToken: "67df3917-fa0d-44e5-b327-edcc928297f8",
qrStartSecret: "d28db9a7-4cde-429e-a983-359be676944c"
}
assert "bankid:///?autostarttoken=7c40b5c9-fa74-49cf-b98c-bfe651f9a7c6" =
ExBankID.static_qr(response)
end
test "Static qr-code from sign response" do
response = %ExBankID.Sign.Response{
orderRef: "131daac9-16c6-4618-beb0-365768f37288",
autoStartToken: "7c40b5c9-fa74-49cf-b98c-bfe651f9a7c6",
qrStartToken: "67df3917-fa0d-44e5-b327-edcc928297f8",
qrStartSecret: "d28db9a7-4cde-429e-a983-359be676944c"
}
assert "bankid:///?autostarttoken=7c40b5c9-fa74-49cf-b98c-bfe651f9a7c6" =
ExBankID.static_qr(response)
end
end
| 34.964286 | 77 | 0.708887 |
03423b8fa7c7eb98fd1e2d1ae3060c6be49da19c | 212 | exs | Elixir | v02/ch02/struct1.edit3.exs | oiax/elixir-primer | c8b89a29f108cc335b8e1341b7a1e90ec12adc66 | [
"MIT"
] | null | null | null | v02/ch02/struct1.edit3.exs | oiax/elixir-primer | c8b89a29f108cc335b8e1341b7a1e90ec12adc66 | [
"MIT"
] | null | null | null | v02/ch02/struct1.edit3.exs | oiax/elixir-primer | c8b89a29f108cc335b8e1341b7a1e90ec12adc66 | [
"MIT"
] | null | null | null | m = %{name: "foo", email: "[email protected]"}
m = Map.merge(m, %{email: "[email protected]"})
u = %User{name: "foo", email: "[email protected]"}
u = Map.merge(u, %{email: "[email protected]"})
IO.inspect m
IO.inspect u
| 30.285714 | 48 | 0.622642 |
03423e01d187d2789be11902c0f98c61f20030df | 700 | ex | Elixir | web/gettext.ex | joeletizia/blogger | 64b7b5665cdd75cbf24f5cfd938faf5135eb914a | [
"MIT"
] | null | null | null | web/gettext.ex | joeletizia/blogger | 64b7b5665cdd75cbf24f5cfd938faf5135eb914a | [
"MIT"
] | null | null | null | web/gettext.ex | joeletizia/blogger | 64b7b5665cdd75cbf24f5cfd938faf5135eb914a | [
"MIT"
] | null | null | null | defmodule Blogger.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](http://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import Blogger.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](http://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :blogger
end
| 28 | 71 | 0.677143 |
0342cc749e7f72fee6ee67f044cee9d144733497 | 1,571 | ex | Elixir | clients/big_query_connection/lib/google_api/big_query_connection/v1beta1/model/empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/big_query_connection/lib/google_api/big_query_connection/v1beta1/model/empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/big_query_connection/lib/google_api/big_query_connection/v1beta1/model/empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQueryConnection.V1beta1.Model.Empty do
@moduledoc """
A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON representation for `Empty` is empty JSON object `{}`.
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.BigQueryConnection.V1beta1.Model.Empty do
def decode(value, options) do
GoogleApi.BigQueryConnection.V1beta1.Model.Empty.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.BigQueryConnection.V1beta1.Model.Empty do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.404762 | 345 | 0.764481 |
034321a2ea84a7c2fcb5ba1fe5e0a4060a7ea1e3 | 2,137 | exs | Elixir | mix.exs | alanvardy/phoenix_live_view | f5c18a5d5682850050072054397da024371aefca | [
"MIT"
] | null | null | null | mix.exs | alanvardy/phoenix_live_view | f5c18a5d5682850050072054397da024371aefca | [
"MIT"
] | null | null | null | mix.exs | alanvardy/phoenix_live_view | f5c18a5d5682850050072054397da024371aefca | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.MixProject do
use Mix.Project
@version "0.5.2"
def project do
[
app: :phoenix_live_view,
version: @version,
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
elixirc_paths: elixirc_paths(Mix.env()),
package: package(),
xref: [exclude: [Floki]],
deps: deps(),
docs: docs(),
homepage_url: "http://www.phoenixframework.org",
description: """
Rich, real-time user experiences with server-rendered HTML
"""
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Run "mix help compile.app" to learn about applications.
def application do
[
mod: {Phoenix.LiveView.Application, []},
extra_applications: [:logger]
]
end
defp deps do
[
{:phoenix, "~> 1.4.9"},
{:phoenix_html, "~> 2.13.2"},
{:jason, "~> 1.0", optional: true},
{:ex_doc, "~> 0.20", only: :docs},
{:floki, "~> 0.24.0", only: :test}
]
end
defp docs do
[
main: "Phoenix.LiveView",
source_ref: "v#{@version}",
source_url: "https://github.com/phoenixframework/phoenix_live_view",
extra_section: "GUIDES",
extras: extras(),
groups_for_extras: groups_for_extras(),
groups_for_modules: groups_for_modules()
]
end
defp extras do
[
"guides/introduction/installation.md"
]
end
defp groups_for_extras do
[
Introduction: ~r/guides\/introduction\/.?/
]
end
defp groups_for_modules do
[
"Live EEx Engine": [
Phoenix.LiveView.Engine,
Phoenix.LiveView.Component,
Phoenix.LiveView.Rendered,
Phoenix.LiveView.Comprehension
]
]
end
defp package do
[
maintainers: ["Chris McCord", "José Valim", "Gary Rennie", "Alex Garibay", "Scott Newcomer"],
licenses: ["MIT"],
links: %{github: "https://github.com/phoenixframework/phoenix_live_view"},
files:
~w(assets/css assets/js lib priv) ++
~w(CHANGELOG.md LICENSE.md mix.exs package.json README.md)
]
end
end
| 23.483516 | 99 | 0.590547 |
03432d05efa9311235d0971fab2849b3cb147180 | 1,555 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_update_merchant_order_id_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_update_merchant_order_id_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_update_merchant_order_id_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.OrdersUpdateMerchantOrderIdRequest do
@moduledoc """
## Attributes
- merchantOrderId (String): The merchant order id to be assigned to the order. Must be unique per merchant. Defaults to: `null`.
- operationId (String): The ID of the operation. Unique across all operations for a given order. Defaults to: `null`.
"""
defstruct [
:"merchantOrderId",
:"operationId"
]
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersUpdateMerchantOrderIdRequest do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersUpdateMerchantOrderIdRequest do
def encode(value, options) do
GoogleApi.Content.V2.Deserializer.serialize_non_nil(value, options)
end
end
| 32.395833 | 130 | 0.758199 |
0343333f1d1862c8e702320e799a413e922a2dbc | 966 | ex | Elixir | test/support/channel_case.ex | mstang/people_sorter | e712622c071748c79b26e977a8b029e1ba1877c5 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | mstang/people_sorter | e712622c071748c79b26e977a8b029e1ba1877c5 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | mstang/people_sorter | e712622c071748c79b26e977a8b029e1ba1877c5 | [
"MIT"
] | null | null | null | defmodule PeopleSorterWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use PeopleSorterWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import PeopleSorterWeb.ChannelCase
# The default endpoint for testing
@endpoint PeopleSorterWeb.Endpoint
end
end
setup _tags do
:ok
end
end
| 27.6 | 69 | 0.744306 |
0343388e6adb0e388bc15e575b9eed4567b59e7c | 470 | exs | Elixir | config/dev.exs | fremantle-capital/ex_okex | 1e5a4ac8c3713a0bfcd8fffc20dce1a9eaacf931 | [
"MIT"
] | 6 | 2018-12-04T22:05:05.000Z | 2022-01-08T11:54:40.000Z | config/dev.exs | fremantle-capital/ex_okex | 1e5a4ac8c3713a0bfcd8fffc20dce1a9eaacf931 | [
"MIT"
] | 14 | 2019-12-21T11:32:30.000Z | 2022-03-28T16:07:01.000Z | config/dev.exs | acuityinnovations/ex_okex | 52acf16e8d00446ca32607ccb2cd75add0acaceb | [
"MIT"
] | 5 | 2019-04-10T00:41:17.000Z | 2021-12-23T14:49:02.000Z | use Mix.Config
# Read from environment variables
config :ex_okex,
api_key: System.get_env("OKEX_API_KEY"),
api_secret: System.get_env("OKEX_API_SECRET"),
api_passphrase: System.get_env("OKEX_API_PASSPHRASE")
# Or replace "OKEX_*" values to define here in config file
# config :ex_okex, api_key: {:system, "OKEX_API_KEY"},
# api_secret: {:system, "OKEX_API_SECRET"},
# api_passphrase: {:system, "OKEX_API_PASSPHRASE"}
| 36.153846 | 67 | 0.682979 |
03433bdb1ce48f177d189e005b3df90cee6a1e96 | 388 | ex | Elixir | lib/rockelivery/user/actions/update.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | lib/rockelivery/user/actions/update.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | lib/rockelivery/user/actions/update.ex | joabehenrique/rockelivery | 02f83fe99df8e27d4e2a6270fecb39a521f2ceaf | [
"MIT"
] | null | null | null | defmodule Rockelivery.User.Actions.Update do
alias Rockelivery.{Error, Repo}
alias Rockelivery.User.User
def call(%{"id" => id} = params) do
case Repo.get(User, id) do
nil -> {:error, Error.build_user_not_found_error()}
user -> update(user, params)
end
end
defp update(user, params) do
user
|> User.changeset(params)
|> Repo.update()
end
end
| 21.555556 | 57 | 0.649485 |
03433ead72a93d58138e69e3ab7e74d7268e11cb | 1,257 | exs | Elixir | config/prod.secret.exs | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | config/prod.secret.exs | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | config/prod.secret.exs | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | # In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
# NOTE(review): `use Mix.Config` is deprecated in favor of `import Config`
# on Elixir >= 1.9 — confirm the project's minimum Elixir version before
# switching, since this file already references v1.9+ releases below.
use Mix.Config
# Fail fast at config evaluation if the database URL is not provided;
# System.get_env/1 returns nil when unset, which triggers the raise.
database_url =
  System.get_env("DATABASE_URL") ||
    raise """
    environment variable DATABASE_URL is missing.
    For example: ecto://USER:PASS@HOST/DATABASE
    """
config :magnemite, Magnemite.Repo,
  # ssl: true,
  url: database_url,
  # Pool size defaults to 10 connections when POOL_SIZE is unset.
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
# The endpoint's secret is mandatory in production; raise with a hint on
# how to generate one if it is missing.
secret_key_base =
  System.get_env("SECRET_KEY_BASE") ||
    raise """
    environment variable SECRET_KEY_BASE is missing.
    You can generate one by calling: mix phx.gen.secret
    """
config :magnemite, MagnemiteWeb.Endpoint,
  http: [
    # HTTP port defaults to 4000 when PORT is unset.
    port: String.to_integer(System.get_env("PORT") || "4000"),
    transport_options: [socket_opts: [:inet6]]
  ],
  secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
#     config :magnemite, MagnemiteWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 29.928571 | 67 | 0.721559 |
034369038941728620cfe66bf8be7567262e88e3 | 44,343 | ex | Elixir | lib/ecto/repo.ex | leandrocp/ecto | fc4afd2c8049c26a5fdb420d895060bb4a37d4d5 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo.ex | leandrocp/ecto | fc4afd2c8049c26a5fdb420d895060bb4a37d4d5 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo.ex | leandrocp/ecto | fc4afd2c8049c26a5fdb420d895060bb4a37d4d5 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Repo do
@moduledoc """
Defines a repository.
A repository maps to an underlying data store, controlled by the
adapter. For example, Ecto ships with a Postgres adapter that
stores data into a PostgreSQL database.
When used, the repository expects the `:otp_app` as option.
The `:otp_app` should point to an OTP application that has
the repository configuration. For example, the repository:
defmodule Repo do
use Ecto.Repo,
otp_app: :my_app,
adapter: Ecto.Adapters.Postgres
end
Could be configured with:
config :my_app, Repo,
database: "ecto_simple",
username: "postgres",
password: "postgres",
hostname: "localhost"
Most of the configuration that goes into the `config` is specific
to the adapter, so check `Ecto.Adapters.Postgres` documentation
for more information. However, some configuration is shared across
all adapters, they are:
* `:name`- The name of the Repo supervisor process
* `:priv` - the directory where to keep repository data, like
migrations, schema and more. Defaults to "priv/YOUR_REPO".
It must always point to a subdirectory inside the priv directory.
* `:url` - an URL that specifies storage information. Read below
for more information
* `:log` - the log level used when logging the query with Elixir's
Logger. If false, disables logging for that repository.
Defaults to `:debug`.
* `:telemetry_prefix` - we recommend adapters to publish events
using the `Telemetry` library. By default, the telemetry prefix
is based on the module name, so if your module is called
`MyApp.Repo`, the prefix will be `[:my_app, :repo]`. See the
"Telemetry Events" section to see which events we recommend
adapters to publish
## URLs
Repositories by default support URLs. For example, the configuration
above could be rewritten to:
config :my_app, Repo,
url: "ecto://postgres:postgres@localhost/ecto_simple"
The schema can be of any value. The path represents the database name
while options are simply merged in.
URL can include query parameters to override shared and adapter-specific
options `ssl`, `timeout`, `pool_size`:
config :my_app, Repo,
url: "ecto://postgres:postgres@localhost/ecto_simple?ssl=true&pool_size=10"
In case the URL needs to be dynamically configured, for example by
reading a system environment variable, such can be done via the
`c:init/2` repository callback:
def init(_type, config) do
{:ok, Keyword.put(config, :url, System.get_env("DATABASE_URL"))}
end
## Shared options
Almost all of the repository operations below accept the following
options:
* `:timeout` - The time in milliseconds to wait for the query call to
finish, `:infinity` will wait indefinitely (default: 15000);
* `:log` - When false, does not log the query
* `:telemetry_event` - The telemetry event name to dispatch the event under
Such cases will be explicitly documented as well as any extra option.
## Telemetry events
We recommend adapters to publish certain `Telemetry` events listed below.
Those events will use the `:telemetry_prefix` outlined above which defaults
to `[:my_app, :repo]`.
For instance, to receive all query events published by a repository called
`MyApp.Repo`, one would define a module:
defmodule MyApp.Telemetry do
def handle_event([:my_app, :repo, :query], time, metadata, config) do
IO.inspect binding()
end
end
and then attach this module to each event on your Application start callback:
Telemetry.attach("my-app-handler", [:my_app, :repo, :query], MyApp.Telemetry, :handle_event,%{})
Below we list all events developers should expect. All examples below consider
a repository named `MyApp.Repo`:
* `[:my_app, :repo, :query]` - should be invoked on every query send
to the adapter, including queries that are related to the transaction
management. The measurement will be the time necessary to run the query
including queue and encoding time. The metadata is a map where we recommend
developers to pass at least the same keys as found in the `Ecto.LogEntry`
struct
"""
@type t :: module
@doc false
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@behaviour Ecto.Repo
{otp_app, adapter, behaviours} = Ecto.Repo.Supervisor.compile_config(__MODULE__, opts)
@otp_app otp_app
@adapter adapter
@before_compile adapter
def config do
{:ok, config} = Ecto.Repo.Supervisor.runtime_config(:runtime, __MODULE__, @otp_app, [])
config
end
def __adapter__ do
@adapter
end
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :supervisor
}
end
def start_link(opts \\ []) do
Ecto.Repo.Supervisor.start_link(__MODULE__, @otp_app, @adapter, opts)
end
def stop(timeout \\ 5000) do
Supervisor.stop(__MODULE__, :normal, timeout)
end
def load(schema_or_types, data) do
Ecto.Repo.Schema.load(@adapter, schema_or_types, data)
end
def checkout(fun, opts \\ []) when is_function(fun) do
{adapter, meta} = Ecto.Repo.Registry.lookup(__MODULE__)
adapter.checkout(meta, opts, fun)
end
## Transactions
if Ecto.Adapter.Transaction in behaviours do
def transaction(fun_or_multi, opts \\ []) do
Ecto.Repo.Transaction.transaction(__MODULE__, fun_or_multi, opts)
end
def in_transaction? do
Ecto.Repo.Transaction.in_transaction?(__MODULE__)
end
@spec rollback(term) :: no_return
def rollback(value) do
Ecto.Repo.Transaction.rollback(__MODULE__, value)
end
end
## Schemas
if Ecto.Adapter.Schema in behaviours do
def insert(struct, opts \\ []) do
Ecto.Repo.Schema.insert(__MODULE__, struct, opts)
end
def update(struct, opts \\ []) do
Ecto.Repo.Schema.update(__MODULE__, struct, opts)
end
def insert_or_update(changeset, opts \\ []) do
Ecto.Repo.Schema.insert_or_update(__MODULE__, changeset, opts)
end
def delete(struct, opts \\ []) do
Ecto.Repo.Schema.delete(__MODULE__, struct, opts)
end
def insert!(struct, opts \\ []) do
Ecto.Repo.Schema.insert!(__MODULE__, struct, opts)
end
def update!(struct, opts \\ []) do
Ecto.Repo.Schema.update!(__MODULE__, struct, opts)
end
def insert_or_update!(changeset, opts \\ []) do
Ecto.Repo.Schema.insert_or_update!(__MODULE__, changeset, opts)
end
def delete!(struct, opts \\ []) do
Ecto.Repo.Schema.delete!(__MODULE__, struct, opts)
end
def insert_all(schema_or_source, entries, opts \\ []) do
Ecto.Repo.Schema.insert_all(__MODULE__, schema_or_source, entries, opts)
end
end
## Queryable
if Ecto.Adapter.Queryable in behaviours do
def update_all(queryable, updates, opts \\ []) do
Ecto.Repo.Queryable.update_all(__MODULE__, queryable, updates, opts)
end
def delete_all(queryable, opts \\ []) do
Ecto.Repo.Queryable.delete_all(__MODULE__, queryable, opts)
end
def all(queryable, opts \\ []) do
Ecto.Repo.Queryable.all(__MODULE__, queryable, opts)
end
def stream(queryable, opts \\ []) do
Ecto.Repo.Queryable.stream(__MODULE__, queryable, opts)
end
def get(queryable, id, opts \\ []) do
Ecto.Repo.Queryable.get(__MODULE__, queryable, id, opts)
end
def get!(queryable, id, opts \\ []) do
Ecto.Repo.Queryable.get!(__MODULE__, queryable, id, opts)
end
def get_by(queryable, clauses, opts \\ []) do
Ecto.Repo.Queryable.get_by(__MODULE__, queryable, clauses, opts)
end
def get_by!(queryable, clauses, opts \\ []) do
Ecto.Repo.Queryable.get_by!(__MODULE__, queryable, clauses, opts)
end
def one(queryable, opts \\ []) do
Ecto.Repo.Queryable.one(__MODULE__, queryable, opts)
end
def one!(queryable, opts \\ []) do
Ecto.Repo.Queryable.one!(__MODULE__, queryable, opts)
end
def aggregate(queryable, aggregate, field, opts \\ [])
when aggregate in [:count, :avg, :max, :min, :sum] and is_atom(field) do
Ecto.Repo.Queryable.aggregate(__MODULE__, queryable, aggregate, field, opts)
end
def exists?(queryable, opts \\ []) do
Ecto.Repo.Queryable.exists?(__MODULE__, queryable, opts)
end
def preload(struct_or_structs_or_nil, preloads, opts \\ []) do
Ecto.Repo.Preloader.preload(struct_or_structs_or_nil, __MODULE__, preloads, opts)
end
end
end
end
## User callbacks
@optional_callbacks init: 2
@doc """
A callback executed when the repo starts or when configuration is read.
The first argument is the context the callback is being invoked. If it
is called because the Repo supervisor is starting, it will be `:supervisor`.
It will be `:runtime` if it is called for reading configuration without
actually starting a process.
The second argument is the repository configuration as stored in the
application environment. It must return `{:ok, keyword}` with the updated
list of configuration or `:ignore` (only in the `:supervisor` case).
"""
@callback init(:supervisor | :runtime, config :: Keyword.t()) :: {:ok, Keyword.t()} | :ignore
## Ecto.Adapter
@doc """
Returns the adapter tied to the repository.
"""
@callback __adapter__ :: Ecto.Adapter.t()
@doc """
Returns the adapter configuration stored in the `:otp_app` environment.
If the `c:init/2` callback is implemented in the repository,
it will be invoked with the first argument set to `:runtime`.
"""
@callback config() :: Keyword.t()
@doc """
Starts any connection pooling or supervision and return `{:ok, pid}`
or just `:ok` if nothing needs to be done.
Returns `{:error, {:already_started, pid}}` if the repo is already
started or `{:error, term}` in case anything else goes wrong.
## Options
See the configuration in the moduledoc for options shared between adapters,
for adapter-specific configuration see the adapter's documentation.
"""
@callback start_link(opts :: Keyword.t()) ::
{:ok, pid}
| {:error, {:already_started, pid}}
| {:error, term}
@doc """
Shuts down the repository.
"""
@callback stop(timeout) :: :ok
@doc """
Checks out a connection for the duration of the function.
It returns the result of the function. This is useful when
you need to perform multiple operations against the repository
in a row and you want to avoid checking out the connection
multiple times.
`checkout/2` and `transaction/2` can be combined and nested
multiple times. If `checkout/2` is called inside the function
of another `checkout/2` call, the function is simply executed,
without checking out a new connection.
## Options
See the "Shared options" section at the module documentation.
"""
@callback checkout((() -> result), opts :: Keyword.t()) :: result when result: var
@doc """
Loads `data` into a struct or a map.
The first argument can be a a schema module, or a
map (of types) and determines the return value:
a struct or a map, respectively.
The second argument `data` specifies fields and values that are to be loaded.
It can be a map, a keyword list, or a `{fields, values}` tuple.
Fields can be atoms or strings.
Fields that are not present in the schema (or `types` map) are ignored.
If any of the values has invalid type, an error is raised.
## Examples
iex> MyRepo.load(User, %{name: "Alice", age: 25})
%User{name: "Alice", age: 25}
iex> MyRepo.load(User, [name: "Alice", age: 25])
%User{name: "Alice", age: 25}
`data` can also take form of `{fields, values}`:
iex> MyRepo.load(User, {[:name, :age], ["Alice", 25]})
%User{name: "Alice", age: 25, ...}
The first argument can also be a `types` map:
iex> types = %{name: :string, age: :integer}
iex> MyRepo.load(types, %{name: "Alice", age: 25})
%{name: "Alice", age: 25}
This function is especially useful when parsing raw query results:
iex> result = Ecto.Adapters.SQL.query!(MyRepo, "SELECT * FROM users", [])
iex> Enum.map(result.rows, &MyRepo.load(User, {result.columns, &1}))
[%User{...}, ...]
"""
@callback load(
module_or_map :: module | map(),
data :: map() | Keyword.t() | {list, list}
) :: Ecto.Schema.t() | map()
## Ecto.Adapter.Queryable
@optional_callbacks get: 3, get!: 3, get_by: 3, get_by!: 3, aggregate: 4, exists?: 2,
one: 2, one!: 2, preload: 3, all: 2, stream: 2, update_all: 3, delete_all: 2
@doc """
Fetches a single struct from the data store where the primary key matches the
given id.
Returns `nil` if no result was found. If the struct in the queryable
has no or more than one primary key, it will raise an argument error.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get(Post, 42)
"""
@callback get(queryable :: Ecto.Queryable.t(), id :: term, opts :: Keyword.t()) ::
Ecto.Schema.t() | nil
@doc """
Similar to `c:get/3` but raises `Ecto.NoResultsError` if no record was found.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get!(Post, 42)
"""
@callback get!(queryable :: Ecto.Queryable.t(), id :: term, opts :: Keyword.t()) ::
Ecto.Schema.t() | nil
@doc """
Fetches a single result from the query.
Returns `nil` if no result was found. Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get_by(Post, title: "My post")
"""
@callback get_by(
queryable :: Ecto.Queryable.t(),
clauses :: Keyword.t() | map,
opts :: Keyword.t()
) :: Ecto.Schema.t() | nil
@doc """
Similar to `get_by/3` but raises `Ecto.NoResultsError` if no record was found.
Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get_by!(Post, title: "My post")
"""
@callback get_by!(
queryable :: Ecto.Queryable.t(),
clauses :: Keyword.t() | map,
opts :: Keyword.t()
) :: Ecto.Schema.t() | nil
@doc """
Calculate the given `aggregate` over the given `field`.
If the query has a limit, offset or distinct set, it will be
automatically wrapped in a subquery in order to return the
proper result.
Any preload or select in the query will be ignored in favor of
the column being aggregated.
The aggregation will fail if any `group_by` field is set.
## Options
See the "Shared options" section at the module documentation.
## Examples
# Returns the number of visits per blog post
Repo.aggregate(Post, :count, :visits)
# Returns the average number of visits for the top 10
query = from Post, limit: 10
Repo.aggregate(query, :avg, :visits)
"""
@callback aggregate(
queryable :: Ecto.Queryable.t(),
aggregate :: :avg | :count | :max | :min | :sum,
field :: atom,
opts :: Keyword.t()
) :: term | nil
@doc """
Checks if there exists an entry that matches the given query.
Returns a boolean.
## Options
See the "Shared options" section at the module documentation.
"""
@callback exists?(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) :: boolean()
@doc """
Fetches a single result from the query.
Returns `nil` if no result was found. Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
"""
@callback one(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) ::
Ecto.Schema.t() | nil
@doc """
Similar to `c:one/2` but raises `Ecto.NoResultsError` if no record was found.
Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
"""
@callback one!(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Preloads all associations on the given struct or structs.
This is similar to `Ecto.Query.preload/3` except it allows
you to preload structs after they have been fetched from the
database.
In case the association was already loaded, preload won't attempt
to reload it.
## Options
Besides the "Shared options" section at the module documentation,
it accepts:
* `:force` - By default, Ecto won't preload associations that
are already loaded. By setting this option to true, any existing
association will be discarded and reloaded.
* `:in_parallel` - If the preloads must be done in parallel. It can
only be performed when we have more than one preload and the
repository is not in a transaction. Defaults to `true`.
* `:prefix` - the prefix to fetch preloads from. By default, queries
will use the same prefix as the one in the given collection. This
option allows the prefix to be changed.
## Examples
# Use a single atom to preload an association
posts = Repo.preload posts, :comments
# Use a list of atoms to preload multiple associations
posts = Repo.preload posts, [:comments, :authors]
# Use a keyword list to preload nested associations as well
posts = Repo.preload posts, [comments: [:replies, :likes], authors: []]
# Use a keyword list to customize how associations are queried
posts = Repo.preload posts, [comments: from(c in Comment, order_by: c.published_at)]
# Use a two-element tuple for a custom query and nested association definition
query = from c in Comment, order_by: c.published_at
posts = Repo.preload posts, [comments: {query, [:replies, :likes]}]
The query given to preload may also preload its own associations.
"""
@callback preload(structs_or_struct_or_nil, preloads :: term, opts :: Keyword.t()) ::
structs_or_struct_or_nil
when structs_or_struct_or_nil: [Ecto.Schema.t()] | Ecto.Schema.t() | nil
@doc """
Fetches all entries from the data store matching the given query.
May raise `Ecto.QueryError` if query validation fails.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query.
See the "Shared options" section at the module documentation.
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
MyRepo.all(query)
"""
@callback all(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) :: [Ecto.Schema.t()]
@doc """
Returns a lazy enumerable that emits all entries from the data store
matching the given query.
SQL adapters, such as Postgres and MySQL, can only enumerate a stream
inside a transaction.
May raise `Ecto.QueryError` if query validation fails.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query
* `:max_rows` - The number of rows to load from the database as we stream.
It is supported at least by Postgres and MySQL and defaults to 500.
See the "Shared options" section at the module documentation.
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
stream = MyRepo.stream(query)
MyRepo.transaction(fn() ->
Enum.to_list(stream)
end)
"""
@callback stream(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) :: Enum.t()
@doc """
Updates all entries matching the given query with the given values.
It returns a tuple containing the number of entries and any returned
result as second element. The second element is `nil` by default
unless a `select` is supplied in the update query. Note, however,
not all databases support returning data from UPDATEs.
Keep in mind this `update_all` will not update autogenerated
fields like the `updated_at` columns.
See `Ecto.Query.update/3` for update operations that can be
performed on fields.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query.
See the "Shared options" section at the module documentation for
remaining options.
## Examples
MyRepo.update_all(Post, set: [title: "New title"])
MyRepo.update_all(Post, inc: [visits: 1])
MyRepo.update_all(Post, [inc: [visits: 1]], [returning: [:visits]])
from(p in Post, where: p.id < 10)
|> MyRepo.update_all(set: [title: "New title"])
from(p in Post, where: p.id < 10, update: [set: [title: "New title"]])
|> MyRepo.update_all([])
from(p in Post, where: p.id < 10, update: [set: [title: ^new_title]])
|> MyRepo.update_all([])
from(p in Post, where: p.id < 10, update: [set: [title: fragment("upper(?)", ^new_title)]])
|> MyRepo.update_all([])
"""
@callback update_all(
queryable :: Ecto.Queryable.t(),
updates :: Keyword.t(),
opts :: Keyword.t()
) :: {integer, nil | [term]}
@doc """
Deletes all entries matching the given query.
It returns a tuple containing the number of entries and any returned
result as second element. The second element is `nil` by default
unless a `select` is supplied in the update query. Note, however,
not all databases support returning data from DELETEs.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query.
See the "Shared options" section at the module documentation for
remaining options.
## Examples
MyRepo.delete_all(Post)
from(p in Post, where: p.id < 10) |> MyRepo.delete_all
"""
@callback delete_all(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) ::
{integer, nil | [term]}
## Ecto.Adapter.Schema
@optional_callbacks insert_all: 3, insert: 2, insert!: 2, update: 2, update!: 2,
delete: 2, delete!: 2, insert_or_update: 2, insert_or_update!: 2
@doc """
Inserts all entries into the repository.
It expects a schema module (`MyApp.User`) or a source (`"users"`) or
both (`{"users", MyApp.User}`) as the first argument. The second
argument is a list of entries to be inserted, either as keyword
lists or as maps. The keys of the entries are the field names as
atoms and the value should be the respective value for the field
type or, optionally, an `Ecto.Query` that returns a single entry
with a single value.
It returns a tuple containing the number of entries
and any returned result as second element. If the database
does not support RETURNING in INSERT statements or no
return result was selected, the second element will be `nil`.
When a schema module is given, the entries given will be properly dumped
before being sent to the database. If the schema contains an
autogenerated ID field, it will be handled either at the adapter
or the storage layer. However any other autogenerated value, like
timestamps, won't be autogenerated when using `c:insert_all/3`.
This is by design as this function aims to be a more direct way
to insert data into the database without the conveniences of
`c:insert/2`. This is also consistent with `c:update_all/3` that
does not handle timestamps as well.
It is also not possible to use `insert_all` to insert across multiple
tables, therefore associations are not supported.
If a source is given, without a schema module, the given fields are passed
as is to the adapter.
## Options
* `:returning` - selects which fields to return. When `true`,
returns all fields in the given schema. May be a list of
fields, where a struct is still returned but only with the
given fields. Or `false`, where nothing is returned (the default).
This option is not supported by all databases.
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL).
* `:on_conflict` - It may be one of `:raise` (the default), `:nothing`,
`:replace_all`, `:replace_all_except_primary_key`, `{:replace, fields}`,
a keyword list of update instructions, `{:replace, fields}` or an `Ecto.Query`
query for updates. See the "Upserts" section for more information.
* `:conflict_target` - A list of column names to verify for conflicts.
It is expected those columns to have unique indexes on them that may conflict.
If none is specified, the conflict target is left up to the database.
It may also be `{:constraint, constraint_name_as_atom}` in databases
that support the "ON CONSTRAINT" expression, such as PostgreSQL, or
`{:unsafe_fragment, binary_fragment}` to pass any expression to the
database without any sanitization, such as
`ON CONFLICT (coalesce(firstname, ""), coalesce(lastname, ""))`.
See the "Shared options" section at the module documentation for
remaining options.
## Examples
MyRepo.insert_all(Post, [[title: "My first post"], [title: "My second post"]])
MyRepo.insert_all(Post, [%{title: "My first post"}, %{title: "My second post"}])
## Upserts
`c:insert_all/3` provides upserts (update or inserts) via the `:on_conflict`
option. The `:on_conflict` option supports the following values:
* `:raise` - raises if there is a conflicting primary key or unique index
* `:nothing` - ignores the error in case of conflicts
* `:replace_all` - replace all values on the existing row by the new entry,
including values not sent explicitly by Ecto, such as database defaults.
This option requires a schema
* `:replace_all_except_primary_key` - same as above except primary keys are
not replaced. This option requires a schema
* `{:replace, fields}` - replace only specific columns. This option requires
conflict_target
* a keyword list of update instructions - such as the one given to
`c:update_all/3`, for example: `[set: [title: "new title"]]`
* an `Ecto.Query` that will act as an `UPDATE` statement, such as the
one given to `c:update_all/3`
Upserts map to "ON CONFLICT" on databases like Postgres and "ON DUPLICATE KEY"
on databases such as MySQL.
## Return values
By default, both Postgres and MySQL return the amount of entries
inserted on `c:insert_all/3`. However, when the `:on_conflict` option
is specified, Postgres will only return a row if it was affected
while MySQL returns at least the number of entries attempted.
For example, if `:on_conflict` is set to `:nothing`, Postgres will
return 0 if no new entry was added while MySQL will still return
the amount of entries attempted to be inserted, even if no entry
was added. Even worse, if `:on_conflict` is query, MySQL will return
the number of attempted entries plus the number of entries modified
by the UPDATE query.
"""
@callback insert_all(
schema_or_source :: binary | {binary, module} | module,
entries :: [map | [{atom, term | Ecto.Query.t}]],
opts :: Keyword.t()
) :: {integer, nil | [term]}
@doc """
Inserts a struct defined via `Ecto.Schema` or a changeset.
In case a struct is given, the struct is converted into a changeset
with all non-nil fields as part of the changeset.
In case a changeset is given, the changes in the changeset are
merged with the struct fields, and all of them are sent to the
database.
It returns `{:ok, struct}` if the struct has been successfully
inserted or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
* `:returning` - selects which fields to return. When `true`, returns
all fields in the given struct. May be a list of fields, where a
struct is still returned but only with the given fields. In any case,
it will include fields with `read_after_writes` set to true.
Not all databases support this option.
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
* `:on_conflict` - It may be one of `:raise` (the default), `:nothing`,
`:replace_all`, `:replace_all_except_primary_key`, `{:replace, fields}`,
a keyword list of update instructions or an `Ecto.Query` query for updates.
See the "Upserts" section for more information.
* `:conflict_target` - A list of column names to verify for conflicts.
It is expected those columns to have unique indexes on them that may conflict.
If none is specified, the conflict target is left up to the database.
May also be `{:constraint, constraint_name_as_atom}` in databases
that support the "ON CONSTRAINT" expression, such as PostgreSQL.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
See the "Shared options" section at the module documentation.
## Examples
A typical example is calling `MyRepo.insert/1` with a struct
and acting on the return value:
case MyRepo.insert %Post{title: "Ecto is great"} do
{:ok, struct} -> # Inserted with success
{:error, changeset} -> # Something went wrong
end
## Upserts
`c:insert/2` provides upserts (update or inserts) via the `:on_conflict`
option. The `:on_conflict` option supports the following values:
* `:raise` - raises if there is a conflicting primary key or unique index
* `:nothing` - ignores the error in case of conflicts
* `:replace_all` - replace all values on the existing row with the values
in the schema/changeset, including autogenerated fields such as `inserted_at`
and `updated_at`
* `:replace_all_except_primary_key` - same as above except primary keys are
not replaced
* `{:replace, fields}` - replace only specific columns. This option requires
conflict_target
* a keyword list of update instructions - such as the one given to
`c:update_all/3`, for example: `[set: [title: "new title"]]`
* an `Ecto.Query` that will act as an `UPDATE` statement, such as the
one given to `c:update_all/3`. If the struct cannot be found, `Ecto.StaleEntryError`
will be raised.
Upserts map to "ON CONFLICT" on databases like Postgres and "ON DUPLICATE KEY"
on databases such as MySQL.
As an example, imagine `:title` is marked as a unique column in
the database:
{:ok, inserted} = MyRepo.insert(%Post{title: "this is unique"})
Now we can insert with the same title but do nothing on conflicts:
{:ok, ignored} = MyRepo.insert(%Post{title: "this is unique"}, on_conflict: :nothing)
assert ignored.id == nil
Because we used `on_conflict: :nothing`, instead of getting an error,
we got `{:ok, struct}`. However the returned struct does not reflect
the data in the database. One possible mechanism to detect if an
insert or nothing happened in case of `on_conflict: :nothing` is by
checking the `id` field. `id` will be nil if the field is autogenerated
by the database and no insert happened.
For actual upserts, where an insert or update may happen, the situation
is slightly more complex, as the database does not actually inform us
if an insert or update happened. Let's insert a post with the same title
but use a query to update the body column in case of conflicts:
# In Postgres (it requires the conflict target for updates):
on_conflict = [set: [body: "updated"]]
{:ok, updated} = MyRepo.insert(%Post{title: "this is unique"},
on_conflict: on_conflict, conflict_target: :title)
# In MySQL (conflict target is not supported):
on_conflict = [set: [title: "updated"]]
{:ok, updated} = MyRepo.insert(%Post{id: inserted.id, title: "updated"},
on_conflict: on_conflict)
In the examples above, even though it returned `:ok`, we do not know
if we inserted new data or if we updated only the `:on_conflict` fields.
In case an update happened, the data in the struct most likely does
not match the data in the database. For example, autogenerated fields
such as `inserted_at` will point to now rather than the time the
struct was actually inserted.
If you need to guarantee the data in the returned struct mirrors the
database, you have three options:
* Use `on_conflict: :replace_all`, although that will replace all
fields in the database with the ones in the struct/changeset,
including autogenerated fields such as `insert_at` and `updated_at`:
MyRepo.insert(%Post{title: "this is unique"},
on_conflict: :replace_all, conflict_target: :title)
* Specify `read_after_writes: true` in your schema for choosing
fields that are read from the database after every operation.
Or pass `returning: true` to `insert` to read all fields back:
MyRepo.insert(%Post{title: "this is unique"}, returning: true,
on_conflict: on_conflict, conflict_target: :title)
* Alternatively, read the data again from the database in a separate
query. This option requires the primary key to be generated by the
database:
{:ok, updated} = MyRepo.insert(%Post{title: "this is unique"}, on_conflict: on_conflict)
Repo.get(Post, updated.id)
Because of the inability to know if the struct is up to date or not,
using associations with the `:on_conflict` option is not recommended.
For instance, Ecto may even trigger constraint violations when associations
are used with `on_conflict: :nothing`, as no ID will be available in
the case the record already exists, and it is not possible for Ecto to
detect such cases reliably.
"""
@callback insert(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Updates a changeset using its primary key.
A changeset is required as it is the only mechanism for
tracking dirty changes. Only the fields present in the `changes` part
of the changeset are sent to the database. Any other, in-memory
changes done to the schema are ignored.
If the struct has no primary key, `Ecto.NoPrimaryKeyFieldError`
will be raised.
If the struct cannot be found, `Ecto.StaleEntryError` will be raised.
It returns `{:ok, struct}` if the struct has been successfully
updated or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
Besides the "Shared options" section at the module documentation,
it accepts:
* `:force` - By default, if there are no changes in the changeset,
`c:update/2` is a no-op. By setting this option to true, update
callbacks will always be executed, even if there are no changes
(including timestamps).
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
## Example
post = MyRepo.get!(Post, 42)
post = Ecto.Changeset.change post, title: "New title"
case MyRepo.update post do
{:ok, struct} -> # Updated with success
{:error, changeset} -> # Something went wrong
end
"""
@callback update(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
{:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Inserts or updates a changeset depending on whether the struct is persisted
or not.
The distinction whether to insert or update will be made on the
`Ecto.Schema.Metadata` field `:state`. The `:state` is automatically set by
Ecto when loading or building a schema.
Please note that for this to work, you will have to load existing structs from
the database. So even if the struct exists, this won't work:
struct = %Post{id: "existing_id", ...}
changeset = Post.changeset(struct, %{})
MyRepo.insert_or_update changeset
# => {:error, changeset} # id already exists
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
See the "Shared options" section at the module documentation.
## Example
result =
case MyRepo.get(Post, id) do
nil -> %Post{id: id} # Post not found, we build one
post -> post # Post exists, let's use it
end
|> Post.changeset(changes)
|> MyRepo.insert_or_update
case result do
{:ok, struct} -> # Inserted or updated with success
{:error, changeset} -> # Something went wrong
end
"""
@callback insert_or_update(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
{:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Deletes a struct using its primary key.
If the struct has no primary key, `Ecto.NoPrimaryKeyFieldError`
will be raised. If the struct has been removed from db prior to
call, `Ecto.StaleEntryError` will be raised.
It returns `{:ok, struct}` if the struct has been successfully
deleted or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
See the "Shared options" section at the module documentation.
## Example
post = MyRepo.get!(Post, 42)
case MyRepo.delete post do
{:ok, struct} -> # Deleted with success
{:error, changeset} -> # Something went wrong
end
"""
@callback delete(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Same as `c:insert/2` but returns the struct or raises if the changeset is invalid.
"""
@callback insert!(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: Ecto.Schema.t()
@doc """
Same as `c:update/2` but returns the struct or raises if the changeset is invalid.
"""
@callback update!(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Same as `c:insert_or_update/2` but returns the struct or raises if the changeset
is invalid.
"""
@callback insert_or_update!(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Same as `c:delete/2` but returns the struct or raises if the changeset is invalid.
"""
@callback delete!(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: Ecto.Schema.t()
## Ecto.Adapter.Transaction
@optional_callbacks transaction: 2, in_transaction?: 0, rollback: 1
@doc """
Runs the given function or `Ecto.Multi` inside a transaction.
## Use with function
If an unhandled error occurs the transaction will be rolled back
and the error will bubble up from the transaction function.
If no error occurred the transaction will be committed when the
function returns. A transaction can be explicitly rolled back
by calling `c:rollback/1`, this will immediately leave the function
and return the value given to `rollback` as `{:error, value}`.
A successful transaction returns the value returned by the function
wrapped in a tuple as `{:ok, value}`.
If `c:transaction/2` is called inside another transaction, the function
is simply executed, without wrapping the new transaction call in any
way. If there is an error in the inner transaction and the error is
rescued, or the inner transaction is rolled back, the whole outer
transaction is marked as tainted, guaranteeing nothing will be committed.
## Use with Ecto.Multi
Besides functions transaction can be used with an Ecto.Multi struct.
Transaction will be started, all operations applied and in case of
success committed returning `{:ok, changes}`. In case of any errors
the transaction will be rolled back and
`{:error, failed_operation, failed_value, changes_so_far}` will be
returned.
You can read more about using transactions with `Ecto.Multi` as well as
see some examples in the `Ecto.Multi` documentation.
## Options
See the "Shared options" section at the module documentation.
## Examples
import Ecto.Changeset, only: [change: 2]
MyRepo.transaction(fn ->
MyRepo.update!(change(alice, balance: alice.balance - 10))
MyRepo.update!(change(bob, balance: bob.balance + 10))
end)
# Roll back a transaction explicitly
MyRepo.transaction(fn ->
p = MyRepo.insert!(%Post{})
if not Editor.post_allowed?(p) do
MyRepo.rollback(:posting_not_allowed)
end
end)
# With Ecto.Multi
Ecto.Multi.new
|> Ecto.Multi.insert(:post, %Post{})
|> MyRepo.transaction
"""
@callback transaction(fun_or_multi :: fun | Ecto.Multi.t(), opts :: Keyword.t()) ::
{:ok, any}
| {:error, any}
| {:error, Ecto.Multi.name(), any, %{Ecto.Multi.name() => any}}
@doc """
Returns true if the current process is inside a transaction.
If you are using the `Ecto.Adapters.SQL.Sandbox` in tests, note that even
though each test is inside a transaction, `in_transaction?/0` will only
return true inside transactions explicitly created with `transaction/2`. This
is done so the test environment mimics dev and prod.
If you are trying to debug transaction-related code while using
`Ecto.Adapters.SQL.Sandbox`, it may be more helpful to configure the database
to log all statements and consult those logs.
## Examples
MyRepo.in_transaction?
#=> false
MyRepo.transaction(fn ->
MyRepo.in_transaction? #=> true
end)
"""
@callback in_transaction?() :: boolean
@doc """
Rolls back the current transaction.
The transaction will return the value given as `{:error, value}`.
"""
@callback rollback(value :: any) :: no_return
end
| 35.789346 | 102 | 0.667276 |
034383bb57cca5664ab1567f0b78d558d50e6049 | 36,327 | ex | Elixir | lib/elixir/lib/exception.ex | michalmuskala/elixir | 097de7d308ac40747b4524e266558a3d9498a60b | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/exception.ex | michalmuskala/elixir | 097de7d308ac40747b4524e266558a3d9498a60b | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/exception.ex | michalmuskala/elixir | 097de7d308ac40747b4524e266558a3d9498a60b | [
"Apache-2.0"
] | null | null | null | defmodule Exception do
@moduledoc """
Functions to format throw/catch/exit and exceptions.
Note that stacktraces in Elixir are updated on throw,
errors and exits. For example, at any given moment,
`System.stacktrace/0` will return the stacktrace for the
last throw/error/exit that occurred in the current process.
Do not rely on the particular format returned by the `format*`
functions in this module. They may be changed in future releases
in order to better suit Elixir's tool chain. In other words,
by using the functions in this module it is guaranteed you will
format exceptions as in the current Elixir version being used.
"""
@typedoc "The exception type"
@type t :: %{
required(:__struct__) => module,
required(:__exception__) => true,
optional(atom) => any
}
@typedoc "The kind handled by formatting functions"
@type kind :: :error | non_error_kind
@typep non_error_kind :: :exit | :throw | {:EXIT, pid}
@type stacktrace :: [stacktrace_entry]
@type stacktrace_entry ::
{module, atom, arity_or_args, location}
| {(... -> any), arity_or_args, location}
@typep arity_or_args :: non_neg_integer | list
@typep location :: keyword
@callback exception(term) :: t
@callback message(t) :: String.t()
@doc """
Called from `Exception.blame/3` to augment the exception struct.
Can be used to collect additional information about the exception
or do some additional expensive computation.
"""
@callback blame(t, stacktrace) :: {t, stacktrace}
@optional_callbacks [blame: 2]
@doc """
Returns `true` if the given `term` is an exception.
"""
def exception?(term)

# A term is an exception if it is a struct whose `:__exception__`
# field is `true` — the shape produced by `defexception`.
def exception?(term), do: match?(%_{__exception__: true}, term)
@doc """
Gets the message for an `exception`.
"""
# Dispatches to the exception module's `message/1` callback, defending
# against misbehaving callbacks: if the callback raises, or returns
# something other than a binary, a descriptive fallback message is built
# instead of letting the failure propagate.
def message(%module{__exception__: true} = exception) do
  try do
    module.message(exception)
  rescue
    # The callback itself raised: describe the failure, recursively
    # formatting the caught exception's own message.
    caught_exception ->
      "got #{inspect(caught_exception.__struct__)} with message " <>
        "#{inspect(message(caught_exception))} while retrieving Exception.message/1 " <>
        "for #{inspect(exception)}"
  else
    # The callback returned normally: accept binaries as-is...
    result when is_binary(result) ->
      result

    # ...and flag any non-binary return value.
    result ->
      "got #{inspect(result)} " <>
        "while retrieving Exception.message/1 for #{inspect(exception)} " <>
        "(expected a string)"
  end
end
@doc """
Normalizes an exception, converting Erlang exceptions
to Elixir exceptions.
It takes the `kind` spilled by `catch` as an argument and
normalizes only `:error`, returning the untouched payload
for others.
The third argument, a stacktrace, is optional. If it is
not supplied `System.stacktrace/0` will sometimes be used
to get additional information for the `kind` `:error`. If
the stacktrace is unknown and `System.stacktrace/0` would
not return the stacktrace corresponding to the exception
an empty stacktrace, `[]`, must be used.
"""
@spec normalize(:error, any, stacktrace) :: t
@spec normalize(non_error_kind, payload, stacktrace) :: payload when payload: var
# Building a stacktrace is expensive, so it defaults to nil and is
# only fetched when actually required.
def normalize(kind, payload, stacktrace \\ nil)

# Already an Elixir exception struct: nothing to normalize.
def normalize(:error, %_{__exception__: true} = exception, _stacktrace), do: exception

# An Erlang error term: convert it into the matching Elixir exception.
def normalize(:error, erlang_error, stacktrace),
  do: ErlangError.normalize(erlang_error, stacktrace)

# Throws, exits and friends pass through untouched.
def normalize(_kind, payload, _stacktrace), do: payload
@doc """
Normalizes and formats any throw/error/exit.
The message is formatted and displayed in the same
format as used by Elixir's CLI.
The third argument, a stacktrace, is optional. If it is
not supplied `System.stacktrace/0` will sometimes be used
to get additional information for the `kind` `:error`. If
the stacktrace is unknown and `System.stacktrace/0` would
not return the stacktrace corresponding to the exception
an empty stacktrace, `[]`, must be used.
"""
@spec format_banner(kind, any, stacktrace | nil) :: String.t()
def format_banner(kind, exception, stacktrace \\ nil)
def format_banner(:error, exception, stacktrace) do
exception = normalize(:error, exception, stacktrace)
"** (" <> inspect(exception.__struct__) <> ") " <> message(exception)
end
def format_banner(:throw, reason, _stacktrace) do
"** (throw) " <> inspect(reason)
end
def format_banner(:exit, reason, _stacktrace) do
"** (exit) " <> format_exit(reason, <<"\n ">>)
end
def format_banner({:EXIT, pid}, reason, _stacktrace) do
"** (EXIT from #{inspect(pid)}) " <> format_exit(reason, <<"\n ">>)
end
@doc """
Normalizes and formats throw/errors/exits and stacktraces.
It relies on `format_banner/3` and `format_stacktrace/1`
to generate the final format.
Note that `{:EXIT, pid}` do not generate a stacktrace though
(as they are retrieved as messages without stacktraces).
"""
@spec format(kind, any, stacktrace | nil) :: String.t()
def format(kind, payload, stacktrace \\ nil)
def format({:EXIT, _} = kind, any, _) do
format_banner(kind, any)
end
def format(kind, payload, stacktrace) do
stacktrace = stacktrace || System.stacktrace()
message = format_banner(kind, payload, stacktrace)
case stacktrace do
[] -> message
_ -> message <> "\n" <> format_stacktrace(stacktrace)
end
end
@doc """
Attaches information to exceptions for extra debugging.
This operation is potentially expensive, as it reads data
from the filesystem, parse beam files, evaluates code and
so on.
If the exception module implements the optional `c:blame/2`
callback, it will be invoked to perform the computation.
"""
@since "1.5.0"
@spec blame(:error, any, stacktrace) :: {t, stacktrace}
@spec blame(non_error_kind, payload, stacktrace) :: {payload, stacktrace} when payload: var
def blame(kind, error, stacktrace)

# For errors, normalize first, then let the exception module augment
# itself via the optional blame/2 callback when it defines one.
def blame(:error, error, stacktrace) do
  %module{} = struct = normalize(:error, error, stacktrace)

  blamable? = Code.ensure_loaded?(module) and function_exported?(module, :blame, 2)

  if blamable? do
    module.blame(struct, stacktrace)
  else
    {struct, stacktrace}
  end
end

# Throws and exits carry no exception struct; return them unchanged.
def blame(_kind, reason, stacktrace) do
  {reason, stacktrace}
end
@doc """
Blames the invocation of the given module, function and arguments.
This function will retrieve the available clauses from bytecode
and evaluate them against the given arguments. The clauses are
returned as a list of `{args, guards}` pairs where each argument
and each top-level condition in a guard separated by `and`/`or`
is wrapped in a tuple with blame metadata.
This function returns either `{:ok, definition, clauses}` or `:error`.
Where `definition` is `:def`, `:defp`, `:defmacro` or `:defmacrop`.
Note this functionality requires Erlang/OTP 20, otherwise `:error`
is always returned.
"""
@since "1.5.0"
@spec blame_mfa(module, function, args :: [term]) ::
{:ok, :def | :defp | :defmacro | :defmacrop, [{args :: [term], guards :: [term]}]}
| :error
def blame_mfa(module, function, args)
when is_atom(module) and is_atom(function) and is_list(args) do
try do
blame_mfa(module, function, length(args), args)
rescue
_ -> :error
end
end
defp blame_mfa(module, function, arity, call_args) do
with [_ | _] = path <- :code.which(module),
{:ok, {_, [debug_info: debug_info]}} <- :beam_lib.chunks(path, [:debug_info]),
{:debug_info_v1, backend, data} <- debug_info,
{:ok, %{definitions: defs}} <- backend.debug_info(:elixir_v1, module, data, []),
{_, kind, _, clauses} <- List.keyfind(defs, {function, arity}, 0) do
clauses =
for {meta, ex_args, guards, _block} <- clauses do
scope = :elixir_erl.scope(meta)
{erl_args, scope} =
:elixir_erl_clauses.match(&:elixir_erl_pass.translate_args/2, ex_args, scope)
{args, binding} =
[call_args, ex_args, erl_args]
|> Enum.zip()
|> Enum.map_reduce([], &blame_arg/2)
guards = Enum.map(guards, &blame_guard(&1, scope, binding))
{args, guards}
end
{:ok, kind, clauses}
else
_ -> :error
end
end
defp blame_arg({call_arg, ex_arg, erl_arg}, binding) do
{match?, binding} = blame_arg(erl_arg, call_arg, binding)
{blame_wrap(match?, rewrite_arg(ex_arg)), binding}
end
defp blame_arg(erl_arg, call_arg, binding) do
binding = :orddict.store(:VAR, call_arg, binding)
try do
{:value, _, binding} = :erl_eval.expr({:match, 0, erl_arg, {:var, 0, :VAR}}, binding, :none)
{true, binding}
rescue
_ -> {false, binding}
end
end
defp rewrite_arg(arg) do
Macro.prewalk(arg, fn
{:%{}, meta, [__struct__: Range, first: first, last: last]} ->
{:.., meta, [first, last]}
other ->
other
end)
end
defp blame_guard({{:., _, [:erlang, op]}, meta, [left, right]}, scope, binding)
when op == :andalso or op == :orelse do
guards = [
blame_guard(left, scope, binding),
blame_guard(right, scope, binding)
]
{rewrite_guard_call(op), meta, guards}
end
defp blame_guard(ex_guard, scope, binding) do
{erl_guard, _} = :elixir_erl_pass.translate(ex_guard, scope)
match? =
try do
{:value, true, _} = :erl_eval.expr(erl_guard, binding, :none)
true
rescue
_ -> false
end
blame_wrap(match?, rewrite_guard(ex_guard))
end
defp rewrite_guard(guard) do
Macro.prewalk(guard, fn
{{:., _, [:erlang, :element]}, _, [{{:., _, [:erlang, :+]}, _, [int, 1]}, arg]} ->
{:elem, [], [arg, int]}
{{:., _, [:erlang, :element]}, _, [int, arg]} when is_integer(int) ->
{:elem, [], [arg, int - 1]}
{:., _, [:erlang, call]} ->
rewrite_guard_call(call)
other ->
other
end)
end
defp rewrite_guard_call(:orelse), do: :or
defp rewrite_guard_call(:andalso), do: :and
defp rewrite_guard_call(:"=<"), do: :<=
defp rewrite_guard_call(:"/="), do: :!=
defp rewrite_guard_call(:"=:="), do: :===
defp rewrite_guard_call(:"=/="), do: :!==
defp rewrite_guard_call(op) when op in [:band, :bor, :bnot, :bsl, :bsr, :bxor],
do: {:., [], [Bitwise, op]}
defp rewrite_guard_call(op) when op in [:xor, :element, :size], do: {:., [], [:erlang, op]}
defp rewrite_guard_call(op), do: op
defp blame_wrap(match?, ast), do: %{match?: match?, node: ast}
@doc """
Formats an exit. It returns a string.
Often there are errors/exceptions inside exits. Exits are often
wrapped by the caller and provide stacktraces too. This function
formats exits in a way to nicely show the exit reason, caller
and stacktrace.
"""
@spec format_exit(any) :: String.t()
def format_exit(reason) do
format_exit(reason, <<"\n ">>)
end
# 2-Tuple could be caused by an error if the second element is a stacktrace.
defp format_exit({exception, maybe_stacktrace} = reason, joiner)
when is_list(maybe_stacktrace) and maybe_stacktrace !== [] do
try do
Enum.map(maybe_stacktrace, &format_stacktrace_entry/1)
catch
:error, _ ->
# Not a stacktrace, was an exit.
format_exit_reason(reason)
else
formatted_stacktrace ->
# Assume a non-empty list formattable as stacktrace is a
# stacktrace, so exit was caused by an error.
message =
"an exception was raised:" <>
joiner <> format_banner(:error, exception, maybe_stacktrace)
Enum.join([message | formatted_stacktrace], joiner <> <<" ">>)
end
end
# :supervisor.start_link returns this error reason when it fails to init
# because a child's start_link raises.
defp format_exit({:shutdown, {:failed_to_start_child, child, {:EXIT, reason}}}, joiner) do
format_start_child(child, reason, joiner)
end
# :supervisor.start_link returns this error reason when it fails to init
# because a child's start_link returns {:error, reason}.
defp format_exit({:shutdown, {:failed_to_start_child, child, reason}}, joiner) do
format_start_child(child, reason, joiner)
end
# 2-Tuple could be an exit caused by mfa if second element is mfa, args
# must be a list of arguments - max length 255 due to max arity.
defp format_exit({reason2, {mod, fun, args}} = reason, joiner)
when length(args) < 256 do
try do
format_mfa(mod, fun, args)
catch
:error, _ ->
# Not an mfa, was an exit.
format_exit_reason(reason)
else
mfa ->
# Assume tuple formattable as an mfa is an mfa, so exit was caused by
# failed mfa.
"exited in: " <>
mfa <> joiner <> "** (EXIT) " <> format_exit(reason2, joiner <> <<" ">>)
end
end
defp format_exit(reason, _joiner) do
format_exit_reason(reason)
end
defp format_exit_reason(:normal), do: "normal"
defp format_exit_reason(:shutdown), do: "shutdown"
defp format_exit_reason({:shutdown, reason}) do
"shutdown: #{inspect(reason)}"
end
defp format_exit_reason(:calling_self), do: "process attempted to call itself"
defp format_exit_reason(:timeout), do: "time out"
defp format_exit_reason(:killed), do: "killed"
defp format_exit_reason(:noconnection), do: "no connection"
defp format_exit_reason(:noproc) do
"no process: the process is not alive or there's no process currently associated with the given name, possibly because its application isn't started"
end
defp format_exit_reason({:nodedown, node_name}) when is_atom(node_name) do
"no connection to #{node_name}"
end
# :gen_server exit reasons
defp format_exit_reason({:already_started, pid}) do
"already started: " <> inspect(pid)
end
defp format_exit_reason({:bad_return_value, value}) do
"bad return value: " <> inspect(value)
end
defp format_exit_reason({:bad_call, request}) do
"bad call: " <> inspect(request)
end
defp format_exit_reason({:bad_cast, request}) do
"bad cast: " <> inspect(request)
end
# :supervisor.start_link error reasons
# If value is a list will be formatted by mfa exit in format_exit/1
defp format_exit_reason({:bad_return, {mod, :init, value}})
when is_atom(mod) do
format_mfa(mod, :init, 1) <> " returned a bad value: " <> inspect(value)
end
defp format_exit_reason({:bad_start_spec, start_spec}) do
"bad child specification, invalid children: " <> inspect(start_spec)
end
defp format_exit_reason({:start_spec, start_spec}) do
"bad child specification, " <> format_sup_spec(start_spec)
end
defp format_exit_reason({:supervisor_data, data}) do
"bad supervisor configuration, " <> format_sup_data(data)
end
defp format_exit_reason(reason), do: inspect(reason)
defp format_start_child(child, reason, joiner) do
"shutdown: failed to start child: " <>
inspect(child) <> joiner <> "** (EXIT) " <> format_exit(reason, joiner <> <<" ">>)
end
defp format_sup_data({:invalid_type, type}) do
"invalid type: " <> inspect(type)
end
defp format_sup_data({:invalid_strategy, strategy}) do
"invalid strategy: " <> inspect(strategy)
end
defp format_sup_data({:invalid_intensity, intensity}) do
"invalid max_restarts (intensity): " <> inspect(intensity)
end
defp format_sup_data({:invalid_period, period}) do
"invalid max_seconds (period): " <> inspect(period)
end
defp format_sup_data({:invalid_max_children, max_children}) do
"invalid max_children: " <> inspect(max_children)
end
defp format_sup_data({:invalid_extra_arguments, extra}) do
"invalid extra_arguments: " <> inspect(extra)
end
defp format_sup_data(other), do: "got: #{inspect(other)}"
defp format_sup_spec({:duplicate_child_name, id}) do
"""
more than one child specification has the id: #{inspect(id)}.
If using maps as child specifications, make sure the :id keys are unique.
If using a module or {module, arg} as child, use Supervisor.child_spec/2 to change the :id, for example:
children = [
Supervisor.child_spec({MyWorker, arg}, id: :my_worker_1),
Supervisor.child_spec({MyWorker, arg}, id: :my_worker_2)
]
"""
end
defp format_sup_spec({:invalid_child_spec, child_spec}) do
"invalid child specification: #{inspect(child_spec)}"
end
defp format_sup_spec({:invalid_child_type, type}) do
"invalid child type: #{inspect(type)}. Must be :worker or :supervisor."
end
defp format_sup_spec({:invalid_mfa, mfa}) do
"invalid mfa: #{inspect(mfa)}"
end
defp format_sup_spec({:invalid_restart_type, restart}) do
"invalid restart type: #{inspect(restart)}. Must be :permanent, :transient or :temporary."
end
defp format_sup_spec({:invalid_shutdown, shutdown}) do
"invalid shutdown: #{inspect(shutdown)}. Must be an integer >= 0, :infinity or :brutal_kill."
end
defp format_sup_spec({:invalid_module, mod}) do
"invalid module: #{inspect(mod)}. Must be an atom."
end
defp format_sup_spec({:invalid_modules, modules}) do
"invalid modules: #{inspect(modules)}. Must be a list of atoms or :dynamic."
end
defp format_sup_spec(other), do: "got: #{inspect(other)}"
@doc """
Receives a stacktrace entry and formats it into a string.
"""
@spec format_stacktrace_entry(stacktrace_entry) :: String.t()
def format_stacktrace_entry(entry)
# From Macro.Env.stacktrace
def format_stacktrace_entry({module, :__MODULE__, 0, location}) do
format_location(location) <> inspect(module) <> " (module)"
end
# From :elixir_compiler_*
def format_stacktrace_entry({_module, :__MODULE__, 1, location}) do
format_location(location) <> "(module)"
end
# From :elixir_compiler_*
def format_stacktrace_entry({_module, :__FILE__, 1, location}) do
format_location(location) <> "(file)"
end
def format_stacktrace_entry({module, fun, arity, location}) do
format_application(module) <> format_location(location) <> format_mfa(module, fun, arity)
end
def format_stacktrace_entry({fun, arity, location}) do
format_location(location) <> format_fa(fun, arity)
end
defp format_application(module) do
# We cannot use Application due to bootstrap issues
case :application.get_application(module) do
{:ok, app} -> "(" <> Atom.to_string(app) <> ") "
:undefined -> ""
end
end
@doc """
Formats the stacktrace.
A stacktrace must be given as an argument. If not, the stacktrace
is retrieved from `Process.info/2`.
"""
def format_stacktrace(trace \\ nil) do
trace =
if trace do
trace
else
case Process.info(self(), :current_stacktrace) do
{:current_stacktrace, t} -> Enum.drop(t, 3)
end
end
case trace do
[] -> "\n"
_ -> " " <> Enum.map_join(trace, "\n ", &format_stacktrace_entry(&1)) <> "\n"
end
end
@doc """
Receives an anonymous function and arity and formats it as
shown in stacktraces. The arity may also be a list of arguments.
## Examples
Exception.format_fa(fn -> nil end, 1)
#=> "#Function<...>/1"
"""
# Renders an anonymous function plus its arity (or argument list)
# exactly as shown in stacktraces, e.g. "#Function<...>/1".
def format_fa(fun, arity) when is_function(fun) do
  inspect(fun) <> format_arity(arity)
end
@doc """
Receives a module, fun and arity and formats it
as shown in stacktraces. The arity may also be a list
of arguments.
## Examples
iex> Exception.format_mfa Foo, :bar, 1
"Foo.bar/1"
iex> Exception.format_mfa Foo, :bar, []
"Foo.bar()"
iex> Exception.format_mfa nil, :bar, []
"nil.bar()"
Anonymous functions are reported as -func/arity-anonfn-count-,
where func is the name of the enclosing function. Convert to
"anonymous fn in func/arity"
"""
# Formats module.function/arity for stacktraces. Compiler-generated
# anonymous-function names are detected via Code.Identifier and rendered
# as "anonymous fn/arity in Module.outer/arity" instead of their raw
# -outer/arity-anonfn-n- form.
def format_mfa(module, fun, arity) when is_atom(module) and is_atom(fun) do
  case Code.Identifier.extract_anonymous_fun_parent(fun) do
    # `fun` is a compiler-generated anonymous function: report it
    # relative to its enclosing function.
    {outer_name, outer_arity} ->
      "anonymous fn#{format_arity(arity)} in " <>
        "#{Code.Identifier.inspect_as_atom(module)}." <>
        "#{Code.Identifier.inspect_as_function(outer_name)}/#{outer_arity}"

    # A regular named function.
    :error ->
      "#{Code.Identifier.inspect_as_atom(module)}." <>
        "#{Code.Identifier.inspect_as_function(fun)}#{format_arity(arity)}"
  end
end
# A list of arguments renders as "(arg1, arg2, ...)".
defp format_arity(arity) when is_list(arity) do
  "(" <> Enum.map_join(arity, ", ", &inspect/1) <> ")"
end

# A plain arity renders as "/arity".
defp format_arity(arity) when is_integer(arity) do
  "/#{arity}"
end
@doc """
Formats the given `file` and `line` as shown in stacktraces.
If any of the values are `nil`, they are omitted.
## Examples
iex> Exception.format_file_line("foo", 1)
"foo:1:"
iex> Exception.format_file_line("foo", nil)
"foo:"
iex> Exception.format_file_line(nil, nil)
""
"""
# Formats a "file:line:" location as shown in stacktraces; either part
# may be nil (a line of 0 is treated as absent) and is then omitted.
def format_file_line(file, line, suffix \\ "") do
  cond do
    !file -> ""
    line && line != 0 -> "#{file}:#{line}:#{suffix}"
    true -> "#{file}:#{suffix}"
  end
end
defp format_location(opts) when is_list(opts) do
format_file_line(Keyword.get(opts, :file), Keyword.get(opts, :line), " ")
end
end
# Some exceptions implement "message/1" instead of "exception/1" mostly
# for bootstrap reasons. It is recommended for applications to implement
# "exception/1" instead of "message/1" as described in "defexception/1"
# docs.
defmodule RuntimeError do
  @moduledoc """
  An exception for generic runtime errors, with a default
  message of "runtime error".
  """
  defexception message: "runtime error"
end
defmodule ArgumentError do
  @moduledoc """
  An exception signalling an invalid argument, with a default
  message of "argument error".
  """
  defexception message: "argument error"
end
defmodule ArithmeticError do
  @moduledoc """
  An exception for invalid arguments in arithmetic expressions.
  """
  defexception message: "bad argument in arithmetic expression"
end
defmodule SystemLimitError do
  @moduledoc """
  An exception raised when a system limit has been reached.
  """
  defexception []

  # The message is fixed; this exception carries no extra data.
  def message(_exception), do: "a system limit has been reached"
end
defmodule SyntaxError do
  @moduledoc """
  An exception for syntax errors, formatted as
  "path:line: description" with the path relative to the
  current working directory.
  """
  defexception [:file, :line, description: "syntax error"]

  def message(%{file: file, line: line, description: description}) do
    location = Exception.format_file_line(Path.relative_to_cwd(file), line)
    location <> " " <> description
  end
end
defmodule TokenMissingError do
  @moduledoc """
  An exception raised when the source ends while an expression
  is still incomplete. Both `:file` and `:line` may be nil.
  """
  defexception [:file, :line, description: "expression is incomplete"]

  def message(%{file: file, line: line, description: description}) do
    relative = if file, do: Path.relative_to_cwd(file), else: nil
    Exception.format_file_line(relative, line) <> " " <> description
  end
end
defmodule CompileError do
  @moduledoc """
  An exception raised when code fails to compile, formatted as
  "path:line: description". The `:file` field may be nil.
  """
  defexception [:file, :line, description: "compile error"]

  def message(%{file: file, line: line, description: description}) do
    relative = file && Path.relative_to_cwd(file)
    Exception.format_file_line(relative, line) <> " " <> description
  end
end
defmodule BadFunctionError do
  @moduledoc """
  An exception raised when a term that is not a function is
  invoked as one; `:term` holds the offending value.
  """
  defexception [:term]

  def message(%{term: term}) do
    "expected a function, got: " <> inspect(term)
  end
end
defmodule BadStructError do
  @moduledoc """
  An exception raised when a struct of a specific name was
  expected but another term was given.
  """
  defexception [:struct, :term]

  def message(%{struct: struct, term: term}) do
    "expected a struct named " <> inspect(struct) <> ", got: " <> inspect(term)
  end
end
defmodule BadMapError do
  @moduledoc """
  An exception raised when a map was expected but another
  term was given.
  """
  defexception [:term]

  def message(%__MODULE__{term: term}) do
    "expected a map, got: " <> inspect(term)
  end
end
defmodule BadBooleanError do
  @moduledoc """
  An exception raised when the left side of a strict boolean
  operator (`:operator`) is not a boolean; `:term` holds the
  offending value.
  """
  defexception [:term, :operator]

  def message(%{operator: operator, term: term}) do
    ~s(expected a boolean on left-side of "#{operator}", got: #{inspect(term)})
  end
end
defmodule MatchError do
  @moduledoc """
  An exception raised when a match (`=`) fails; `:term` holds
  the right-hand side value that did not match.
  """
  defexception [:term]

  def message(%{term: term}) do
    "no match of right hand side value: " <> inspect(term)
  end
end
defmodule CaseClauseError do
  @moduledoc """
  An exception raised when a `case` expression has no clause
  matching `:term`.
  """
  defexception [:term]

  def message(%__MODULE__{term: term}) do
    "no case clause matching: " <> inspect(term)
  end
end
defmodule WithClauseError do
  @moduledoc """
  An exception raised when a `with` expression's `else` block
  has no clause matching `:term`.
  """
  defexception [:term]

  def message(%__MODULE__{term: term}) do
    "no with clause matching: " <> inspect(term)
  end
end
defmodule CondClauseError do
  @moduledoc """
  An exception raised when no clause in a `cond` expression
  evaluates to a truthy value.
  """
  defexception []

  # The message is fixed; this exception carries no extra data.
  def message(_exception), do: "no cond clause evaluated to a true value"
end
defmodule TryClauseError do
  @moduledoc """
  An exception raised when a `try` expression's `else` block
  has no clause matching `:term`.
  """
  defexception [:term]

  def message(%{term: term}) do
    "no try clause matching: " <> inspect(term)
  end
end
defmodule BadArityError do
  @moduledoc """
  An exception raised when a function (`:function`) is called
  with the wrong number of arguments (`:args`).
  """
  defexception [:function, :args]

  def message(%{function: fun, args: args}) do
    # The real arity comes from the function itself, not the call site.
    {:arity, arity} = :erlang.fun_info(fun, :arity)
    rendered = Enum.map_join(args, ", ", &inspect/1)
    "#{inspect(fun)} with arity #{arity} called with #{describe_args(length(args), rendered)}"
  end

  # Pluralizes the argument count and inlines the rendered arguments.
  defp describe_args(0, _rendered), do: "no arguments"
  defp describe_args(1, rendered), do: "1 argument (#{rendered})"
  defp describe_args(n, rendered), do: "#{n} arguments (#{rendered})"
end
defmodule UndefinedFunctionError do
  @moduledoc """
  An exception raised when an undefined (or private) function
  is invoked.
  """

  # :reason may be precomputed by the raiser (or nil, and then derived
  # below); :exports may carry a precomputed export list reused by the
  # "did you mean" suggestions.
  defexception [:module, :function, :arity, :reason, :exports]

  # No reason given: classify the failure before formatting. Either the
  # function/arity metadata is missing, the module cannot be loaded at
  # all, or the module is loaded but does not export the function.
  def message(%{reason: nil, module: module, function: function, arity: arity} = e) do
    cond do
      is_nil(function) or is_nil(arity) ->
        "undefined function"

      not is_nil(module) and :code.is_loaded(module) == false ->
        message(%{e | reason: :"module could not be loaded"})

      true ->
        message(%{e | reason: :"function not exported"})
    end
  end

  def message(%{
        reason: :"module could not be loaded",
        module: module,
        function: function,
        arity: arity
      }) do
    formatted_fun = Exception.format_mfa(module, function, arity)
    "function #{formatted_fun} is undefined (module #{inspect(module)} is not available)"
  end

  def message(%{
        reason: :"function not exported",
        module: module,
        function: function,
        arity: arity
      }) do
    # function_not_exported/4 returns iodata; flatten it here.
    IO.iodata_to_binary(function_not_exported(module, function, arity, nil))
  end

  def message(%{
        reason: :"function not available",
        module: module,
        function: function,
        arity: arity
      }) do
    # Strip the "nil." prefix to present just "function/arity".
    "nil." <> fa = Exception.format_mfa(nil, function, arity)

    "function " <>
      Exception.format_mfa(module, function, arity) <>
      " is undefined (function #{fa} is not available)"
  end

  # Any other precomputed reason is rendered verbatim.
  def message(%{reason: reason, module: module, function: function, arity: arity}) do
    "function " <> Exception.format_mfa(module, function, arity) <> " is undefined (#{reason})"
  end

  # Builds the "undefined or private" message as iodata, appending
  # either a macro hint (same name/arity exists as a macro) or
  # "did you mean" suggestions.
  @doc false
  def function_not_exported(module, function, arity, exports) do
    suffix =
      if macro_exported?(module, function, arity) do
        ". However there is a macro with the same name and arity. " <>
          "Be sure to require #{inspect(module)} if you intend to invoke this macro"
      else
        did_you_mean(module, function, exports)
      end

    [
      "function ",
      Exception.format_mfa(module, function, arity),
      " is undefined or private",
      suffix
    ]
  end

  # Minimum Jaro similarity for a fuzzy-name suggestion, and the
  # maximum number of suggestions shown.
  @function_threshold 0.77
  @max_suggestions 5

  defp did_you_mean(module, function, exports) do
    exports = exports || exports_for(module)

    result =
      case Keyword.take(exports, [function]) do
        # Name does not exist at all: fuzzy-match against all exports
        # using Jaro distance.
        [] ->
          base = Atom.to_string(function)

          for {key, val} <- exports,
              dist = String.jaro_distance(base, Atom.to_string(key)),
              dist >= @function_threshold,
              do: {dist, key, val}

        # Same name with other arities: suggest them all with a perfect
        # similarity score.
        arities ->
          for {key, val} <- arities, do: {1.0, key, val}
      end
      # Keep the most similar candidates, then sort the kept ones
      # alphabetically for stable display.
      |> Enum.sort(&(elem(&1, 0) >= elem(&2, 0)))
      |> Enum.take(@max_suggestions)
      |> Enum.sort(&(elem(&1, 1) <= elem(&2, 1)))

    case result do
      [] -> []
      suggestions -> [". Did you mean one of:\n\n" | Enum.map(suggestions, &format_fa/1)]
    end
  end

  # One " * name/arity" bullet line, as iodata.
  defp format_fa({_dist, fun, arity}) do
    [" * ", Code.Identifier.inspect_as_function(fun), ?/, Integer.to_string(arity), ?\n]
  end

  # Elixir modules expose __info__/1 (which includes macros); Erlang
  # modules fall back to module_info(:exports).
  defp exports_for(module) do
    if function_exported?(module, :__info__, 1) do
      module.__info__(:macros) ++ module.__info__(:functions)
    else
      module.module_info(:exports)
    end
  rescue
    # In case the module was removed while we are computing this
    UndefinedFunctionError ->
      []
  end
end
defmodule FunctionClauseError do
  # Raised when no clause of a function matches the given arguments. The
  # :args/:kind/:clauses fields are filled in by blame/2 when available.
  defexception [:module, :function, :arity, :kind, :args, :clauses]

  def message(exception) do
    case exception do
      %{function: nil} ->
        "no function clause matches"

      %{module: module, function: function, arity: arity} ->
        formatted = Exception.format_mfa(module, function, arity)
        blamed = blame(exception, &inspect/1, &blame_match/2)
        "no function clause matching in #{formatted}" <> blamed
    end
  end

  # Enriches the exception with the actual call arguments (and, when
  # Exception.blame_mfa/3 succeeds, the attempted clauses) taken from the
  # head of the stacktrace; also normalizes the head frame to m/f/arity form.
  def blame(%{module: module, function: function, arity: arity} = exception, stacktrace) do
    case stacktrace do
      [{^module, ^function, args, meta} | rest] when length(args) == arity ->
        exception =
          case Exception.blame_mfa(module, function, args) do
            {:ok, kind, clauses} -> %{exception | args: args, kind: kind, clauses: clauses}
            :error -> %{exception | args: args}
          end

        {exception, [{module, function, arity, meta} | rest]}

      stacktrace ->
        {exception, stacktrace}
    end
  end

  # Renders a clause AST node: matching parts verbatim, non-matching parts
  # wrapped in "-...-" markers.
  defp blame_match(%{match?: true, node: node}, _), do: Macro.to_string(node)
  defp blame_match(%{match?: false, node: node}, _), do: "-" <> Macro.to_string(node) <> "-"
  defp blame_match(_, string), do: string

  @doc false
  # Without captured arguments there is nothing to report.
  def blame(%{args: nil}, _, _) do
    ""
  end

  # Builds the detailed report: each given argument (numbered), followed by
  # the attempted clauses (at most the first 10), when known.
  def blame(exception, inspect_fun, ast_fun) do
    %{module: module, function: function, arity: arity, kind: kind, args: args, clauses: clauses} =
      exception

    mfa = Exception.format_mfa(module, function, arity)

    formatted_args =
      args
      |> Enum.with_index(1)
      |> Enum.map(fn {arg, i} ->
        ["\n # ", Integer.to_string(i), "\n ", pad(inspect_fun.(arg)), "\n"]
      end)

    formatted_clauses =
      if clauses do
        # Re-attach guards to the head with `when` so the clause prints as written.
        format_clause_fun = fn {args, guards} ->
          code = Enum.reduce(guards, {function, [], args}, &{:when, [], [&2, &1]})
          " #{kind} " <> Macro.to_string(code, ast_fun) <> "\n"
        end

        top_10 =
          clauses
          |> Enum.take(10)
          |> Enum.map(format_clause_fun)

        [
          "\nAttempted function clauses (showing #{length(top_10)} out of #{length(clauses)}):",
          "\n\n",
          top_10
        ]
      else
        ""
      end

    "\n\nThe following arguments were given to #{mfa}:\n#{formatted_args}#{formatted_clauses}"
  end

  # Indents continuation lines of a multi-line inspect result.
  defp pad(string) do
    String.replace(string, "\n", "\n ")
  end
end
defmodule Code.LoadError do
  # Raised when a file cannot be loaded by the `Code` functions.
  defexception [:file, :message]

  def exception(opts) do
    file = Keyword.fetch!(opts, :file)
    struct!(__MODULE__, file: file, message: "could not load #{file}")
  end
end
defmodule Protocol.UndefinedError do
  # Raised when a protocol has no implementation for the given value.
  defexception [:protocol, :value, description: ""]

  def message(%{protocol: protocol, value: value, description: description}) do
    base = "protocol #{inspect(protocol)} not implemented for #{inspect(value)}"
    base <> describe(description) <> implementations(protocol)
  end

  # Appends the optional, user-supplied description, if any.
  defp describe(""), do: ""
  defp describe(text), do: ", " <> text

  # When the protocol has been consolidated, list the types it is
  # implemented for (or say there are none).
  defp implementations(protocol) do
    case protocol.__protocol__(:impls) do
      {:consolidated, []} ->
        ". There are no implementations for this protocol."

      {:consolidated, types} ->
        ". This protocol is implemented for: #{Enum.map_join(types, ", ", &inspect/1)}"

      :not_consolidated ->
        ""
    end
  end
end
defmodule KeyError do
  # Raised when a key cannot be found in a map, keyword list, or struct.
  # `term` (when known) is the container that was searched.
  defexception [:key, :term]

  def message(%{key: key, term: term}) do
    base = "key #{inspect(key)} not found"

    case term do
      nil -> base
      term -> base <> " in: #{inspect(term)}"
    end
  end
end
defmodule UnicodeConversionError do
  # Raised when a binary or charlist cannot be converted; `encoded` holds the
  # part that was successfully converted before the failure.
  defexception [:encoded, :message]

  def exception(opts) do
    kind = Keyword.fetch!(opts, :kind)
    rest = Keyword.fetch!(opts, :rest)

    %UnicodeConversionError{
      encoded: Keyword.fetch!(opts, :encoded),
      message: "#{kind} #{describe(rest)}"
    }
  end

  # Pinpoints the failure: an invalid byte sequence in a binary, or the
  # offending code point (possibly nested) in a charlist.
  defp describe(rest) when is_binary(rest), do: "encoding starting at #{inspect(rest)}"
  defp describe([h | _]) when is_integer(h), do: "code point #{h}"
  defp describe([h | _]), do: describe(h)
end
defmodule Enum.OutOfBoundsError do
  # Raised when an index is outside the bounds of an enumerable.
  defexception message: "out of bounds error"
end
defmodule Enum.EmptyError do
  # Raised when an operation requires a non-empty enumerable but got an empty one.
  defexception message: "empty error"
end
defmodule File.Error do
  # Raised when a filesystem operation fails; wraps the POSIX error reason.
  defexception [:reason, :path, action: ""]

  def message(%{action: action, reason: reason, path: path}) do
    "could not #{action} #{inspect(path)}: #{describe(action, reason)}"
  end

  # :eexist on directory removal gets a friendlier message than the raw
  # POSIX text; everything else falls through to :file.format_error/1.
  defp describe("remove directory", :eexist), do: "directory is not empty"
  defp describe(_action, reason), do: IO.iodata_to_binary(:file.format_error(reason))
end
defmodule File.CopyError do
  # Raised when copying from `source` to `destination` fails. For recursive
  # copies, `on` names the entry that actually failed.
  defexception [:reason, :source, :destination, on: "", action: ""]

  def message(exception) do
    formatted = IO.iodata_to_binary(:file.format_error(exception.reason))

    # Bug fix: the original read the field as `exception.on()`. Parenthesized
    # field access on a map/struct was removed in modern Elixir and raises at
    # runtime; plain field access is the correct form.
    location =
      case exception.on do
        "" -> ""
        on -> ". #{on}"
      end

    "could not #{exception.action} from #{inspect(exception.source)} to " <>
      "#{inspect(exception.destination)}#{location}: #{formatted}"
  end
end
defmodule File.LinkError do
  # Raised when creating a link from `existing` to `new` fails.
  defexception [:reason, :existing, :new, action: ""]

  def message(%{reason: reason, existing: existing, new: new, action: action}) do
    posix = IO.iodata_to_binary(:file.format_error(reason))

    "could not #{action} from #{inspect(existing)} to " <>
      "#{inspect(new)}: #{posix}"
  end
end
defmodule ErlangError do
  # Fallback wrapper for raw Erlang error terms that have no dedicated
  # Elixir exception.
  defexception [:original]

  def message(exception) do
    "Erlang error: #{inspect(exception.original)}"
  end

  @doc false
  # normalize/2 maps raw Erlang error reasons onto the corresponding Elixir
  # exception structs, falling back to %ErlangError{} for anything unknown.
  def normalize(:badarg, _stacktrace) do
    %ArgumentError{}
  end

  def normalize(:badarith, _stacktrace) do
    %ArithmeticError{}
  end

  def normalize(:system_limit, _stacktrace) do
    %SystemLimitError{}
  end

  def normalize(:cond_clause, _stacktrace) do
    %CondClauseError{}
  end

  def normalize({:badarity, {fun, args}}, _stacktrace) do
    %BadArityError{function: fun, args: args}
  end

  def normalize({:badfun, term}, _stacktrace) do
    %BadFunctionError{term: term}
  end

  def normalize({:badstruct, struct, term}, _stacktrace) do
    %BadStructError{struct: struct, term: term}
  end

  def normalize({:badmatch, term}, _stacktrace) do
    %MatchError{term: term}
  end

  def normalize({:badmap, term}, _stacktrace) do
    %BadMapError{term: term}
  end

  def normalize({:badbool, op, term}, _stacktrace) do
    %BadBooleanError{operator: op, term: term}
  end

  # When only the key is known, try to recover the searched container from
  # the argument lists of the well-known Map/:maps call frames at the top of
  # the stacktrace.
  def normalize({:badkey, key}, stacktrace) do
    term =
      case ensure_stacktrace(stacktrace) do
        [{Map, :get_and_update!, [map, _, _], _} | _] -> map
        [{Map, :update!, [map, _, _], _} | _] -> map
        [{:maps, :update, [_, _, map], _} | _] -> map
        [{:maps, :get, [_, map], _} | _] -> map
        _ -> nil
      end

    %KeyError{key: key, term: term}
  end

  def normalize({:badkey, key, map}, _stacktrace) do
    %KeyError{key: key, term: map}
  end

  def normalize({:case_clause, term}, _stacktrace) do
    %CaseClauseError{term: term}
  end

  def normalize({:with_clause, term}, _stacktrace) do
    %WithClauseError{term: term}
  end

  def normalize({:try_clause, term}, _stacktrace) do
    %TryClauseError{term: term}
  end

  def normalize(:undef, stacktrace) do
    stacktrace = ensure_stacktrace(stacktrace)
    {mod, fun, arity} = from_stacktrace(stacktrace)
    %UndefinedFunctionError{module: mod, function: fun, arity: arity}
  end

  def normalize(:function_clause, stacktrace) do
    {mod, fun, arity} = from_stacktrace(ensure_stacktrace(stacktrace))
    %FunctionClauseError{module: mod, function: fun, arity: arity}
  end

  def normalize({:badarg, payload}, _stacktrace) do
    %ArgumentError{message: "argument error: #{inspect(payload)}"}
  end

  def normalize(other, _stacktrace) do
    %ErlangError{original: other}
  end

  # Falls back to the legacy :erlang.get_stacktrace/0 when none was given;
  # rescued to [] where that function is unavailable - TODO confirm on
  # modern OTP where it has been removed.
  defp ensure_stacktrace(nil) do
    try do
      :erlang.get_stacktrace()
    rescue
      _ -> []
    end
  end

  defp ensure_stacktrace(stacktrace) do
    stacktrace
  end

  # Extracts {module, function, arity} from the top stack frame; the frame
  # may carry either the argument list or a bare arity.
  defp from_stacktrace([{module, function, args, _} | _]) when is_list(args) do
    {module, function, length(args)}
  end

  defp from_stacktrace([{module, function, arity, _} | _]) do
    {module, function, arity}
  end

  defp from_stacktrace(_) do
    {nil, nil, nil}
  end
end
| 28.694313 | 153 | 0.653481 |
03439f09d8d4f3e00f02b798266766488da37362 | 1,259 | ex | Elixir | test/support/conn_case.ex | ding-an-sich/ex_change_rate | 89858694dc98cef6d21089599120600a075c93db | [
"MIT"
] | null | null | null | test/support/conn_case.ex | ding-an-sich/ex_change_rate | 89858694dc98cef6d21089599120600a075c93db | [
"MIT"
] | null | null | null | test/support/conn_case.ex | ding-an-sich/ex_change_rate | 89858694dc98cef6d21089599120600a075c93db | [
"MIT"
defmodule ExChangeRateWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use ExChangeRateWeb.ConnCase, async: true`, although
  this option is not recommended for other databases.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Conveniences for testing with connections.
      import Plug.Conn
      import Phoenix.ConnTest
      import ExChangeRateWeb.ConnCase

      alias ExChangeRateWeb.Router.Helpers, as: Routes

      # The default endpoint for testing.
      @endpoint ExChangeRateWeb.Endpoint
    end
  end

  setup tags do
    # Check out a sandboxed DB connection; shared mode only for synchronous tests.
    owner = Ecto.Adapters.SQL.Sandbox.start_owner!(ExChangeRate.Repo, shared: not tags[:async])
    on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(owner) end)
    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
| 31.475 | 93 | 0.739476 |
0343bb8bda703ed7ae6fd2ce8b636a1b4a81b510 | 1,047 | exs | Elixir | test/socializer/conversation_user_test.exs | adam-h/socializer | 22bbb34a6e707af9cf608c5eeb6189d1f067a95b | [
"MIT"
] | 311 | 2019-04-21T22:15:08.000Z | 2022-01-23T14:07:03.000Z | test/socializer/conversation_user_test.exs | adam-h/socializer | 22bbb34a6e707af9cf608c5eeb6189d1f067a95b | [
"MIT"
] | 9 | 2020-09-07T09:38:58.000Z | 2022-02-26T18:07:44.000Z | test/socializer/conversation_user_test.exs | adam-h/socializer | 22bbb34a6e707af9cf608c5eeb6189d1f067a95b | [
"MIT"
defmodule Socializer.ConversationUserTest do
  use SocializerWeb.ConnCase

  alias Socializer.ConversationUser

  describe "#find_by" do
    it "finds conversation user by conditions" do
      conversation = insert(:conversation)
      user = insert(:user)

      membership = insert(:conversation_user, conversation: conversation, user: user)

      # Look the membership up again by its composite conditions.
      result = ConversationUser.find_by(%{conversation_id: conversation.id, user_id: user.id})
      assert result.id == membership.id
    end
  end

  describe "#changeset" do
    it "validates with correct attributes" do
      conversation = insert(:conversation)
      user = insert(:user)

      attrs = %{conversation_id: conversation.id, user_id: user.id}
      changeset = ConversationUser.changeset(%ConversationUser{}, attrs)
      assert changeset.valid?
    end

    it "does not validate with missing attrs" do
      changeset = ConversationUser.changeset(%ConversationUser{}, %{})
      refute changeset.valid?
    end
  end
end
| 29.083333 | 93 | 0.689589 |
0343c7c445e86e108713cc7a129c9bd7c8831221 | 15,309 | exs | Elixir | test/teslamate_web/controllers/car_controller_test.exs | dirkjanm/teslamate | 18beefbe694c74c1088ff6f87ef7b4c4190b451e | [
"MIT"
] | null | null | null | test/teslamate_web/controllers/car_controller_test.exs | dirkjanm/teslamate | 18beefbe694c74c1088ff6f87ef7b4c4190b451e | [
"MIT"
] | null | null | null | test/teslamate_web/controllers/car_controller_test.exs | dirkjanm/teslamate | 18beefbe694c74c1088ff6f87ef7b4c4190b451e | [
"MIT"
] | null | null | null | defmodule TeslaMateWeb.CarControllerTest do
  use TeslaMateWeb.ConnCase
  use TeslaMate.VehicleCase

  alias TeslaMate.Settings.CarSettings
  alias TeslaMate.{Log, Settings, Repo}
  alias TeslaMate.Log.Car

  # Asserts the rendered HTML contains a two-column <tr> whose first cell is
  # `key` and whose second cell is `value`.
  defp table_row(html, key, value) do
    assert {"tr", _, [{"td", _, [^key]}, {"td", [], [^value]}]} =
             html
             |> Floki.find("tr")
             |> Enum.find(&match?({"tr", _, [{"td", _, [^key]}, _td]}, &1))
  end

  # Asserts an icon span with the given tooltip and Material Design icon name
  # is rendered inside the ".icons" container.
  defp icon(html, tooltip, icon) do
    icon_class = "mdi mdi-#{icon}"
    assert {"span", _, [{"span", [{"class", ^icon_class}], _}]} =
             html
             |> Floki.find(".icons .icon")
             |> Enum.find(&match?({"span", [_, {"data-tooltip", ^tooltip}], _}, &1))
  end

  # Inserts a car with fixed identifiers (eid 4242 / vid 404 / vin "xxxxx")
  # and applies the given car settings.
  defp car_fixture(settings) do
    {:ok, car} =
      Log.create_car(%{
        efficiency: 0.153,
        eid: 4242,
        vid: 404,
        vin: "xxxxx",
        model: "S",
        name: "foo",
        trim_badging: "P100D"
      })
    {:ok, _settings} =
      car.settings
      |> Repo.preload(:car)
      |> Settings.update_car_settings(settings)
    car
  end

  describe "index" do
    test "redirects if not signed in", %{conn: conn} do
      assert conn = get(conn, Routes.car_path(conn, :index))
      assert redirected_to(conn, 302) == Routes.live_path(conn, TeslaMateWeb.SignInLive.Index)
    end

    @tag :signed_in
    test "lists all active vehicles", %{conn: conn} do
      {:ok, _pid} =
        start_supervised(
          {ApiMock, name: :api_vehicle, events: [{:ok, online_event()}], pid: self()}
        )
      {:ok, _pid} =
        start_supervised(
          {TeslaMate.Vehicles,
          vehicle: VehicleMock,
          vehicles: [
            %TeslaApi.Vehicle{display_name: "f0o", id: 4241, vehicle_id: 11111, vin: "1221"},
            %TeslaApi.Vehicle{display_name: "fo0", id: 1242, vehicle_id: 22222, vin: "2112"}
          ]}
        )
      conn = get(conn, Routes.car_path(conn, :index))
      html = response(conn, 200)
      # One ".car" card is rendered per vehicle.
      assert [
               {"div", [{"class", "car card"}], _},
               {"div", [{"class", "car card"}], _}
             ] = Floki.find(html, ".car")
    end

    @tag :signed_in
    test "renders last knwon vehicle stats", %{conn: conn} do
      # Vehicle is asleep, so the page must fall back to the last logged position.
      events = [
        {:ok, %TeslaApi.Vehicle{state: "asleep", display_name: "FooCar"}}
      ]
      {:ok, car} =
        %Car{settings: %CarSettings{}}
        |> Car.changeset(%{vid: 404, eid: 404, vin: "xxxxx"})
        |> Log.create_or_update_car()
      {:ok, _position} =
        Log.insert_position(car, %{
          date: DateTime.utc_now(),
          longitude: 0,
          latitude: 0,
          ideal_battery_range_km: 380.25,
          est_battery_range_km: 401.52,
          rated_battery_range_km: 175.1,
          battery_level: 80,
          outside_temp: 20.1,
          inside_temp: 21.0
        })
      :ok = start_vehicles(events)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert table_row(html, "Status", "asleep")
      assert table_row(html, "Range (ideal)", "380.25 km")
      assert table_row(html, "Range (est.)", "401.52 km")
      assert table_row(html, "State of Charge", "80%")
      assert table_row(html, "Outside temperature", "20.1 °C")
      assert table_row(html, "Inside temperature", "21.0 °C")
    end

    @tag :signed_in
    test "renders current vehicle stats [:online]", %{conn: conn} do
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           drive_state: %{timestamp: 0, latitude: 0.0, longitude: 0.0},
           charge_state: %{
             ideal_battery_range: 200,
             est_battery_range: 180,
             battery_range: 175,
             battery_level: 69
           },
           climate_state: %{is_preconditioning: true, outside_temp: 24, inside_temp: 23.2},
           vehicle_state: %{
             software_update: %{status: "available"},
             locked: true,
             sentry_mode: true,
             fd_window: 1,
             fp_window: 0,
             rd_window: 0,
             rp_window: 0,
             is_user_present: true
           },
           vehicle_config: %{car_type: "models2", trim_badging: "p90d"}
         )}
      ]
      :ok = start_vehicles(events)
      Process.sleep(250)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert html =~ ~r/<p class="subtitle is-6 has-text-weight-light">Model S P90D<\/p>/
      assert table_row(html, "Status", "online")
      # Ranges are given in miles by the API and rendered in km by default.
      assert table_row(html, "Range (ideal)", "321.87 km")
      assert table_row(html, "Range (est.)", "289.68 km")
      assert table_row(html, "State of Charge", "69%")
      assert icon(html, "Locked", "lock")
      assert icon(html, "Driver present", "account")
      assert icon(html, "Preconditioning", "air-conditioner")
      assert icon(html, "Sentry Mode", "shield-check")
      assert icon(html, "Windows open", "window-open")
      assert icon(html, "Software Update available", "gift-outline")
      assert table_row(html, "Outside temperature", "24 °C")
      assert table_row(html, "Inside temperature", "23.2 °C")
    end

    @tag :signed_in
    test "renders current vehicle stats [:charging]", %{conn: conn} do
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           drive_state: %{timestamp: 0, latitude: 0.0, longitude: 0.0},
           charge_state: %{
             timestamp: 0,
             charger_power: 11,
             charger_phases: 3,
             charger_voltage: 229,
             charger_actual_current: 16,
             ideal_battery_range: 200,
             est_battery_range: 180,
             battery_range: 175,
             charging_state: "Charging",
             charge_energy_added: "4.32",
             charge_port_latch: "Engaged",
             charge_port_door_open: true,
             scheduled_charging_start_time: 1_565_620_707,
             charge_limit_soc: 85,
             time_to_full_charge: 1.83
           }
         )}
      ]
      :ok = start_vehicles(events)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert table_row(html, "Status", "charging")
      # 1.83 h to full charge -> 110 min.
      assert table_row(html, "Remaining Time", "110 min")
      assert icon(html, "Plugged in", "power-plug")
      assert table_row(html, "Range (ideal)", "321.87 km")
      assert table_row(html, "Range (est.)", "289.68 km")
      assert table_row(html, "Charged", "4.32 kWh")
      assert table_row(html, "Charger Power", "11 kW")
      # The scheduled start time is rendered client-side via the LocalTime hook.
      assert table_row(
               html,
               "Scheduled charging",
               {"span", [{"data-date", "2019-08-12T14:38:27Z"}, {"phx-hook", "LocalTime"}], []}
             )
      assert table_row(html, "Charge limit", "85%")
    end

    @tag :signed_in
    test "renders current vehicle stats [:driving]", %{conn: conn} do
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           drive_state: %{
             timestamp: 0,
             latitude: 0.0,
             longitude: 0.0,
             shift_state: "D",
             speed: 30
           }
         )}
      ]
      :ok = start_vehicles(events)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert table_row(html, "Status", "driving")
      # 30 mph -> 48 km/h.
      assert table_row(html, "Speed", "48 km/h")
    end

    @tag :signed_in
    test "renders current vehicle stats [:updating]", %{conn: conn} do
      alias TeslaApi.Vehicle.State.VehicleState.SoftwareUpdate
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           vehicle_state: %{
             car_version: "2019.8.4 530d1d3",
             software_update: %SoftwareUpdate{expected_duration_sec: 2700, status: "installing"}
           }
         )}
      ]
      :ok = start_vehicles(events)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert table_row(html, "Status", "updating")
    end

    @tag :signed_in
    test "renders current vehicle stats [:asleep]", %{conn: conn} do
      events = [
        {:ok, %TeslaApi.Vehicle{display_name: "FooCar", state: "asleep"}}
      ]
      :ok = start_vehicles(events)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert table_row(html, "Status", "asleep")
    end

    @tag :signed_in
    test "renders current vehicle stats [:offline]", %{conn: conn} do
      events = [
        {:ok, %TeslaApi.Vehicle{display_name: "FooCar", state: "offline"}}
      ]
      :ok = start_vehicles(events)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert table_row(html, "Status", "offline")
    end

    @tag :signed_in
    test "renders current vehicle stats [:falling asleep]", %{conn: conn} do
      # A short suspend_after_idle_min makes the vehicle enter "falling asleep"
      # almost immediately while idle.
      _car = car_fixture(%{suspend_min: 60, suspend_after_idle_min: 1})
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           drive_state: %{timestamp: 0, latitude: 0.0, longitude: 0.0},
           climate_state: %{is_preconditioning: false}
         )}
      ]
      :ok = start_vehicles(events)
      Process.sleep(100)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert table_row(html, "Status", "falling asleep")
    end

    @tag :capture_log
    @tag :signed_in
    test "renders current vehicle stats [:unavailable]", %{conn: conn} do
      events = [
        {:error, :unknown}
      ]
      :ok = start_vehicles(events)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5"><\/p>/
      assert table_row(html, "Status", "unavailable")
    end

    @tag :signed_in
    test "displays the rated range if preferred", %{conn: conn} do
      {:ok, _} =
        Settings.get_global_settings!()
        |> Settings.update_global_settings(%{preferred_range: :rated})
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           drive_state: %{timestamp: 0, latitude: 0.0, longitude: 0.0},
           charge_state: %{
             ideal_battery_range: 200,
             est_battery_range: 180,
             battery_range: 175,
             battery_level: 69
           },
           climate_state: %{is_preconditioning: false, outside_temp: 24, inside_temp: 23.2},
           vehicle_state: %{locked: true, sentry_mode: true},
           vehicle_config: %{car_type: "models2", trim_badging: "p90d"}
         )}
      ]
      :ok = start_vehicles(events)
      Process.sleep(250)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert table_row(html, "Range (rated)", "281.64 km")
      assert table_row(html, "Range (est.)", "289.68 km")
    end

    @tag :signed_in
    test "displays imperial units", %{conn: conn} do
      {:ok, _} =
        Settings.get_global_settings!()
        |> Settings.update_global_settings(%{unit_of_length: :mi, unit_of_temperature: :F})
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           drive_state: %{
             timestamp: 0,
             latitude: 0.0,
             longitude: 0.0,
             shift_state: "D",
             speed: 30
           },
           charge_state: %{ideal_battery_range: 200, est_battery_range: 180, battery_range: 175},
           climate_state: %{is_preconditioning: false, outside_temp: 24, inside_temp: 23.2}
         )}
      ]
      :ok = start_vehicles(events)
      conn = get(conn, Routes.car_path(conn, :index))
      assert html = response(conn, 200)
      assert html =~ ~r/<p class="title is-5">FooCar<\/p>/
      assert table_row(html, "Status", "driving")
      assert table_row(html, "Range (ideal)", "200.0 mi")
      assert table_row(html, "Range (est.)", "180.0 mi")
      assert table_row(html, "Speed", "30 mph")
      assert table_row(html, "Outside temperature", "75.2 °F")
      assert table_row(html, "Inside temperature", "73.8 °F")
    end
  end

  # NOTE(review): "supsend" looks like a typo for "suspend"; kept as-is since
  # describe names are runtime strings.
  describe "supsend" do
    setup %{conn: conn} do
      {:ok, conn: put_req_header(conn, "accept", "application/json")}
    end

    test "suspends logging", %{conn: conn} do
      _car = car_fixture(%{suspend_min: 60, suspend_after_idle_min: 60})
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           drive_state: %{timestamp: 0, latitude: 0.0, longitude: 0.0},
           climate_state: %{is_preconditioning: false}
         )}
      ]
      :ok = start_vehicles(events)
      %Car{id: id} = Log.get_car_by(vin: "xxxxx")
      conn = put(conn, Routes.car_path(conn, :suspend_logging, id))
      assert "" == response(conn, 204)
    end

    test "returns error if suspending is not possible", %{conn: conn} do
      _car = car_fixture(%{suspend_min: 60, suspend_after_idle_min: 60})
      # Preconditioning blocks suspend-to-sleep.
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           drive_state: %{timestamp: 0, latitude: 0.0, longitude: 0.0},
           climate_state: %{is_preconditioning: true}
         )}
      ]
      :ok = start_vehicles(events)
      %Car{id: id} = Log.get_car_by(vin: "xxxxx")
      conn = put(conn, Routes.car_path(conn, :suspend_logging, id))
      assert "preconditioning" == json_response(conn, 412)["error"]
    end
  end

  describe "resume" do
    test "resumes logging", %{conn: conn} do
      alias TeslaMate.Vehicles.Vehicle.Summary
      _car = car_fixture(%{suspend_min: 60, suspend_after_idle_min: 1})
      events = [
        {:ok,
         online_event(
           display_name: "FooCar",
           drive_state: %{timestamp: 0, latitude: 0.0, longitude: 0.0},
           climate_state: %{is_preconditioning: false}
         )}
      ]
      :ok = start_vehicles(events)
      Process.sleep(100)
      %Car{id: id} = Log.get_car_by(vin: "xxxxx")
      # Vehicle must have suspended itself before resume is meaningful.
      assert %Summary{state: :suspended} = TeslaMate.Vehicles.summary(id)
      conn = put(conn, Routes.car_path(conn, :resume_logging, id))
      assert "" == response(conn, 204)
    end
  end

  # Boots the mocked API plus the vehicle supervisor with one default car.
  def start_vehicles(events \\ []) do
    {:ok, _pid} = start_supervised({ApiMock, name: :api_vehicle, events: events, pid: self()})
    {:ok, _pid} =
      start_supervised(
        {TeslaMate.Vehicles,
        vehicle: VehicleMock,
        vehicles: [
          %TeslaApi.Vehicle{
            display_name: "foo",
            id: 4242,
            vehicle_id: 404,
            vin: "xxxxx"
          }
        ]}
      )
    :ok
  end
end
end
| 30.556886 | 97 | 0.559214 |
0343efc2d224e16dbf15c67670ff040efbdd3e6a | 1,334 | ex | Elixir | lib/structs/data_types/backup_record.ex | silbermm/memelex | 1ab62069ceb4deb773b479b6ed21ac34ae0bfab5 | [
"Apache-2.0"
] | 5 | 2021-11-03T03:45:59.000Z | 2021-12-14T04:39:06.000Z | lib/structs/data_types/backup_record.ex | silbermm/memelex | 1ab62069ceb4deb773b479b6ed21ac34ae0bfab5 | [
"Apache-2.0"
] | 1 | 2021-12-14T18:26:14.000Z | 2021-12-14T18:26:14.000Z | lib/structs/data_types/backup_record.ex | silbermm/memelex | 1ab62069ceb4deb773b479b6ed21ac34ae0bfab5 | [
"Apache-2.0"
] | 3 | 2021-12-07T01:51:55.000Z | 2021-12-08T22:21:46.000Z | defmodule Memex.BackupRecord do
@enforce_keys [:uuid, :timepoint, :version]
@derive Jason.Encoder
defstruct [
uuid: nil, # we require UUIDs for precision when pattern-matching
label: nil, # If the user wants to leave any special text data for this backup, leave it here
timepoint: nil, # The DateTime, stored in unix format, of when we made this backup
version: nil, # usually we version backups by date, but if we take multiples on the same day, we use this to keep track. e.,g. "01", "02", "14", etc
hash: nil, # take a hash of the entire backup #TODO
location: nil, # where the backup is stored
module: __MODULE__ # this allows us to reconstruct the correct Elixir struct from the JSON text files
]
def construct(params) do
valid_params =
params
|> Map.merge(%{timepoint: Memex.My.current_time() |> DateTime.to_unix()})
|> Memex.Utils.ToolBag.generate_uuid()
Kernel.struct(__MODULE__, valid_params |> convert_to_keyword_list())
end
def convert_to_keyword_list(map) do
# https://stackoverflow.com/questions/54616306/convert-a-map-into-a-keyword-list-in-elixir
map |> Keyword.new(fn {k,v} -> {k,v} end) #keys are already atoms in this case
end
end | 37.055556 | 165 | 0.650675 |
03440ed6a5f25ff57896437c7f63b9688ff7f123 | 593 | ex | Elixir | advent_umbrella_2016/apps/day10/lib/lookandsay.ex | lauromoura/adventofcode | 320dc1ea7099fbc7c3ffcbc406bfc0aa236c3b78 | [
"Unlicense"
] | null | null | null | advent_umbrella_2016/apps/day10/lib/lookandsay.ex | lauromoura/adventofcode | 320dc1ea7099fbc7c3ffcbc406bfc0aa236c3b78 | [
"Unlicense"
] | null | null | null | advent_umbrella_2016/apps/day10/lib/lookandsay.ex | lauromoura/adventofcode | 320dc1ea7099fbc7c3ffcbc406bfc0aa236c3b78 | [
"Unlicense"
defmodule LookAndSay do
  @moduledoc """
  Implements the Look-and-Say sequence for strings.
  """

  @doc """
  Returns the next term of the look-and-say sequence for the given string.

  Fix: uses `String.to_charlist/1` and `Integer.to_charlist/1` instead of the
  hard-deprecated `String.to_char_list/1` / `Kernel.to_char_list/1`, which no
  longer exist in modern Elixir.
  """
  def say(amount) do
    [head | _tail] = chars = String.to_charlist(amount)

    chars
    |> say_count(head, 0)
    |> List.to_string()
  end

  # Walks the charlist counting the current run; when the run ends (or the
  # input is exhausted), emits "<count><char>" and starts counting the next.
  defp say_count([current | tail], current, current_count) do
    say_count(tail, current, current_count + 1)
  end

  defp say_count([new | tail], current, current_count) do
    [Integer.to_charlist(current_count), current] ++ say_count(tail, new, 1)
  end

  defp say_count([], current, current_count) do
    [Integer.to_charlist(current_count), current]
  end
end
| 24.708333 | 70 | 0.688027 |
034430ed3b568d77326900b20c9c5a47d8458596 | 16,331 | ex | Elixir | lib/verbalex.ex | tylerbarker/verbalex | f0dc0a10897729256e1835ab9e8ae8dff8adac02 | [
"Apache-2.0"
] | 27 | 2019-04-25T01:53:54.000Z | 2021-08-15T00:50:36.000Z | lib/verbalex.ex | tylerbarker/verbalex | f0dc0a10897729256e1835ab9e8ae8dff8adac02 | [
"Apache-2.0"
] | null | null | null | lib/verbalex.ex | tylerbarker/verbalex | f0dc0a10897729256e1835ab9e8ae8dff8adac02 | [
"Apache-2.0"
] | null | null | null | defmodule Verbalex do
@moduledoc """
Documentation for Verbalex.
"""
defguardp are_binaries(x, y) when is_binary(x) and is_binary(y)
defguardp are_integers(x, y) when is_integer(x) and is_integer(y)
@classes ~w(
alnum
alpha
ascii
blank
cntrl
digit
graph
lower
print
punct
space
upper
word
xdigit
)a
@type character_class ::
:alnum
| :alpha
| :ascii
| :blank
| :cntrl
| :digit
| :graph
| :lower
| :print
| :punct
| :space
| :upper
| :word
| :xdigit
@type set_opt ::
{:class, character_class}
| {:classes, list(character_class)}
| {:string, String.t()}
@type set_opts :: list(set_opt)
### Anchors ###
@doc ~S"""
Express the start of a line regex anchor. Translates to '^'.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.start_of_line() |> Vlx.find("A long time ago...") |> Regex.compile!()
~r/^(?:A\ long\ time\ ago\.\.\.)/
"""
@spec start_of_line(binary()) :: binary()
def start_of_line(before \\ "") when is_binary(before), do: before <> "^"
@doc ~S"""
Express the end of a line regex anchor. Translates to '$'.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.find("far, far, away.") |> Vlx.end_of_line() |> Regex.compile!()
~r/(?:far,\ far,\ away\.)$/
"""
@spec end_of_line(binary()) :: binary()
def end_of_line(before \\ "") when is_binary(before), do: before <> "$"
@doc ~S"""
Express a string literal to be matched exactly and wraps in a non-capturing group. Escapes special characters by default.
Turn off character escapes with the `escape: false` option.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.find("www.") |> Vlx.then("example.com")
"(?:www\\.)(?:example\\.com)"
"""
@spec then(binary(), binary(), [{:escape, false}]) :: binary()
def then(before, string, opts \\ [])
def then(before, string, escape: false) when are_binaries(before, string),
do: before <> "(?:" <> string <> ")"
def then(before, string, _) when are_binaries(before, string),
do: before <> "(?:" <> Regex.escape(string) <> ")"
@doc ~S"""
Express a string literal to be matched exactly and wraps in a non-capturing group.
Alias of `then/3` for readability semantics, as 'find' is a makes more sense when placed at the beginning of a pipe chain.
**NOTE:** Under the current implementation, when using the `escape: false` option in
pipe chains beginning with `find`, an empty string must be provided as the first value.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.find("needle") |> Vlx.if_preceded_by("hay") |> Vlx.if_followed_by("stack")
"(?<=hay)(?:needle)(?=stack)"
"""
@spec find(binary(), binary(), [{:escape, false}]) :: binary()
def find(before, string, opts \\ [])
def find(before, string, escape: false) when are_binaries(before, string),
do: then(before, string, escape: false)
def find(before, string, _) when are_binaries(before, string), do: then(before, string)
@spec find(binary()) :: binary()
def find(string) when is_binary(string), do: then("", string)
### Quantifiers ###
@doc ~S"""
Specify the number of expected occurences to be found on a given expression.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.digit() |> Vlx.capture() |> Vlx.occurs(4)
"(\d){4}"
"""
@spec occurs(binary(), integer()) :: binary()
def occurs(before, n) when is_binary(before) and is_integer(n), do: before <> "{#{n}}"
@doc ~S"""
Specify the minimum number of expected occurences to be found on a given expression.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.digit() |> Vlx.capture() |> Vlx.occurs_at_least(2)
"(\d){2,}"
"""
@spec occurs_at_least(binary(), integer()) :: binary()
def occurs_at_least(before, n) when is_binary(before) and is_integer(n), do: before <> "{#{n},}"
  @doc ~S"""
  Specify the range of expected occurrences to be found on a given expression.

  ## Examples

      iex> alias Verbalex, as: Vlx
      iex> Vlx.find("ha") |> Vlx.occurs_in_range(3, 6)
      "(?:ha){3,6}"
  """
  @spec occurs_in_range(binary(), integer(), integer()) :: binary()
  # `{min,max}` quantifier; no check that min <= max — the regex compiler
  # will reject an inverted range when the pattern is compiled.
  def occurs_in_range(before, min, max) when is_binary(before) and are_integers(min, max),
    do: before <> "{#{min},#{max}}"
@doc """
Express a pattern will occur zero or more times.
## Examples
iex> alias Verbalex, as: Vlx
iex> "" |> Vlx.anything_in(string: "#@$%", class: :alnum) |> Vlx.zero_or_more()
"[#@$%[:alnum:]]*"
"""
@spec zero_or_more(binary(), binary(), [{:escape, false}]) :: binary()
def zero_or_more(before, string \\ "", opts \\ [])
def zero_or_more(before, string, escape: false) when are_binaries(before, string),
do: before <> string <> "*"
def zero_or_more(before, string, _) when are_binaries(before, string),
do: before <> Regex.escape(string) <> "*"
@doc """
Express a pattern will occur one or more times.
## Examples
iex> alias Verbalex, as: Vlx
iex> "" |> Vlx.anything_in(class: :alnum) |> Vlx.one_or_more()
"[[:alnum:]]+"
"""
@spec one_or_more(binary(), binary(), [{:escape, false}]) :: binary()
def one_or_more(before, string \\ "", opts \\ [])
def one_or_more(before, string, escape: false) when are_binaries(before, string),
do: before <> string <> "+"
def one_or_more(before, string, _) when are_binaries(before, string),
do: before <> Regex.escape(string) <> "+"
@doc ~S"""
Express a pattern as being optional to match.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.find("colo") |> Vlx.maybe("u") |> Vlx.then("r")
"(?:colo)u?(?:r)"
"""
@spec maybe(binary(), binary(), [{:escape, false}]) :: binary()
def maybe(before, string \\ "", opts \\ [])
def maybe(before, string, escape: false) when are_binaries(before, string),
do: before <> string <> "?"
def maybe(before, string, _) when are_binaries(before, string),
do: before <> Regex.escape(string) <> "?"
### Special Characters
@doc ~S"""
Matches any character. Equivalent to "."
"""
@spec anything(binary()) :: binary()
def anything(before \\ "") when is_binary(before), do: before <> "."
@doc ~S"""
Match any of the characters in a given character class, classes, or string - provided as opts.
The supported class names (provided with `class: ...`, or `classes: [...]`) are:
* `:alnum` - Letters and digits
* `:alpha` - Letters
* `:ascii` - Character codes 0-127
* `:blank` - Space or tab only
* `:cntrl` - Control characters
* `:digit` - Decimal digits (same as \\d)
* `:graph` - Printing characters, excluding space
* `:lower` - Lowercase letters
* `:print` - Printing characters, including space
* `:punct` - Printing characters, excluding letters, digits, and space
* `:space` - Whitespace (the same as \s from PCRE 8.34)
* `:upper` - Uppercase letters
* `:word ` - "Word" characters (same as \w)
* `:xdigit` - Hexadecimal digits
## Examples
iex> alias Verbalex, as: Vlx
iex> "" |> Vlx.anything_in(class: :alnum, string: "._%+-") |> Vlx.one_or_more()
"[[:alnum:]._%+-]+"
iex> alias Verbalex, as: Vlx
iex> "" |> Vlx.anything_in(classes: [:lower, :punct])
"[[:lower:][:punct:]]"
"""
@spec anything_in(binary(), set_opts()) :: binary()
def anything_in(before, opts \\ []) do
set_string =
opts
|> Enum.map(&build_set_piece/1)
|> Enum.join()
before <> "[#{set_string}]"
end
@doc """
Inverse of `anything_in/2`.
"""
@spec anything_not_in(binary(), set_opts()) :: binary()
def anything_not_in(before, opts \\ []) do
set_string =
opts
|> Enum.map(&build_set_piece/1)
|> Enum.join()
before <> "[^#{set_string}]"
end
defp build_set_piece({:class, class}) when class in @classes, do: "[:#{class}:]"
defp build_set_piece({:string, string}) when is_binary(string), do: string
defp build_set_piece({:classes, classes}) when is_list(classes),
do: classes |> Enum.map(fn class -> build_set_piece({:class, class}) end) |> Enum.join()
@doc ~S"""
Matches a linebreak character. Equivalent to "\n".
"""
@spec linebreak(binary()) :: binary()
def linebreak(before \\ "") when is_binary(before), do: before <> "\n"
@doc ~S"""
Matches a tab character. Equivalent to "\t".
"""
@spec tab(binary()) :: binary()
def tab(before \\ "") when is_binary(before), do: before <> "\t"
@doc ~S"""
Matches a carriage return character. Equivalent to "\r".
"""
@spec carriage_return(binary()) :: binary()
def carriage_return(before \\ "") when is_binary(before), do: before <> "\r"
@doc ~S"""
Matches a digit (0-9). Equivalent to "\d".
"""
@spec digit(binary()) :: binary()
def digit(before \\ "") when is_binary(before), do: before <> "\d"
@doc ~S"""
Matches anything but a digit (0-9). Equivalent to "\D".
"""
@spec not_digit(binary()) :: binary()
def not_digit(before \\ "") when is_binary(before), do: before <> "\D"
@doc ~S"""
Matches a 'word', equivalent to [a-zA-Z0-9_]
"""
@spec word(binary()) :: binary()
def word(before \\ "") when is_binary(before), do: before <> "\w"
@doc ~S"""
Matches a 'word boundary' - a word bounary is the position between a word and non-word character (`0-9A-Za-z_]`).
It is commonly thought of as matching the beginning or end of a string.
"""
@spec word_boundary(binary()) :: binary()
def word_boundary(before \\ "") when is_binary(before), do: before <> "\b"
@doc ~S"""
Matches anything but a 'word', equivalent to [^a-zA-Z0-9_]
"""
@spec not_word(binary()) :: binary()
def not_word(before \\ "") when is_binary(before), do: before <> "\W"
@doc ~S"""
Matches a whitespace character - includes tabs and line breaks.
"""
@spec whitespace(binary()) :: binary()
def whitespace(before \\ "") when is_binary(before), do: before <> "\s"
@doc ~S"""
Matches anything but whitespace character - includes tabs and line breaks.
"""
@spec not_whitespace(binary()) :: binary()
def not_whitespace(before \\ "") when is_binary(before), do: before <> "\S"
### Grouping & Capturing
@doc ~S"""
Wraps a regex string in a capturing group.
## Examples
iex> alias Verbalex, as: Vlx
iex> pattern = Vlx.find("this") |> Vlx.then("that")
iex> Vlx.capture(pattern)
"((?:this)(?:that))"
"""
@spec capture(binary(), binary(), [{:escape, false}]) :: binary()
def capture(before, string, opts \\ [])
def capture(before, string, escape: false) when are_binaries(before, string),
do: before <> "(" <> string <> ")"
def capture(before, string, _) when are_binaries(before, string),
do: before <> "(" <> Regex.escape(string) <> ")"
def capture(before \\ "") when is_binary(before), do: "(" <> before <> ")"
@doc ~S"""
Wraps a regex string in a named-capturing group. To be utilised with `Regex.named_captures(regex, string, opts \\ [])`.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.find("http") |> Vlx.maybe("s") |> Vlx.then("://") |> Vlx.capture_as("protocols")
"(?<protocols>(?:http)s?(?::\/\/))"
"""
@spec capture_as(binary(), binary()) :: binary()
def capture_as(before, as) when are_binaries(before, as), do: "(?<#{as}>" <> before <> ")"
@doc ~S"""
Wraps a regex string in a non-capturing group. This facilitates easy application of
things like quantifiers or backreferences to the entirety of a given expression.
## Examples
iex> alias Verbalex, as: Vlx
iex> protocol = Vlx.find("http") |> Vlx.maybe("s") |> Vlx.then("://") |> Vlx.atomize()
iex> Vlx.maybe(protocol) |> Vlx.then("www.")
"(?:(?:http)s?(?:://))?(?:www\\.)"
"""
@spec atomize(binary()) :: binary()
def atomize(string) when is_binary(string), do: "(?:" <> string <> ")"
@spec atomize(binary(), binary()) :: binary()
def atomize(before, string) when are_binaries(before, string),
do: before <> "(?:" <> string <> ")"
### Logic ###
@doc ~S"""
Takes in a list of regex strings, and inserts an or operator between each of them.
Does not escape characters by default to preference OR-ing complex expressions and encourage composition.
Special character escaping is available via the `escape: true` option.
## Examples
iex> alias Verbalex, as: Vlx
iex> accepted_exprs = ["match", "any", "of", "these"]
iex> Vlx.or_expressions(accepted_exprs)
"(match|any|of|these)"
"""
@spec or_expressions(list(binary()), binary(), [{:escape, false}]) :: binary()
def or_expressions(subexps, string \\ "", opts \\ [])
def or_expressions([last | []], string, escape: true) when are_binaries(last, string),
do: "#{string}#{Regex.escape(last)})"
def or_expressions([head | tail], "", escape: true) when is_binary(head) do
or_expressions(tail, "(#{Regex.escape(head)}|")
end
def or_expressions([head | tail], string, escape: true) when are_binaries(head, string) do
or_expressions(tail, "#{string}#{Regex.escape(head)}|")
end
def or_expressions([last | []], string, _) when are_binaries(last, string),
do: "#{string}#{last})"
def or_expressions([head | tail], "", _) when is_binary(head),
do: or_expressions(tail, "(#{head}|")
def or_expressions([head | tail], string, _) when are_binaries(head, string),
do: or_expressions(tail, "#{string}#{head}|")
### Lookarounds ###
@doc ~S"""
Takes a regex string and applies a lookahead condition. Escapes special characters by default.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.find("sentence") |> Vlx.if_followed_by(".") |> Vlx.capture()
"((?:sentence)(?=\\.))"
"""
@spec if_followed_by(binary(), binary(), [{:escape, false}]) :: binary()
def if_followed_by(before, string, opts \\ [])
def if_followed_by(before, string, escape: false) when are_binaries(before, string) do
before <> "(?=" <> string <> ")"
end
def if_followed_by(before, string, _) when are_binaries(before, string) do
before <> "(?=" <> Regex.escape(string) <> ")"
end
@doc ~S"""
Takes a regex string and applies a negative lookahead condition. Escapes special characters by default.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.find("good") |> Vlx.if_not_followed_by("bye") |> Vlx.capture()
"((?:good)(?!bye))"
"""
@spec if_not_followed_by(binary(), binary(), [{:escape, false}]) :: binary()
def if_not_followed_by(before, string, opts \\ [])
def if_not_followed_by(before, string, escape: false) when are_binaries(before, string) do
before <> "(?!" <> string <> ")"
end
def if_not_followed_by(before, string, _) when are_binaries(before, string) do
before <> "(?!" <> Regex.escape(string) <> ")"
end
@doc ~S"""
Takes a regex string and applies a lookbehind condition. Escapes special characters by default.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.digit() |> Vlx.if_preceded_by("$") |> Vlx.capture()
"((?<=\\$)\d)"
"""
@spec if_preceded_by(binary(), binary(), [{:escape, false}]) :: binary()
def if_preceded_by(before, string, opts \\ [])
def if_preceded_by(before, string, escape: false) when are_binaries(before, string) do
"(?<=" <> string <> ")" <> before
end
def if_preceded_by(before, string, _) when are_binaries(before, string) do
"(?<=" <> Regex.escape(string) <> ")" <> before
end
@doc ~S"""
Takes a regex string and applies a negative lookbehind condition. Escapes special characters by default.
## Examples
iex> alias Verbalex, as: Vlx
iex> Vlx.digit() |> Vlx.if_not_preceded_by("%") |> Vlx.capture()
"((?<!%)\d)"
"""
@spec if_not_preceded_by(binary(), binary(), [{:escape, false}]) :: binary()
def if_not_preceded_by(before, string, opts \\ [])
def if_not_preceded_by(before, string, escape: false) when are_binaries(before, string) do
"(?<!" <> string <> ")" <> before
end
def if_not_preceded_by(before, string, _) when are_binaries(before, string) do
"(?<!" <> Regex.escape(string) <> ")" <> before
end
end
| 32.021569 | 124 | 0.61123 |
0344568c04b49f0b166d94476cfad518c722818c | 6,914 | ex | Elixir | clients/cloud_scheduler/lib/google_api/cloud_scheduler/v1/model/job.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_scheduler/lib/google_api/cloud_scheduler/v1/model/job.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_scheduler/lib/google_api/cloud_scheduler/v1/model/job.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudScheduler.V1.Model.Job do
  @moduledoc """
  Configuration for a job. The maximum allowed size for a job is 100KB.

  ## Attributes

  * `appEngineHttpTarget` (*type:* `GoogleApi.CloudScheduler.V1.Model.AppEngineHttpTarget.t`, *default:* `nil`) - App Engine HTTP target.
  * `attemptDeadline` (*type:* `String.t`, *default:* `nil`) - The deadline for job attempts. If the request handler does not respond by this deadline then the request is cancelled and the attempt is marked as a `DEADLINE_EXCEEDED` failure. The failed attempt can be viewed in execution logs. Cloud Scheduler will retry the job according to the RetryConfig. The allowed duration for this deadline is: * For HTTP targets, between 15 seconds and 30 minutes. * For App Engine HTTP targets, between 15 seconds and 24 hours.
  * `description` (*type:* `String.t`, *default:* `nil`) - Optionally caller-specified in CreateJob or UpdateJob. A human-readable description for the job. This string must not contain more than 500 characters.
  * `httpTarget` (*type:* `GoogleApi.CloudScheduler.V1.Model.HttpTarget.t`, *default:* `nil`) - HTTP target.
  * `lastAttemptTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The time the last job attempt started.
  * `name` (*type:* `String.t`, *default:* `nil`) - Optionally caller-specified in CreateJob, after which it becomes output only. The job name. For example: `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or periods (.). For more information, see [Identifying projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) * `LOCATION_ID` is the canonical ID for the job's location. The list of available locations can be obtained by calling ListLocations. For more information, see https://cloud.google.com/about/locations/. * `JOB_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), or underscores (_). The maximum length is 500 characters.
  * `pubsubTarget` (*type:* `GoogleApi.CloudScheduler.V1.Model.PubsubTarget.t`, *default:* `nil`) - Pub/Sub target.
  * `retryConfig` (*type:* `GoogleApi.CloudScheduler.V1.Model.RetryConfig.t`, *default:* `nil`) - Settings that determine the retry behavior.
  * `schedule` (*type:* `String.t`, *default:* `nil`) - Required, except when used with UpdateJob. Describes the schedule on which the job will be executed. The schedule can be either of the following types: * [Crontab](http://en.wikipedia.org/wiki/Cron#Overview) * English-like [schedule](https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules) As a general rule, execution `n + 1` of a job will not begin until execution `n` has finished. Cloud Scheduler will never allow two simultaneously outstanding executions. For example, this implies that if the `n+1`th execution is scheduled to run at 16:00 but the `n`th execution takes until 16:15, the `n+1`th execution will not start until `16:15`. A scheduled start time will be delayed if the previous execution has not ended when its scheduled time occurs. If retry_count > 0 and a job attempt fails, the job will be tried a total of retry_count times, with exponential backoff, until the next scheduled start time.
  * `scheduleTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The next time the job is scheduled. Note that this may be a retry of a previously failed attempt or the next execution time according to the schedule.
  * `state` (*type:* `String.t`, *default:* `nil`) - Output only. State of the job.
  * `status` (*type:* `GoogleApi.CloudScheduler.V1.Model.Status.t`, *default:* `nil`) - Output only. The response from the target for the last attempted execution.
  * `timeZone` (*type:* `String.t`, *default:* `nil`) - Specifies the time zone to be used in interpreting schedule. The value of this field must be a time zone name from the [tz database](http://en.wikipedia.org/wiki/Tz_database). Note that some time zones include a provision for daylight savings time. The rules for daylight saving time are determined by the chosen tz. For UTC use the string "utc". If a time zone is not specified, the default will be in UTC (also known as GMT).
  * `userUpdateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The creation time of the job.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :appEngineHttpTarget => GoogleApi.CloudScheduler.V1.Model.AppEngineHttpTarget.t() | nil,
          :attemptDeadline => String.t() | nil,
          :description => String.t() | nil,
          :httpTarget => GoogleApi.CloudScheduler.V1.Model.HttpTarget.t() | nil,
          :lastAttemptTime => DateTime.t() | nil,
          :name => String.t() | nil,
          :pubsubTarget => GoogleApi.CloudScheduler.V1.Model.PubsubTarget.t() | nil,
          :retryConfig => GoogleApi.CloudScheduler.V1.Model.RetryConfig.t() | nil,
          :schedule => String.t() | nil,
          :scheduleTime => DateTime.t() | nil,
          :state => String.t() | nil,
          :status => GoogleApi.CloudScheduler.V1.Model.Status.t() | nil,
          :timeZone => String.t() | nil,
          :userUpdateTime => DateTime.t() | nil
        }

  # Generated field declarations (see the auto-generation notice in the file
  # header — regenerate rather than hand-edit). Each must stay in sync with
  # the corresponding key in @type t above.
  field(:appEngineHttpTarget, as: GoogleApi.CloudScheduler.V1.Model.AppEngineHttpTarget)
  field(:attemptDeadline)
  field(:description)
  field(:httpTarget, as: GoogleApi.CloudScheduler.V1.Model.HttpTarget)
  field(:lastAttemptTime, as: DateTime)
  field(:name)
  field(:pubsubTarget, as: GoogleApi.CloudScheduler.V1.Model.PubsubTarget)
  field(:retryConfig, as: GoogleApi.CloudScheduler.V1.Model.RetryConfig)
  field(:schedule)
  field(:scheduleTime, as: DateTime)
  field(:state)
  field(:status, as: GoogleApi.CloudScheduler.V1.Model.Status)
  field(:timeZone)
  field(:userUpdateTime, as: DateTime)
end
defimpl Poison.Decoder, for: GoogleApi.CloudScheduler.V1.Model.Job do
  # Delegates to the decode/2 generated by GoogleApi.Gax.ModelBase on the
  # Job module.
  def decode(value, options) do
    GoogleApi.CloudScheduler.V1.Model.Job.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.CloudScheduler.V1.Model.Job do
  # Encoding is handled generically by the Gax model base.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 80.395349 | 984 | 0.724327 |
0344c8077827cf29a2d6f8e80b1a0a911d054159 | 2,555 | ex | Elixir | lib/epi_locator_web.ex | RatioPBC/epi-locator | 58c90500c4e0071ce365d76ec9812f9051d6a9f9 | [
"Apache-2.0"
] | null | null | null | lib/epi_locator_web.ex | RatioPBC/epi-locator | 58c90500c4e0071ce365d76ec9812f9051d6a9f9 | [
"Apache-2.0"
] | 6 | 2021-10-19T01:55:57.000Z | 2022-02-15T01:04:19.000Z | lib/epi_locator_web.ex | RatioPBC/epi-locator | 58c90500c4e0071ce365d76ec9812f9051d6a9f9 | [
"Apache-2.0"
] | 2 | 2022-01-21T08:38:50.000Z | 2022-01-21T08:42:04.000Z | defmodule EpiLocatorWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use EpiLocatorWeb, :controller
use EpiLocatorWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
  # Boilerplate injected by `use EpiLocatorWeb, :controller`:
  # Phoenix controller behaviour plus conn helpers, gettext, and route helpers.
  def controller do
    quote do
      use Phoenix.Controller, namespace: EpiLocatorWeb
      import Plug.Conn
      import EpiLocatorWeb.Gettext
      alias EpiLocatorWeb.Router.Helpers, as: Routes
    end
  end
  # Boilerplate injected by `use EpiLocatorWeb, :view`: template rendering
  # rooted at lib/epi_locator_web/templates plus the shared view helpers.
  def view do
    quote do
      use Phoenix.View,
        root: "lib/epi_locator_web/templates",
        namespace: EpiLocatorWeb

      # Import convenience functions from controllers
      import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
      # Include shared imports and aliases for views
      unquote(view_helpers())
    end
  end
  # Boilerplate injected by `use EpiLocatorWeb, :live_view`; all LiveViews
  # render inside the shared "live.html" layout.
  def live_view do
    quote do
      use Phoenix.LiveView,
        layout: {EpiLocatorWeb.LayoutView, "live.html"}

      unquote(view_helpers())
    end
  end
  # Boilerplate injected by `use EpiLocatorWeb, :live_component`.
  def live_component do
    quote do
      use Phoenix.LiveComponent

      unquote(view_helpers())
    end
  end
  # Boilerplate injected by `use EpiLocatorWeb, :router`.
  def router do
    quote do
      use Phoenix.Router

      import Plug.Conn
      import Phoenix.Controller
      import Phoenix.LiveView.Router
    end
  end
  # Boilerplate injected by `use EpiLocatorWeb, :channel`.
  def channel do
    quote do
      use Phoenix.Channel
      import EpiLocatorWeb.Gettext
    end
  end
  # Imports/aliases shared by view/1, live_view/1 and live_component/1, plus a
  # few tiny conveniences (noreply/ok wrappers and the app name lookup).
  defp view_helpers do
    quote do
      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML

      # Import LiveView helpers (live_render, live_component, live_patch, etc)
      import Phoenix.LiveView.Helpers

      # Import basic rendering functionality (render, render_layout, etc)
      import Phoenix.View

      import EpiLocatorWeb.ErrorHelpers
      import EpiLocatorWeb.Gettext
      alias EpiLocatorWeb.Router.Helpers, as: Routes

      # Wrappers so LiveView callbacks can end with `socket |> noreply()`.
      def noreply(socket), do: {:noreply, socket}
      def ok(socket), do: {:ok, socket}

      @spec app_name() :: String.t()
      # APP_NAME env var wins; falls back to the translated default.
      def app_name do
        System.get_env("APP_NAME", gettext("app name"))
      end
    end
  end
  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    # `use EpiLocatorWeb, :controller` calls controller/0 above, etc.
    apply(__MODULE__, which, [])
  end
end
| 23.227273 | 83 | 0.678278 |
0344e2b0d573548039c85d0885f37c86f4ae9c2e | 9,587 | ex | Elixir | lib/elixir/lib/supervisor/spec.ex | felix-alonso/elixir | 71503e07bdb8413b11cf2a46d38c648c72fa28bf | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/supervisor/spec.ex | felix-alonso/elixir | 71503e07bdb8413b11cf2a46d38c648c72fa28bf | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/supervisor/spec.ex | felix-alonso/elixir | 71503e07bdb8413b11cf2a46d38c648c72fa28bf | [
"Apache-2.0"
] | null | null | null | defmodule Supervisor.Spec do
@moduledoc """
Outdated functions for building child specifications.
The functions in this module are deprecated and they do not work
with the module-based child specs introduced in Elixir v1.5.
Please see the `Supervisor` documentation instead.
Convenience functions for defining supervisor specifications.
## Example
By using the functions in this module one can specify the children
to be used under a supervisor, started with `Supervisor.start_link/2`:
import Supervisor.Spec
children = [
worker(MyWorker, [arg1, arg2, arg3]),
supervisor(MySupervisor, [arg1])
]
Supervisor.start_link(children, strategy: :one_for_one)
Sometimes, it may be handy to define supervisors backed
by a module:
defmodule MySupervisor do
use Supervisor
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg)
end
def init(arg) do
children = [
worker(MyWorker, [arg], restart: :temporary)
]
supervise(children, strategy: :simple_one_for_one)
end
end
Notice in this case we don't have to explicitly import
`Supervisor.Spec` as `use Supervisor` automatically does so.
Defining a module-based supervisor can be useful, for example,
to perform initialization tasks in the `c:Supervisor.init/1` callback.
## Supervisor and worker options
In the example above, we defined specs for workers and supervisors.
These specs (both for workers as well as supervisors) accept the
following options:
* `:id` - a name used to identify the child specification
internally by the supervisor; defaults to the given module
name for the child worker/supervisor
* `:function` - the function to invoke on the child to start it
* `:restart` - an atom that defines when a terminated child process should
be restarted (see the "Restart values" section below)
* `:shutdown` - an atom that defines how a child process should be
terminated (see the "Shutdown values" section below)
* `:modules` - it should be a list with one element `[module]`,
where module is the name of the callback module only if the
child process is a `Supervisor` or `GenServer`; if the child
process is a `GenEvent`, `:modules` should be `:dynamic`
### Restart values (:restart)
The following restart values are supported in the `:restart` option:
* `:permanent` - the child process is always restarted
* `:temporary` - the child process is never restarted (not even
when the supervisor's strategy is `:rest_for_one` or `:one_for_all`)
* `:transient` - the child process is restarted only if it
terminates abnormally, i.e., with an exit reason other than
`:normal`, `:shutdown` or `{:shutdown, term}`
Notice that supervisor that reached maximum restart intensity will exit with `:shutdown` reason.
In this case the supervisor will only restart if its child specification was defined with
the `:restart` option set to `:permanent` (the default).
### Shutdown values (`:shutdown`)
The following shutdown values are supported in the `:shutdown` option:
* `:brutal_kill` - the child process is unconditionally terminated
using `Process.exit(child, :kill)`
* `:infinity` - if the child process is a supervisor, this is a mechanism
to give the subtree enough time to shut down; it can also be used with
workers with care
* a non-negative integer - the amount of time in milliseconds
that the supervisor tells the child process to terminate by calling
`Process.exit(child, :shutdown)` and then waits for an exit signal back.
If no exit signal is received within the specified time,
the child process is unconditionally terminated
using `Process.exit(child, :kill)`
"""
@moduledoc deprecated:
"Use the new child specifications outlined in the Supervisor module instead"
@typedoc "Supported strategies"
@type strategy :: :simple_one_for_one | :one_for_one | :one_for_all | :rest_for_one
@typedoc "Supported restart values"
@type restart :: :permanent | :transient | :temporary
@typedoc "Supported shutdown values"
@type shutdown :: timeout | :brutal_kill
@typedoc "Supported worker values"
@type worker :: :worker | :supervisor
@typedoc "Supported module values"
@type modules :: :dynamic | [module]
@typedoc "Supported ID values"
@type child_id :: term
@typedoc "The supervisor specification"
@type spec ::
{child_id, start_fun :: {module, atom, [term]}, restart, shutdown, worker, modules}
@doc """
Receives a list of `children` (workers or supervisors) to
supervise and a set of `options`.
Returns a tuple containing the supervisor specification. This tuple can be
used as the return value of the `c:Supervisor.init/1` callback when implementing a
module-based supervisor.
## Examples
supervise(children, strategy: :one_for_one)
## Options
* `:strategy` - the restart strategy option. It can be either
`:one_for_one`, `:rest_for_one`, `:one_for_all`, or
`:simple_one_for_one`. You can learn more about strategies
in the `Supervisor` module docs.
* `:max_restarts` - the maximum number of restarts allowed in
a time frame. Defaults to `3`.
* `:max_seconds` - the time frame in which `:max_restarts` applies.
Defaults to `5`.
The `:strategy` option is required and by default a maximum of 3 restarts is
allowed within 5 seconds. Check the `Supervisor` module for a detailed
description of the available strategies.
"""
@spec supervise(
[spec],
strategy: strategy,
max_restarts: non_neg_integer,
max_seconds: pos_integer
) :: {:ok, tuple}
@deprecated "Use the new child specifications outlined in the Supervisor module instead"
def supervise(children, options) do
unless strategy = options[:strategy] do
raise ArgumentError, "expected :strategy option to be given"
end
maxR = Keyword.get(options, :max_restarts, 3)
maxS = Keyword.get(options, :max_seconds, 5)
assert_unique_ids(Enum.map(children, &get_id/1))
{:ok, {{strategy, maxR, maxS}, children}}
end
  # Extracts the child ID (first element) from an old-style 6-tuple spec.
  defp get_id({id, _, _, _, _, _}) do
    id
  end

  # Anything else — notably the Elixir v1.5+ map-based child spec — is
  # rejected with a pointer to Supervisor.init/2.
  defp get_id(other) do
    raise ArgumentError,
          "invalid tuple specification given to supervise/2. If you are trying to use " <>
            "the map child specification that is part of the Elixir v1.5, use Supervisor.init/2 " <>
            "instead of Supervisor.Spec.supervise/2. See the Supervisor module for more information. " <>
            "Got: #{inspect(other)}"
  end
  # Raises on the first ID that reappears later in the list; otherwise :ok.
  # O(n^2) via `id in rest`, which is fine for typical supervisor child counts.
  defp assert_unique_ids([id | rest]) do
    if id in rest do
      raise ArgumentError,
            "duplicated ID #{inspect(id)} found in the supervisor specification, " <>
              "please explicitly pass the :id option when defining this worker/supervisor"
    else
      assert_unique_ids(rest)
    end
  end

  defp assert_unique_ids([]) do
    :ok
  end
@doc """
Defines the given `module` as a worker which will be started
with the given arguments.
worker(ExUnit.Runner, [], restart: :permanent)
By default, the function `start_link` is invoked on the given
module. Overall, the default values for the options are:
[
id: module,
function: :start_link,
restart: :permanent,
shutdown: 5000,
modules: [module]
]
See the "Supervisor and worker options" section in the `Supervisor.Spec` module for more
information on the available options.
"""
@spec worker(
module,
[term],
restart: restart,
shutdown: shutdown,
id: term,
function: atom,
modules: modules
) :: spec
@deprecated "Use the new child specifications outlined in the Supervisor module instead"
def worker(module, args, options \\ []) do
child(:worker, module, args, options)
end
@doc """
Defines the given `module` as a supervisor which will be started
with the given arguments.
supervisor(module, [], restart: :permanent)
By default, the function `start_link` is invoked on the given
module. Overall, the default values for the options are:
[
id: module,
function: :start_link,
restart: :permanent,
shutdown: :infinity,
modules: [module]
]
See the "Supervisor and worker options" section in the `Supervisor.Spec` module for more
information on the available options.
"""
@spec supervisor(
module,
[term],
restart: restart,
shutdown: shutdown,
id: term,
function: atom,
modules: modules
) :: spec
@deprecated "Use the new child specifications outlined in the Supervisor module instead"
def supervisor(module, args, options \\ []) do
options = Keyword.put_new(options, :shutdown, :infinity)
child(:supervisor, module, args, options)
end
defp child(type, module, args, options) do
id = Keyword.get(options, :id, module)
modules = Keyword.get(options, :modules, modules(module))
function = Keyword.get(options, :function, :start_link)
restart = Keyword.get(options, :restart, :permanent)
shutdown = Keyword.get(options, :shutdown, 5000)
{id, {module, function, args}, restart, shutdown, type, modules}
end
  # GenEvent handlers are swapped at runtime, so their module list is
  # :dynamic; every other child is backed by its single callback module.
  defp modules(GenEvent), do: :dynamic
  defp modules(module), do: [module]
end
| 33.058621 | 105 | 0.676124 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.