hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
733dfa24e804ddb6cbab132e2e295a5c8c5db960 | 491 | ex | Elixir | apps/ewallet_api/lib/ewallet_api/endpoint.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_api/lib/ewallet_api/endpoint.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_api/lib/ewallet_api/endpoint.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | defmodule EWalletAPI.Endpoint do
  # Phoenix HTTP endpoint for the eWallet API: the plug pipeline every
  # request passes through, in order, before reaching the router.
  use Phoenix.Endpoint, otp_app: :ewallet_api

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    plug(Phoenix.CodeReloader)
  end

  # Assign a unique request id and log each request.
  plug(Plug.RequestId)
  plug(Plug.Logger)

  # Parse urlencoded/multipart/JSON bodies with Poison; `pass: ["*/*"]`
  # lets unrecognised content types through unparsed instead of raising.
  plug(
    Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Poison
  )

  # Honour the `_method` override param; answer HEAD requests as GET
  # without a response body.
  plug(Plug.MethodOverride)
  plug(Plug.Head)

  # The router must be the last plug in the chain.
  plug(EWalletAPI.Router)
end
| 19.64 | 54 | 0.706721 |
733e0c49c346cbff197f78a5d10abaee2b4f1225 | 5,734 | exs | Elixir | test/unit/iri_expansion_test.exs | rustra/jsonld-ex | 4f903a04d4ba9f0f0df1b871a92eb89bc20f38dc | [
"MIT"
] | 12 | 2020-06-18T18:23:34.000Z | 2022-01-05T15:00:09.000Z | test/unit/iri_expansion_test.exs | rustra/jsonld-ex | 4f903a04d4ba9f0f0df1b871a92eb89bc20f38dc | [
"MIT"
] | 5 | 2020-06-16T16:18:14.000Z | 2021-08-19T20:23:03.000Z | test/unit/iri_expansion_test.exs | rustra/jsonld-ex | 4f903a04d4ba9f0f0df1b871a92eb89bc20f38dc | [
"MIT"
] | 6 | 2020-06-17T22:25:51.000Z | 2021-08-19T19:55:49.000Z | defmodule JSON.LD.IRIExpansionTest do
  # Tests for JSON.LD.IRIExpansion.expand_iri/2..4: blank nodes, plus
  # relative-IRI expansion with no options, against the @base IRI, and
  # against the @vocab IRI.
  use ExUnit.Case, async: false

  import JSON.LD.IRIExpansion

  setup do
    # Shared example context: @base and @vocab IRIs, an "ex" prefix, and
    # the empty ("") and underscore ("_") terms.
    context =
      JSON.LD.context(%{
        "@base" => "http://base/",
        "@vocab" => "http://vocab/",
        "ex" => "http://example.org/",
        "" => "http://empty/",
        "_" => "http://underscore/"
      })

    %{example_context: context}
  end

  test "bnode", %{example_context: context} do
    assert expand_iri("_:a", context) == "_:a"
  end

  # Each describe block drives a table of {title, [input, expected]}
  # pairs through Enum.each, generating one test per table row via the
  # @tag-injected :data context key.
  describe "relative IRI with no options" do
    # TODO: Test this with RDF.URIs and RDF.BlankNodes
    # %{
    #   "absolute IRI" => ["http://example.org/", ~I<http://example.org/>],
    #   "term" => ["ex", ~I<ex>],
    #   "prefix:suffix" => ["ex:suffix", ~I<http://example.org/suffix>],
    #   "keyword" => ["@type", "@type"],
    #   "empty" => [":suffix", ~I<http://empty/suffix>],
    #   "unmapped" => ["foo", ~I<foo>],
    #   "empty term" => ["", ~I<>],
    #   "another abs IRI"=>["ex://foo", ~I<ex://foo>],
    #   "absolute IRI looking like a curie" =>
    #     ["foo:bar", ~I<foo:bar>],
    #   "bnode" => ["_:t0", RDF.bnode("t0")],
    #   "_" => ["_", ~I<_>],
    # }
    %{
      "absolute IRI" => ["http://example.org/", "http://example.org/"],
      "term" => ["ex", "ex"],
      "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"],
      "keyword" => ["@type", "@type"],
      "empty" => [":suffix", "http://empty/suffix"],
      "unmapped" => ["foo", "foo"],
      "empty term" => ["", ""],
      "another abs IRI" => ["ex://foo", "ex://foo"],
      "absolute IRI looking like a curie" => ["foo:bar", "foo:bar"],
      "bnode" => ["_:t0", "_:t0"],
      "_" => ["_", "_"]
    }
    |> Enum.each(fn {title, data} ->
      @tag data: data
      test title, %{data: [input, result], example_context: context} do
        assert expand_iri(input, context) == result
      end
    end)
  end

  describe "relative IRI with base IRI" do
    # TODO: Test this with RDF.URIs and RDF.BlankNodes
    # %{
    #   "absolute IRI" => ["http://example.org/", ~I<http://example.org/>],
    #   "term" => ["ex", ~I<http://base/ex>],
    #   "prefix:suffix" => ["ex:suffix", ~I<http://example.org/suffix>],
    #   "keyword" => ["@type", "@type"],
    #   "empty" => [":suffix", ~I<http://empty/suffix>],
    #   "unmapped" => ["foo", ~I<http://base/foo>],
    #   "empty term" => ["", ~I<http://base/>],
    #   "another abs IRI"=>["ex://foo", ~I<ex://foo>],
    #   "absolute IRI looking like a curie" =>
    #     ["foo:bar", ~I<foo:bar>],
    #   "bnode" => ["_:t0", RDF.bnode("t0")],
    #   "_" => ["_", ~I<http://base/_>],
    # }
    %{
      "absolute IRI" => ["http://example.org/", "http://example.org/"],
      "term" => ["ex", "http://base/ex"],
      "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"],
      "keyword" => ["@type", "@type"],
      "empty" => [":suffix", "http://empty/suffix"],
      "unmapped" => ["foo", "http://base/foo"],
      "empty term" => ["", "http://base/"],
      "another abs IRI" => ["ex://foo", "ex://foo"],
      "absolute IRI looking like a curie" => ["foo:bar", "foo:bar"],
      "bnode" => ["_:t0", "_:t0"],
      "_" => ["_", "http://base/_"]
    }
    |> Enum.each(fn {title, data} ->
      @tag data: data
      test title, %{data: [input, result], example_context: context} do
        # Third argument enables resolution against @base.
        assert expand_iri(input, context, true) == result
      end
    end)
  end

  describe "relative IRI @vocab" do
    # TODO: Test this with RDF.URIs and RDF.BlankNodes
    # %{
    #   "absolute IRI" => ["http://example.org/", ~I<http://example.org/>],
    #   "term" => ["ex", ~I<http://example.org/>],
    #   "prefix:suffix" => ["ex:suffix", ~I<http://example.org/suffix>],
    #   "keyword" => ["@type", "@type"],
    #   "empty" => [":suffix", ~I<http://empty/suffix>],
    #   "unmapped" => ["foo", ~I<http://vocab/foo>],
    #   "empty term" => ["", ~I<http://empty/>],
    #   "another abs IRI"=>["ex://foo", ~I<ex://foo>],
    #   "absolute IRI looking like a curie" =>
    #     ["foo:bar", ~I<foo:bar>],
    #   "bnode" => ["_:t0", RDF.bode("t0")],
    #   "_" => ["_", ~I<http://underscore/>],
    # }
    %{
      "absolute IRI" => ["http://example.org/", "http://example.org/"],
      "term" => ["ex", "http://example.org/"],
      "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"],
      "keyword" => ["@type", "@type"],
      "empty" => [":suffix", "http://empty/suffix"],
      "unmapped" => ["foo", "http://vocab/foo"],
      "empty term" => ["", "http://empty/"],
      "another abs IRI" => ["ex://foo", "ex://foo"],
      "absolute IRI looking like a curie" => ["foo:bar", "foo:bar"],
      "bnode" => ["_:t0", "_:t0"],
      "_" => ["_", "http://underscore/"]
    }
    |> Enum.each(fn {title, data} ->
      @tag data: data
      test title, %{data: [input, result], example_context: context} do
        # Fourth argument enables expansion against @vocab.
        assert expand_iri(input, context, false, true) == result
      end
    end)
  end
end
| 42.791045 | 85 | 0.420998 |
733e100e5ca8c0137b5d3857cd194e4187414061 | 575 | ex | Elixir | lib/elixir_microservice_base/application.ex | g13ydson/elixir_microservice_base | 1048c77198d5ec1c3b5eab90b38f736ee2f6c8be | [
"MIT"
] | 5 | 2019-07-26T11:22:30.000Z | 2021-07-04T00:07:15.000Z | lib/elixir_microservice_base/application.ex | g13ydson/elixir_microservice_base | 1048c77198d5ec1c3b5eab90b38f736ee2f6c8be | [
"MIT"
] | 4 | 2019-07-26T16:08:43.000Z | 2019-10-21T09:15:21.000Z | lib/elixir_microservice_base/application.ex | emcasa/elixir_microservice_base | cbfe61d3959320edbcd159ba0dce4cb247420b01 | [
"MIT"
] | null | null | null | defmodule ElixirMicroserviceBase.Application do
@moduledoc false
use Application
@port Application.get_env(:elixir_microservice_base, :port, 4000)
def start(_type, _args) do
ElixirMicroserviceBase.Metrics.setup()
{:ok, _} = Logger.add_backend(Sentry.LoggerBackend)
children = [
Plug.Cowboy.child_spec(
scheme: :http,
plug: ElixirMicroserviceBase.Router,
options: [port: @port]
)
]
opts = [strategy: :one_for_one, name: ElixirMicroserviceBase.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 23.958333 | 76 | 0.697391 |
733e28681b2d5854a7dfea5e37c50300e32edebc | 274 | exs | Elixir | config/test.exs | pastleo/iex_line_bot | 73d02b45adc05bc7331fa5f88859861d04a2e71f | [
"MIT"
] | 1 | 2019-06-24T23:55:26.000Z | 2019-06-24T23:55:26.000Z | config/test.exs | pastleo/iex_line_bot | 73d02b45adc05bc7331fa5f88859861d04a2e71f | [
"MIT"
] | null | null | null | config/test.exs | pastleo/iex_line_bot | 73d02b45adc05bc7331fa5f88859861d04a2e71f | [
"MIT"
] | null | null | null | use Mix.Config

# Test-environment configuration for the IexLineBot application.
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :iex_line_bot, IexLineBotWeb.Endpoint,
  http: [port: 4002],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn
| 24.909091 | 56 | 0.744526 |
733e691a98dc7fcf66bdeba5fe893ee15730636e | 1,459 | exs | Elixir | godin_umbrella/apps/godin/mix.exs | ruben44bac/god-n | af78dc683b58bb3a5e4dbfec2dd53887651a8aa7 | [
"MIT"
] | null | null | null | godin_umbrella/apps/godin/mix.exs | ruben44bac/god-n | af78dc683b58bb3a5e4dbfec2dd53887651a8aa7 | [
"MIT"
] | null | null | null | godin_umbrella/apps/godin/mix.exs | ruben44bac/god-n | af78dc683b58bb3a5e4dbfec2dd53887651a8aa7 | [
"MIT"
] | null | null | null | defmodule Godin.MixProject do
  @moduledoc false
  # Mix project definition for the :godin app inside an umbrella;
  # build/deps/lock paths point two levels up at the umbrella root.
  use Mix.Project

  def project do
    [
      app: :godin,
      version: "0.1.0",
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.5",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {Godin.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Specifies which paths to compile per environment.
  # Test support modules are only compiled in :test.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:ecto_sql, "~> 3.0"},
      {:postgrex, ">= 0.0.0"}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 25.155172 | 79 | 0.594243 |
733e7569a3c8f875108ff663a5511c60efef6cb4 | 7,633 | exs | Elixir | test/support/schema.exs | andriybohdan/ex_admin | e31c725078ac4e7390204a87d96360a21ffe7b90 | [
"MIT"
] | 1 | 2018-08-30T20:20:56.000Z | 2018-08-30T20:20:56.000Z | test/support/schema.exs | 8thlight/ex_admin | 314d4068270c47799ec54f719073a565222bcfad | [
"MIT"
] | null | null | null | test/support/schema.exs | 8thlight/ex_admin | 314d4068270c47799ec54f719073a565222bcfad | [
"MIT"
] | 1 | 2020-06-06T18:08:16.000Z | 2020-06-06T18:08:16.000Z | defmodule TestExAdmin.User do
  import Ecto.Changeset
  use Ecto.Schema
  import Ecto.Query

  # Test schema: a user owning products and noids, with a replaceable
  # many-to-many roles association through users_roles.
  schema "users" do
    field :name, :string
    field :email, :string
    field :active, :boolean, default: true
    has_many :products, TestExAdmin.Product, on_replace: :delete
    has_many :noids, TestExAdmin.Noid
    many_to_many :roles, TestExAdmin.Role, join_through: TestExAdmin.UserRole, on_replace: :delete
  end

  @fields ~w(name active email)a
  @required_fields ~w(email)a

  @doc """
  Builds a changeset for `model` from `params`: casts the base fields,
  requires `:email`, casts nested `:noids`/`:products`, and replaces the
  roles association when `params[:roles]` holds role ids.
  """
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required(@required_fields)
    |> cast_assoc(:noids, required: false)
    |> cast_assoc(:products, required: false)
    |> add_roles(params)
    # NOTE: the source as dumped was missing this closing `end` for
    # changeset/2, which made the module unparsable; restored here.
  end

  @doc false
  def add_roles(changeset, params) do
    # Only touch the association when a non-empty list of role ids was
    # supplied. Pattern matching avoids the O(n) `Enum.count(...) > 0`
    # emptiness check and the second `params[:roles]` lookup.
    case Map.get(params, :roles, []) do
      [_ | _] = ids ->
        roles = TestExAdmin.Repo.all(from r in TestExAdmin.Role, where: r.id in ^ids)
        put_assoc(changeset, :roles, roles)

      _ ->
        changeset
    end
  end
end
defmodule TestExAdmin.Role do
  # A named role; users are linked through the users_roles join table.
  use Ecto.Schema
  import Ecto.Changeset
  alias TestExAdmin.Repo

  schema "roles" do
    field :name, :string
    has_many :uses_roles, TestExAdmin.UserRole
    many_to_many :users, TestExAdmin.User, join_through: TestExAdmin.UserRole
  end

  @fields ~w(name)a

  @doc "Casts `params` onto `model` and requires `:name`."
  def changeset(model, params \\ %{}) do
    validate_required(cast(model, params, @fields), @fields)
  end

  @doc "Returns every role in the repo."
  def all, do: Repo.all(__MODULE__)
end
defmodule TestExAdmin.UserRole do
  use Ecto.Schema
  import Ecto.Changeset

  # Join table backing the User <-> Role many_to_many association.
  schema "users_roles" do
    belongs_to :user, TestExAdmin.User
    belongs_to :role, TestExAdmin.Role
    timestamps()
  end

  @fields ~w(user_id role_id)a

  # Casts and requires both foreign keys.
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required(@fields)
  end
end
defmodule TestExAdmin.Product do
  use Ecto.Schema
  import Ecto.Changeset

  # Product owned by a user. The virtual `_destroy` flag lets nested
  # form params mark a record for deletion.
  schema "products" do
    field :_destroy, :boolean, virtual: true
    field :title, :string
    field :price, :decimal
    belongs_to :user, TestExAdmin.User
  end

  @fields ~w(title price user_id)a
  @required_fields ~w(title price)a

  # Casts fields, requires title/price, then checks the delete flag.
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required(@required_fields)
    |> mark_for_deletion()
  end

  defp mark_for_deletion(changeset) do
    # If delete was set and it is true, let's change the action
    # so Repo operations delete the record instead of updating it.
    if get_change(changeset, :_destroy) do
      %{changeset | action: :delete}
    else
      changeset
    end
  end
end
defmodule TestExAdmin.Noid do
  import Ecto.Changeset
  use Ecto.Schema

  # Schema whose primary key is the string :name column rather than an
  # auto-generated integer id.
  @primary_key {:name, :string, []}
  # @derive {Phoenix.Param, key: :name}
  schema "noids" do
    field :description, :string
    field :company, :string
    belongs_to :user, TestExAdmin.User, foreign_key: :user_id, references: :id
  end

  @fields ~w(name description company user_id)a
  @required_fields ~w(name description)a

  # Casts all fields and requires name/description.
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required(@required_fields)
  end
end
defmodule TestExAdmin.Noprimary do
  import Ecto.Changeset
  use Ecto.Schema

  # Schema with no primary key at all.
  @primary_key false
  schema "noprimarys" do
    field :index, :integer
    field :name, :string
    field :description, :string
    timestamps()
  end

  @fields ~w(index description name)a
  @required_fields ~w(name)a

  # Casts all fields and requires :name.
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required(@required_fields)
  end
end
defmodule TestExAdmin.Simple do
  # Simple schema instrumented with a named Agent: each changeset
  # variant records its own name, so tests can assert which function
  # ExAdmin invoked via last_changeset/0.
  import Ecto.Changeset
  use Ecto.Schema

  schema "simples" do
    field :name, :string
    field :description, :string
    # Virtual flag, never persisted.
    field :exists?, :boolean, virtual: true
    timestamps()
  end

  @fields ~w(name description)a
  @required_fields ~w(name)a

  def changeset(model, params \\ %{}) do
    # Record that the default changeset was used.
    Agent.update(__MODULE__, fn (_v) -> "changeset" end)

    model
    |> cast(params, @fields)
    |> validate_required(@required_fields)
  end

  # Starts the Agent (named after this module) holding the name of the
  # most recently invoked changeset function; tests call this in setup.
  def start_link do
    Agent.start_link(fn -> nil end, name: __MODULE__)
  end

  def changeset_create(model, params \\ %{}) do
    Agent.update(__MODULE__, fn (_v) -> "changeset_create" end)

    model
    |> cast(params, @fields)
    |> validate_required(@required_fields)
  end

  def changeset_update(model, params \\ %{}) do
    Agent.update(__MODULE__, fn (_v) -> "changeset_update" end)

    model
    |> cast(params, @fields)
    |> validate_required(@required_fields)
  end

  # Name of the changeset function called most recently, or nil.
  def last_changeset do
    Agent.get(__MODULE__, fn changeset -> changeset end)
  end

  # Stops the Agent; tests call this in teardown.
  def stop do
    Agent.stop(__MODULE__)
  end
end
defmodule TestExAdmin.Restricted do
  # Minimal schema used to exercise restricted resources in the suite.
  use Ecto.Schema
  import Ecto.Changeset

  schema "restricteds" do
    field :name, :string
    field :description, :string
  end

  @fields ~w(name description)a
  @required_fields ~w(name)a

  @doc "Casts name/description onto `model` and requires `:name`."
  def changeset(model, params \\ %{}) do
    validate_required(cast(model, params, @fields), @required_fields)
  end
end
defmodule TestExAdmin.PhoneNumber do
  import Ecto.Changeset
  use Ecto.Schema
  import Ecto.Query

  alias __MODULE__
  alias TestExAdmin.Repo

  # Phone number linked to contacts through the contacts_phone_numbers
  # join table.
  schema "phone_numbers" do
    field :number, :string
    field :label, :string
    has_many :contacts_phone_numbers, TestExAdmin.ContactPhoneNumber
    has_many :contacts, through: [:contacts_phone_numbers, :contact]
    timestamps()
  end

  @fields ~w(number label)a

  # Casts and requires both :number and :label.
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required(@fields)
  end

  # The fixed list of known labels.
  def labels, do: ["Primary Phone", "Secondary Phone", "Home Phone",
    "Work Phone", "Mobile Phone", "Other Phone"]

  # Distinct labels currently stored in the database (via GROUP BY).
  def all_labels do
    (from p in PhoneNumber, group_by: p.label, select: p.label)
    |> Repo.all
  end
end
defmodule TestExAdmin.Contact do
  import Ecto.Changeset
  use Ecto.Schema

  # Contact reaching its phone numbers through the join table.
  schema "contacts" do
    field :first_name, :string
    field :last_name, :string
    has_many :contacts_phone_numbers, TestExAdmin.ContactPhoneNumber
    has_many :phone_numbers, through: [:contacts_phone_numbers, :phone_number]
    timestamps()
  end

  @fields ~w(first_name last_name)a

  # Casts and requires both name fields.
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required(@fields)
  end
end
defmodule TestExAdmin.ContactPhoneNumber do
  import Ecto.Changeset
  use Ecto.Schema

  # Join table between contacts and phone numbers.
  schema "contacts_phone_numbers" do
    belongs_to :contact, TestExAdmin.Contact
    belongs_to :phone_number, TestExAdmin.PhoneNumber
  end

  @fields ~w(contact_id phone_number_id)a

  # Casts and requires both foreign keys.
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required(@fields)
  end
end
defmodule TestExAdmin.UUIDSchema do
  import Ecto.Changeset
  use Ecto.Schema

  # Test schema with a binary_id (UUID) primary key named :key.
  @primary_key {:key, :binary_id, autogenerate: true}
  schema "uuid_schemas" do
    field :name, :string
    timestamps()
  end

  # Was `~w(name)` (a list of strings); Ecto.Changeset.cast/3 and
  # validate_required/2 expect atom field names, and every other schema
  # in this file uses the `~w(...)a` atom sigil.
  @fields ~w(name)a

  @doc "Builds a changeset casting and requiring `:name`."
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required(@fields)
  end
end
defmodule TestExAdmin.ModelDisplayName do
  # Schema exposing a model-defined display_name/1 and model_name/0;
  # presumably used to verify ExAdmin prefers these over its defaults —
  # confirm against the admin test suite.
  use Ecto.Schema

  schema "model_display_name" do
    field :first, :string
    field :name, :string
    field :other, :string
  end

  @doc "Display value for a record: its `:other` field."
  def display_name(resource), do: resource.other

  @doc "Custom resource name for this model."
  def model_name, do: "custom_name"
end
defmodule TestExAdmin.DefnDisplayName do
  # Schema that defines no display_name/1 or model_name/0 of its own —
  # presumably the counterpart to ModelDisplayName for testing fallback
  # name resolution; confirm against the admin test suite.
  use Ecto.Schema

  schema "defn_display_name" do
    field :first, :string
    field :second, :string
    field :name, :string
  end
end
defmodule TestExAdmin.Maps do
  # Schema exercising map and array-of-map field types.
  use Ecto.Schema

  schema "maps" do
    field :name, :string
    field :addresses, {:array, :map}
    field :stats, :map
  end
end
| 21.501408 | 98 | 0.6861 |
733e7eca949641eaa6bcc211178c78d6c056e1b1 | 1,578 | exs | Elixir | test/untrusted/validator_functions_test.exs | elbow-jason/untrusted | 8b8108f46f34fd418f291d7c066b41241890d98a | [
"MIT"
] | 1 | 2020-01-13T21:39:11.000Z | 2020-01-13T21:39:11.000Z | test/untrusted/validator_functions_test.exs | elbow-jason/untrusted | 8b8108f46f34fd418f291d7c066b41241890d98a | [
"MIT"
] | null | null | null | test/untrusted/validator_functions_test.exs | elbow-jason/untrusted | 8b8108f46f34fd418f291d7c066b41241890d98a | [
"MIT"
] | null | null | null | defmodule Untrusted.ValidatorFunctionsTest do
  # Tests for Untrusted.ValidatorFunctions.must_be_one_of/1 and
  # must_be_key_of/1, which build arity-1 validator functions returning
  # {:ok, value} or {:error, reason} tuples.
  use ExUnit.Case

  describe "must_be_one_of/1" do
    test "returns a function of arity 1" do
      func = Untrusted.ValidatorFunctions.must_be_one_of([1, 2, 3])
      assert is_function(func, 1)
    end

    test "func returns an ok-value-tuple when value is in the list" do
      item = 1
      items = [1, 2, 3]
      func = Untrusted.ValidatorFunctions.must_be_one_of(items)
      # Sanity-check the fixture before asserting the validator result.
      assert item in items
      assert func.(item) == {:ok, item}
    end

    test "func returns an error when value is not in the list" do
      item = :no
      items = [1, 2, 3]
      func = Untrusted.ValidatorFunctions.must_be_one_of(items)
      assert item not in items
      # The error reason echoes the allowed values.
      assert func.(item) == {:error, {:must_be_one_of, items}}
    end

    test "can handle a MapSet" do
      set = MapSet.new([1, 2, 3])
      func = Untrusted.ValidatorFunctions.must_be_one_of(set)
      assert is_function(func, 1)
      assert 1 in set
      assert func.(1) == {:ok, 1}
    end
  end

  describe "must_be_key_of/1" do
    test "returns a function of arity 1" do
      mapping = %{
        "ONE" => 1,
        "TWO" => 2
      }

      func = Untrusted.ValidatorFunctions.must_be_key_of(mapping)
      assert is_function(func, 1)
    end

    test "func returns an ok-value-tuple when value is in the keys of the map" do
      mapping = %{
        "ONE" => 1,
        "TWO" => 2
      }

      func = Untrusted.ValidatorFunctions.must_be_key_of(mapping)
      assert Map.get(mapping, "ONE") == 1
      # On success the validator returns the mapped value, not the key.
      assert func.("ONE") == {:ok, 1}
    end
  end
end
| 27.206897 | 81 | 0.614068 |
733ea09bbe6a5088b5e5ebb663db95fa91d009ca | 11,129 | exs | Elixir | lib/elixir/test/elixir/io/ansi/docs_test.exs | wstrinz/elixir | 1048b34d6c816f8e5dbd4fdbaaf9baa41b4f0d95 | [
"Apache-2.0"
] | 1 | 2021-04-28T21:35:01.000Z | 2021-04-28T21:35:01.000Z | lib/elixir/test/elixir/io/ansi/docs_test.exs | wstrinz/elixir | 1048b34d6c816f8e5dbd4fdbaaf9baa41b4f0d95 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/io/ansi/docs_test.exs | wstrinz/elixir | 1048b34d6c816f8e5dbd4fdbaaf9baa41b4f0d95 | [
"Apache-2.0"
] | 1 | 2021-09-30T01:21:02.000Z | 2021-09-30T01:21:02.000Z | Code.require_file("../../test_helper.exs", __DIR__)
defmodule IO.ANSI.DocsTest do
  # Tests for IO.ANSI.Docs: markdown-to-ANSI rendering of headings,
  # code blocks, lists, emphasis/escapes, links and tables. Expected
  # values embed raw ANSI escape sequences ("\e[...m").
  use ExUnit.Case, async: true

  import ExUnit.CaptureIO

  # Renders `str` as a heading and strips the trailing newline.
  def format_heading(str) do
    capture_io(fn -> IO.ANSI.Docs.print_heading(str, []) end) |> String.trim_trailing()
  end

  # Renders `str` as a markdown body and strips the trailing newline.
  def format(str) do
    capture_io(fn -> IO.ANSI.Docs.print(str, []) end) |> String.trim_trailing()
  end

  test "heading is formatted" do
    result = format_heading("wibble")
    assert String.starts_with?(result, "\e[0m\n\e[7m\e[33m")
    assert String.ends_with?(result, "\e[0m\n\e[0m")
    assert String.contains?(result, " wibble ")
  end

  test "first level heading is converted" do
    result = format("# wibble\n\ntext\n")
    assert result == "\e[33m# wibble\e[0m\n\e[0m\ntext\n\e[0m"
  end

  test "second level heading is converted" do
    result = format("## wibble\n\ntext\n")
    assert result == "\e[33m## wibble\e[0m\n\e[0m\ntext\n\e[0m"
  end

  test "third level heading is converted" do
    result = format("### wibble\n\ntext\n")
    assert result == "\e[33m### wibble\e[0m\n\e[0m\ntext\n\e[0m"
  end

  test "code block is converted" do
    result = format("line\n\n code\n code2\n\nline2\n")
    assert result == "line\n\e[0m\n\e[36m code\n code2\e[0m\n\e[0m\nline2\n\e[0m"
  end

  test "fenced code block is converted" do
    result = format("line\n```\ncode\ncode2\n```\nline2\n")
    assert result == "line\n\e[0m\n\e[36m code\n code2\e[0m\n\e[0m\nline2\n\e[0m"
    result = format("line\n```elixir\ncode\ncode2\n```\nline2\n")
    assert result == "line\n\e[0m\n\e[36m code\n code2\e[0m\n\e[0m\nline2\n\e[0m"
    result = format("line\n~~~elixir\ncode\n```\n~~~\nline2\n")
    assert result == "line\n\e[0m\n\e[36m code\n ```\e[0m\n\e[0m\nline2\n\e[0m"
  end

  test "* list is converted" do
    result = format("* one\n* two\n* three\n")
    assert result == " • one\n • two\n • three\n\e[0m"
  end

  test "* list surrounded by text is converted" do
    result = format("Count:\n\n* one\n* two\n* three\n\nDone")
    assert result == "Count:\n\e[0m\n • one\n • two\n • three\n\e[0m\nDone\n\e[0m"
  end

  test "* list with continuation is converted" do
    result = format("* one\ntwo\n\n three\nfour\n* five")
    assert result == " • one two\n three four\n\e[0m\n • five\n\e[0m"
  end

  test "* nested lists are converted" do
    result = format("* one\n * one.one\n * one.two\n* two")
    assert result == " • one\n • one.one\n • one.two\n\e[0m\n • two\n\e[0m"
  end

  test "* lists with spaces are converted" do
    result = format(" * one\n * two\n * three")
    assert result == " • one\n • two\n • three\n\e[0m"
  end

  test "* lists with code" do
    result = format(" * one\n two three")
    assert result == " • one\n\e[36m two three\e[0m\n\e[0m\n\e[0m"
  end

  test "- list is converted" do
    result = format("- one\n- two\n- three\n")
    assert result == " • one\n • two\n • three\n\e[0m"
  end

  test "+ list is converted" do
    result = format("+ one\n+ two\n+ three\n")
    assert result == " • one\n • two\n • three\n\e[0m"
  end

  test "+ and - nested lists are converted" do
    result = format("- one\n + one.one\n + one.two\n- two")
    assert result == " • one\n • one.one\n • one.two\n\e[0m\n • two\n\e[0m"
  end

  test "paragraphs are split" do
    result = format("para1\n\npara2")
    assert result == "para1\n\e[0m\npara2\n\e[0m"
  end

  test "extra whitespace is ignored between paras" do
    result = format("para1\n \npara2")
    assert result == "para1\n\e[0m\npara2\n\e[0m"
  end

  test "extra whitespace doesn't mess up a following list" do
    result = format("para1\n \n* one\n* two")
    assert result == "para1\n\e[0m\n • one\n • two\n\e[0m"
  end

  # Emphasis markers: *bold*, **bold**, _underline_, `code`.
  test "star/underscore/backtick works" do
    result = format("*world*")
    assert result == "\e[1mworld\e[0m\n\e[0m"
    result = format("*world*.")
    assert result == "\e[1mworld\e[0m.\n\e[0m"
    result = format("**world**")
    assert result == "\e[1mworld\e[0m\n\e[0m"
    result = format("_world_")
    assert result == "\e[4mworld\e[0m\n\e[0m"
    result = format("`world`")
    assert result == "\e[36mworld\e[0m\n\e[0m"
  end

  test "star/underscore/backtick works across words" do
    result = format("*hello world*")
    assert result == "\e[1mhello world\e[0m\n\e[0m"
    result = format("**hello world**")
    assert result == "\e[1mhello world\e[0m\n\e[0m"
    result = format("_hello world_")
    assert result == "\e[4mhello world\e[0m\n\e[0m"
    result = format("`hello world`")
    assert result == "\e[36mhello world\e[0m\n\e[0m"
  end

  test "multiple stars/underscores/backticks work" do
    result = format("*hello world* *hello world*")
    assert result == "\e[1mhello world\e[0m \e[1mhello world\e[0m\n\e[0m"
    result = format("_hello world_ _hello world_")
    assert result == "\e[4mhello world\e[0m \e[4mhello world\e[0m\n\e[0m"
    result = format("`hello world` `hello world`")
    assert result == "\e[36mhello world\e[0m \e[36mhello world\e[0m\n\e[0m"
  end

  test "multiple stars/underscores/backticks work when separated by other words" do
    result = format("*hello world* unit test *hello world*")
    assert result == "\e[1mhello world\e[0m unit test \e[1mhello world\e[0m\n\e[0m"
    result = format("_hello world_ unit test _hello world_")
    assert result == "\e[4mhello world\e[0m unit test \e[4mhello world\e[0m\n\e[0m"
    result = format("`hello world` unit test `hello world`")
    assert result == "\e[36mhello world\e[0m unit test \e[36mhello world\e[0m\n\e[0m"
  end

  test "star/underscore preceded by space doesn't get interpreted" do
    result = format("_unit _size")
    assert result == "_unit _size\n\e[0m"
    result = format("**unit **size")
    assert result == "**unit **size\n\e[0m"
    result = format("*unit *size")
    assert result == "*unit *size\n\e[0m"
  end

  test "star/underscore/backtick preceded by non-space delimiters gets interpreted" do
    result = format("(`hello world`)")
    assert result == "(\e[36mhello world\e[0m)\n\e[0m"
    result = format("<`hello world`>")
    assert result == "<\e[36mhello world\e[0m>\n\e[0m"
    result = format("(*hello world*)")
    assert result == "(\e[1mhello world\e[0m)\n\e[0m"
    result = format("@*hello world*@")
    assert result == "@\e[1mhello world\e[0m@\n\e[0m"
    result = format("(_hello world_)")
    assert result == "(\e[4mhello world\e[0m)\n\e[0m"
    result = format("'_hello world_'")
    assert result == "'\e[4mhello world\e[0m'\n\e[0m"
  end

  test "star/underscore/backtick starts/ends within a word doesn't get interpreted" do
    result = format("foo_bar, foo_bar_baz!")
    assert result == "foo_bar, foo_bar_baz!\n\e[0m"
    result = format("_foo_bar")
    assert result == "_foo_bar\n\e[0m"
    result = format("foo_bar_")
    assert result == "foo_bar_\n\e[0m"
    result = format("foo*bar, foo*bar*baz!")
    assert result == "foo*bar, foo*bar*baz!\n\e[0m"
    result = format("*foo*bar")
    assert result == "*foo*bar\n\e[0m"
    result = format("foo*bar*")
    assert result == "foo*bar*\n\e[0m"
  end

  test "backtick preceded by space gets interpreted" do
    result = format("`unit `size")
    assert result == "\e[36munit \e[0msize\n\e[0m"
  end

  test "star/underscore/backtick with leading escape" do
    result = format("\\_unit_")
    assert result == "_unit_\n\e[0m"
    result = format("\\*unit*")
    assert result == "*unit*\n\e[0m"
    result = format("\\`unit`")
    assert result == "`unit`\n\e[0m"
  end

  test "star/underscore/backtick with closing escape" do
    result = format("_unit\\_")
    assert result == "_unit_\n\e[0m"
    result = format("*unit\\*")
    assert result == "*unit*\n\e[0m"
    # Inside code spans the backslash is kept literally.
    result = format("`unit\\`")
    assert result == "\e[36munit\\\e[0m\n\e[0m"
  end

  test "star/underscore/backtick with double escape" do
    result = format("\\\\*world*")
    assert result == "\\\e[1mworld\e[0m\n\e[0m"
    result = format("\\\\_world_")
    assert result == "\\\e[4mworld\e[0m\n\e[0m"
    result = format("\\\\`world`")
    assert result == "\\\e[36mworld\e[0m\n\e[0m"
  end

  test "star/underscore/backtick when incomplete" do
    result = format("unit_size")
    assert result == "unit_size\n\e[0m"
    result = format("unit`size")
    assert result == "unit`size\n\e[0m"
    result = format("unit*size")
    assert result == "unit*size\n\e[0m"
    result = format("unit**size")
    assert result == "unit**size\n\e[0m"
  end

  test "backtick with escape" do
    result = format("`\\`")
    assert result == "\e[36m\\\e[0m\n\e[0m"
  end

  test "backtick close to underscores gets interpreted as code" do
    result = format("`__world__`")
    assert result == "\e[36m__world__\e[0m\n\e[0m"
  end

  test "escaping of underlines within links" do
    result = format("(https://en.wikipedia.org/wiki/ANSI_escape_code)")
    assert result == "(https://en.wikipedia.org/wiki/ANSI_escape_code)\n\e[0m"
    result = format("[ANSI escape code](https://en.wikipedia.org/wiki/ANSI_escape_code)")
    assert result == "ANSI escape code (https://en.wikipedia.org/wiki/ANSI_escape_code)\n\e[0m"
  end

  test "escaping of underlines within links does not escape surrounding text" do
    result = format("_emphasis_ (https://en.wikipedia.org/wiki/ANSI_escape_code) more _emphasis_")

    assert result ==
      "\e[4memphasis\e[0m (https://en.wikipedia.org/wiki/ANSI_escape_code) more \e[4memphasis\e[0m\n\e[0m"
  end

  test "lone thing that looks like a table line isn't" do
    assert format("one\n2 | 3\ntwo\n") == "one 2 | 3 two\n\e[0m"
  end

  test "lone table line at end of input isn't" do
    assert format("one\n2 | 3") == "one 2 | 3\n\e[0m"
  end

  test "two successive table lines are a table" do
    # note spacing
    assert format("a | b\none | two\n") == "a | b \none | two\n\e[0m"
  end

  test "table with heading" do
    assert format("column 1 | and 2\n-- | --\na | b\none | two\n") ==
      "\e[7mcolumn 1 | and 2\e[0m\na | b \none | two \n\e[0m"
  end

  test "table with heading alignment" do
    table = """
    column 1 | 2 | and three
    -------: | :------: | :-----
    a | even | c\none | odd | three
    """

    # NOTE(review): the expected cell padding below (leading alignment
    # spaces) appears to have been stripped by whatever produced this
    # copy of the file — verify against the upstream original.
    expected =
      """
      \e[7mcolumn 1 | 2 | and three\e[0m
      a | even | c\s\s\s\s\s\s\s\s
      one | odd | three\s\s\s\s
      \e[0m
      """
      |> String.trim_trailing()

    assert format(table) == expected
  end

  test "table with formatting in cells" do
    assert format("`a` | _b_\nc | d") == "\e[36ma\e[0m | \e[4mb\e[0m\nc | d\n\e[0m"
    assert format("`abc` | d \n`e` | f") == "\e[36mabc\e[0m | d\n\e[36me\e[0m | f\n\e[0m"
  end

  test "table with variable number of columns" do
    assert format("a | b | c\nd | e") == "a | b | c\nd | e | \n\e[0m"
  end

  test "one reference link label per line" do
    assert format(" [id]: //example.com\n [Elixir]: http://elixir-lang.org") ==
      " [id]: //example.com\n [Elixir]: http://elixir-lang.org"
  end
end
| 32.828909 | 113 | 0.607961 |
733ea35c05c9821717e126d0818d36de1fb3952a | 946 | ex | Elixir | lib/outer/transactions/wallet_worker.ex | karolsluszniak/clustered_queue_and_pool-phoenix1.6 | cbf8f51a3c72cb7a5bde31839c1de056775af4f3 | [
"MIT"
] | null | null | null | lib/outer/transactions/wallet_worker.ex | karolsluszniak/clustered_queue_and_pool-phoenix1.6 | cbf8f51a3c72cb7a5bde31839c1de056775af4f3 | [
"MIT"
] | null | null | null | lib/outer/transactions/wallet_worker.ex | karolsluszniak/clustered_queue_and_pool-phoenix1.6 | cbf8f51a3c72cb7a5bde31839c1de056775af4f3 | [
"MIT"
] | null | null | null | defmodule Outer.Transactions.WalletWorker do
  # Per-wallet worker process: runs a transaction against its wallet and
  # then hands the wallet back to the WalletManager.
  use GenServer

  require Logger

  alias Outer.Transactions.TransactionClient

  def start_link(wallet) do
    Logger.debug("wallet worker starting for wallet #{wallet.auth_token}")
    GenServer.start_link(__MODULE__, wallet)
  end

  @impl true
  def init(wallet) do
    # Trap exits so terminate/2 runs when this worker is shut down.
    Process.flag(:trap_exit, true)
    {:ok, wallet}
  end

  @impl true
  def terminate(_reason, wallet) do
    Logger.debug("wallet worker terminating for wallet #{wallet.auth_token}")
    wallet
  end

  @impl true
  def handle_cast({:make_transaction, transaction, from}, wallet) do
    # Ensure balance/funds, execute the transaction, then release the
    # wallet to the manager BEFORE replying to the caller. `from` rides
    # inside the cast and is answered via GenServer.reply/2 — presumably
    # the caller blocks in a matching GenServer.call elsewhere; confirm
    # against the WalletManager implementation.
    wallet =
      wallet
      |> TransactionClient.ensure_wallet_balance()
      |> TransactionClient.ensure_wallet_funds(transaction.amount)
      |> TransactionClient.make_transaction(transaction)

    send(Outer.Transactions.WalletManager, {:release_wallet, self(), wallet})
    GenServer.reply(from, :ok)
    {:noreply, wallet}
  end
end
| 25.567568 | 77 | 0.724101 |
733eb6260d8bdb50d857e84c3d134f80c7ee9d6d | 1,689 | exs | Elixir | test/integration/public_channel_test.exs | robktek/poxa | 1c330789735f9bc1e91c3dfa7a10edb3aff50aab | [
"MIT"
] | null | null | null | test/integration/public_channel_test.exs | robktek/poxa | 1c330789735f9bc1e91c3dfa7a10edb3aff50aab | [
"MIT"
] | null | null | null | test/integration/public_channel_test.exs | robktek/poxa | 1c330789735f9bc1e91c3dfa7a10edb3aff50aab | [
"MIT"
] | null | null | null | defmodule Poxa.Integration.PublicChannelTest do
use ExUnit.Case
@moduletag :integration
setup_all do
Application.ensure_all_started(:pusher)
Pusher.configure!("localhost", 8080, "app_id", "app_key", "secret")
:ok
end
setup do
{:ok, pid, socket_id} = Connection.connect
on_exit fn -> PusherClient.disconnect! pid end
{:ok, [pid: pid, socket_id: socket_id]}
end
test "subscribe to a public channel", context do
pid = context[:pid]
channel = "channel"
PusherClient.subscribe!(pid, channel)
assert_receive %{channel: ^channel,
event: "pusher:subscription_succeeded",
data: %{}}, 1_000
end
test "subscribe to a public channel and trigger event", context do
pid = context[:pid]
channel = "channel"
PusherClient.subscribe!(pid, channel)
assert_receive %{channel: ^channel,
event: "pusher:subscription_succeeded",
data: %{}}, 1_000
Pusher.trigger("test_event", %{data: 42}, channel)
assert_receive %{channel: ^channel,
event: "test_event",
data: %{"data" => 42}}, 1_000
end
test "subscribe to a public channel and trigger event excluding itself()", context do
pid = context[:pid]
channel = "channel"
PusherClient.subscribe!(pid, channel)
assert_receive %{channel: ^channel,
event: "pusher:subscription_succeeded",
data: %{}}, 1_000
Pusher.trigger("test_event", %{data: 42}, channel, context[:socket_id])
refute_receive %{channel: ^channel, event: "test_event", data: %{"data" => 42}}, 1_000
end
end
| 26.809524 | 90 | 0.613973 |
733ebc60aae181bf3df2893d3dcf8974198ae238 | 4,552 | ex | Elixir | apps/studio/lib/studio.ex | danmarcab/deep_painting | 860c7d02bd6b112fffa199f715e61d895cba6623 | [
"Apache-2.0"
] | null | null | null | apps/studio/lib/studio.ex | danmarcab/deep_painting | 860c7d02bd6b112fffa199f715e61d895cba6623 | [
"Apache-2.0"
] | 11 | 2020-01-28T22:19:10.000Z | 2022-03-11T23:18:18.000Z | apps/studio/lib/studio.ex | danmarcab/deep_painting | 860c7d02bd6b112fffa199f715e61d895cba6623 | [
"Apache-2.0"
] | null | null | null | defmodule Studio do
@moduledoc """
Studio provides funcions to create, set the settings and start the process to create a painting.
"""
alias Studio.Painter
@doc """
Creates an empty painting with a given name. Name must be unique.
## Examples
iex> Studio.create_painting("My painting")
:ok
iex> Studio.create_painting("My painting")
{:error, :already_created}
"""
def create_painting(name) do
if storage().exists?(storage_name(), name) do
{:error, :already_created}
else
storage().save(storage_name(), Painting.new(name))
end
end
@doc """
Adds content to an existing painting with a given name.
## Examples
iex> Studio.create_painting("My painting")
:ok
iex> Studio.add_painting_content("My painting", "img/content.png")
:ok
iex> Studio.add_painting_content("Not my painting", "img/content.png")
{:error, :not_created}
"""
def add_painting_content(name, content) do
if storage().exists?(storage_name(), name) do
{:ok, painting} = storage().find(storage_name(), name)
storage().save(storage_name(), Painting.add_content(painting, content))
else
{:error, :not_created}
end
end
@doc """
Adds style to an existing painting with a given name.
## Examples
iex> Studio.create_painting("My painting")
:ok
iex> Studio.add_painting_style("My painting", "img/style.png")
:ok
iex> Studio.add_painting_style("Not my painting", "img/style.png")
{:error, :not_created}
"""
def add_painting_style(name, style) do
if storage().exists?(storage_name(), name) do
{:ok, painting} = storage().find(storage_name(), name)
storage().save(storage_name(), Painting.add_style(painting, style))
else
{:error, :not_created}
end
end
@doc """
Adds settings to an existing painting with a given name.
## Examples
iex> Studio.create_painting("My painting")
:ok
iex> Studio.add_painting_settings("My painting", Painting.Settings.new())
:ok
iex> Studio.add_painting_settings("Not my painting", Painting.Settings.new())
{:error, :not_created}
"""
def add_painting_settings(name, %Painting.Settings{} = settings) do
if storage().exists?(storage_name(), name) do
{:ok, painting} = storage().find(storage_name(), name)
storage().save(storage_name(), Painting.add_settings(painting, settings))
else
{:error, :not_created}
end
end
@doc """
Finds an existing painting with a given name.
## Examples
iex> Studio.create_painting("My painting")
:ok
iex> {:ok, %Painting{} = painting} = Studio.find_painting("My painting")
iex> painting.name
"My painting"
iex> Studio.find_painting("Not my painting")
:error
"""
def find_painting(name) do
storage().find(storage_name(), name)
end
@doc """
Returns a map with all exisiting paintings (name of the painting as key, Painting as value)
## Examples
iex> Studio.create_painting("My painting")
:ok
iex> Studio.create_painting("My painting 2")
:ok
iex> paintings_map = Studio.all_paintings()
iex> Map.keys(paintings_map)
["My painting", "My painting 2"]
"""
def all_paintings do
storage().all(storage_name())
end
@doc """
Saves a painting with a given name.
## Examples
iex> p = %Painting{name: "My painting", content: "my_content"}
iex> :ok = Studio.save_painting(p)
iex> {:ok, %Painting{} = painting} = Studio.find_painting("My painting")
iex> {painting.name, painting.content}
{"My painting", "my_content"}
"""
def save_painting(painting) do
storage().save(storage_name(), painting)
end
# TODO: add doc/tests
@spec start_painting(name :: String.t, callback_url :: String.t) :: GenServer.on_start
def start_painting(name, callback_url \\ "") when is_binary(name) and is_binary(callback_url) do
Painter.start_link(name, name: painter_name(name), watcher: Studio.Painting.Broker, callback_url: callback_url)
end
# TODO: add doc/tests
def stop_painting(name) do
Painter.stop(painter_name(name))
end
def clear_storage do
storage().clear(storage_name())
end
defp storage do
Application.get_env(:studio, :painting_storage)[:type]
end
defp storage_name do
Application.get_env(:studio, :painting_storage)[:name]
end
defp painter_name(painting_name) do
{:via, Registry, {Studio.Painter, painting_name}}
end
end
| 26.465116 | 115 | 0.657953 |
733ee4c866adc565afaa4f33200fccb6b9bc153d | 2,847 | ex | Elixir | apps/cashtrail/test/support/factory/banking_factory.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | 6 | 2020-05-02T01:12:24.000Z | 2020-06-23T12:31:32.000Z | apps/cashtrail/test/support/factory/banking_factory.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | 16 | 2020-05-02T00:07:51.000Z | 2021-06-11T01:54:11.000Z | apps/cashtrail/test/support/factory/banking_factory.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | null | null | null | defmodule Cashtrail.Factory.BankingFactory do
@moduledoc false
alias Cashtrail.Banking
alias Cashtrail.Factory.Helpers
defmacro __using__(_opts) do
# Generate a sequence pair from A to Z as first letter, and Z to A as second
# letter in compile time
sequence = for(a <- 65..90, b <- 90..65, do: [a, b] |> to_string)
quote do
# unquote the generated sequence
@iso_code_sequence unquote(sequence)
def institution_factory(attrs \\ %{}) do
logo_url =
"#{Faker.Internet.image_url()}#{Enum.random([".png", ".jpg", ".jpeg", ".gif", ""])}"
%Banking.Institution{
country: Faker.Address.country(),
bank_code: generate_bank_code(),
swift: generate_swift(),
logo_url: logo_url,
contact: build(:contact)
}
|> Helpers.put_tenant(attrs)
|> merge_attributes(Helpers.drop_tenant(attrs))
end
def account_factory(attrs \\ %{}) do
initial_balance_amount =
(:rand.uniform() * Enum.random([10, 100, 1000, 10_000]))
|> Float.round(Enum.random(0..10))
|> Decimal.from_float()
restricted_transaction_types =
[:income, :expense, :tax, :transfer, :exchange, :refund]
|> Enum.take(Enum.random(0..6))
%Banking.Account{
description: sequence(:account, &"Account #{&1}"),
type: Enum.random([:cash, :checking, :saving, :digital, :credit, :investment, :other]),
initial_balance_amount: initial_balance_amount,
initial_balance_date: ~D[2010-01-01] |> Date.range(Date.utc_today()) |> Enum.random(),
avatar_url: Faker.Avatar.image_url(),
restricted_transaction_types: restricted_transaction_types,
predicted_account: nil,
identifier: %Banking.AccountIdentifier{
bank_code: Enum.random(1..999) |> to_string() |> String.pad_leading(3, "0"),
branch: Enum.random(1..9999) |> to_string() |> String.pad_leading(4, "0"),
number: Enum.random(1..999_999) |> to_string() |> String.pad_leading(6, "0"),
swift: generate_swift(),
iban: Faker.Code.iban()
},
currency: Enum.random(["AAA", "BBB", "XXX"])
}
|> Helpers.put_tenant(attrs)
|> merge_attributes(Helpers.drop_tenant(attrs))
end
def generate_bank_code do
Enum.random(1..999) |> to_string() |> String.pad_leading(3, "0")
end
defp generate_swift() do
country_code = Faker.Address.country_code()
bank_code = for(_ <- 1..4, do: [Enum.random(65..90)]) |> to_string()
region =
for(_ <- 1..2, do: Enum.random([Enum.random(65..90), Enum.random(48..57)]))
|> to_string()
"#{bank_code}#{country_code}#{region}XXX"
end
end
end
end
| 36.037975 | 97 | 0.591149 |
733ee86a4c18f0b3db26a240c28ca6f4c8f7a24a | 1,268 | exs | Elixir | test/auto_api/capabilities/engine_capability_test.exs | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 4 | 2018-01-19T16:11:10.000Z | 2019-12-13T16:35:10.000Z | test/auto_api/capabilities/engine_capability_test.exs | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 5 | 2020-07-16T07:20:21.000Z | 2021-09-22T10:18:04.000Z | test/auto_api/capabilities/engine_capability_test.exs | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 1 | 2021-02-17T18:36:13.000Z | 2021-02-17T18:36:13.000Z | # AutoAPI
# The MIT License
#
# Copyright (c) 2018- High-Mobility GmbH (https://high-mobility.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
defmodule AutoApi.EngineCapabilityTest do
  # Coverage comes entirely from the doctests embedded in
  # AutoApi.EngineCapability's documentation.
  use ExUnit.Case, async: true
  doctest AutoApi.EngineCapability
end
| 46.962963 | 79 | 0.779968 |
733f00701176e890037e6d8bdb087c80f9631d96 | 440 | ex | Elixir | testData/org/elixir_lang/formatting/no_parentheses_call_multiple_arguments_with_wrapped_map_before.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/formatting/no_parentheses_call_multiple_arguments_with_wrapped_map_before.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/formatting/no_parentheses_call_multiple_arguments_with_wrapped_map_before.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | defmodule Calcinator.Resources.Ecto.RepoTest do
# 111
describe "list/1 with minimum page size with default page size with maximum page size" do
setup [:minimum_page_size, :default_page_size, :maximum_page_size]
test "without page query option returns page with default page size", %{page_size: %{default: default}} do
assert_three_pages %{
page_size: default,
query_options: %{}
}
end
end
end
| 31.428571 | 110 | 0.704545 |
733f2218e1bf2e69fe051a96405c4c2c2ce52412 | 114 | exs | Elixir | apps/ex_wire/test/ex_wire/packet/block_bodies_test.exs | atoulme/ethereum | cebb0756c7292ac266236636d2ab5705cb40a52e | [
"MIT"
] | 14 | 2017-08-21T06:14:49.000Z | 2020-05-15T12:00:52.000Z | apps/ex_wire/test/ex_wire/packet/block_bodies_test.exs | atoulme/ethereum | cebb0756c7292ac266236636d2ab5705cb40a52e | [
"MIT"
] | 7 | 2017-08-11T07:50:14.000Z | 2018-08-23T20:42:50.000Z | apps/ex_wire/test/ex_wire/packet/block_bodies_test.exs | atoulme/ethereum | cebb0756c7292ac266236636d2ab5705cb40a52e | [
"MIT"
] | 3 | 2017-08-20T17:56:41.000Z | 2018-08-21T00:36:10.000Z | defmodule ExWire.Packet.BlockBodiesTest do
use ExUnit.Case, async: true
doctest ExWire.Packet.BlockBodies
end | 22.8 | 42 | 0.815789 |
733f4c3dca70f62a62c2a278cea29402e3a3f838 | 1,116 | ex | Elixir | lib/teiserver/benchmark/benchmark_task.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 6 | 2021-02-08T10:42:53.000Z | 2021-04-25T12:12:03.000Z | lib/teiserver/benchmark/benchmark_task.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/teiserver/benchmark/benchmark_task.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule Mix.Tasks.Benchmark do
@moduledoc """
Used to stress test a server. Will gradually spawn users until stopped.
Usage:
mix benchmark server port
e.g.
mix benchmark example.com 8200
"""
use Mix.Task
@shortdoc "Starts the benchmark processes for the server"
def run([server, port]) do
# Mix.Task.run("app.start")
Logger.configure(level: :info)
children = [
# Benchmark stuff
{Registry, keys: :unique, name: Teiserver.Benchmark.UserRegistry},
{DynamicSupervisor, strategy: :one_for_one, name: Teiserver.Benchmark.UserSupervisor},
{Teiserver.Benchmark.StatsClient, name: Teiserver.Benchmark.StatsClient}
]
opts = [strategy: :one_for_one, name: Teiserver.Benchmark.Supervisor]
start_result = Supervisor.start_link(children, opts)
# Call all our sub function startup
{:ok, t} = Task.start(fn -> startup() end)
send(t, :begin)
send(Teiserver.Benchmark.StatsClient, {:begin, server, port})
:timer.sleep(300_000_000)
start_result
end
defp startup() do
receive do
:begin -> nil
end
end
end
| 23.744681 | 92 | 0.680108 |
733f76e78fd4bf46590e5d78ad3169ea5b4d1b5a | 556 | exs | Elixir | test/ann/math/linear_test.exs | rdk08/annex | 68380ae23f079cac1dcef7bdfbe6541e19f41864 | [
"MIT"
] | 1 | 2021-02-04T07:03:31.000Z | 2021-02-04T07:03:31.000Z | test/ann/math/linear_test.exs | rdk08/ann | 68380ae23f079cac1dcef7bdfbe6541e19f41864 | [
"MIT"
] | null | null | null | test/ann/math/linear_test.exs | rdk08/ann | 68380ae23f079cac1dcef7bdfbe6541e19f41864 | [
"MIT"
] | null | null | null | defmodule ANN.Math.LinearTest do
use ExUnit.Case, async: true
alias ANN.Math.Linear
test "linear function" do
input = %{
values: [-6, -1, 0, 1, 6]
}
output = Enum.map(input.values, &Linear.call/1)
expected_output = [-6, -1, 0, 1, 6]
assert output == expected_output
end
test "derivative of linear function" do
input = %{
values: [-6, -1, 0, 1, 6]
}
output = Enum.map(input.values, &Linear.call(&1, :derivative))
expected_output = [1, 1, 1, 1, 1]
assert output == expected_output
end
end
| 21.384615 | 66 | 0.602518 |
733f7bcd490ac0bc5b9c08775cd01f4808e184c5 | 1,028 | exs | Elixir | mix.exs | ndalmia/ex_lock | 02dcca2ea2f214952d579ce1b4466ffaadd21bf1 | [
"MIT"
] | 3 | 2020-10-28T11:56:29.000Z | 2021-07-10T13:04:44.000Z | mix.exs | ndalmia/ex_lock | 02dcca2ea2f214952d579ce1b4466ffaadd21bf1 | [
"MIT"
] | null | null | null | mix.exs | ndalmia/ex_lock | 02dcca2ea2f214952d579ce1b4466ffaadd21bf1 | [
"MIT"
] | null | null | null | defmodule ExLock.MixProject do
use Mix.Project
@version "0.1.1"
def project do
[
app: :ex_lock,
version: @version,
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
description: description(),
package: package(),
source_url: "https://github.com/ndalmia/ex_lock/",
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
defp description do
"""
Postgres advisory lock backed elixir Library for locking critical section of code running on multiple machines.
"""
end
defp package do
[
files: ["lib", "mix.exs", "README*", "LICENSE*"],
maintainers: ["Nishant Dalmia"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/ndalmia/ex_lock"}
]
end
defp deps do
[
{:ecto_sql, "~> 3.0"},
{:postgrex, ">= 0.0.0"},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
]
end
end
| 20.979592 | 115 | 0.572957 |
733f7d6ce58c9998f20a54de0b1e4ee0d10cf836 | 2,194 | ex | Elixir | lib/util/frame_splitter.ex | SmartCasual/elixir-zeromq | 91430a6a662399d2d96be731a778e29e8425ce7f | [
"MIT"
] | null | null | null | lib/util/frame_splitter.ex | SmartCasual/elixir-zeromq | 91430a6a662399d2d96be731a778e29e8425ce7f | [
"MIT"
] | null | null | null | lib/util/frame_splitter.ex | SmartCasual/elixir-zeromq | 91430a6a662399d2d96be731a778e29e8425ce7f | [
"MIT"
] | null | null | null | defmodule ZeroMQ.FrameSplitter do
@moduledoc """
A GenServer which when fed a stream of binaries
will split out ZeroMQ frames and return the binary
blob (without parsing into a Command or Message).
"""
use GenServer
@doc """
Starts the splitter.
"""
def start_link do
GenServer.start_link(__MODULE__, :ok, [])
end
@doc """
Adds the provided binary blob to the current stream.
Returns `{:ok, count_of_full_frames_ready}`.
"""
def add_binary(splitter, blob) do
GenServer.call(splitter, {:add_binary, blob})
end
@doc """
Returns the (possibly empty) list of complete frame bodies and flags
as `{:ok, [{flags, frame_body}, ..]}`.
"""
def fetch(splitter) do
GenServer.call(splitter, :fetch)
end
@doc """
Initializes the state with a nil size & flags, the empty stream in progress
and the list for parsed frame bodies.
"""
def init(:ok) do
{:ok, {nil, nil, <<>>, :queue.new}}
end
def handle_call({:add_binary, blob}, _from, {size, flags, stream, frame_bodies}) do
stream = stream <> blob
{flags, size, stream, frame_bodies} = extract_frame_body(flags, size, stream, frame_bodies)
{:reply, {:ok, :queue.len(frame_bodies)}, {size, flags, stream, frame_bodies}}
end
def handle_call(:fetch, _from, {size, flags, stream, frame_bodies}) do
{:reply, {:ok, :queue.to_list(frame_bodies)}, {size, flags, stream, :queue.new}}
end
defp extract_frame_body(flags, size, stream, frame_bodies) do
working_parts =
if size == nil || flags == nil do
ZeroMQ.Frame.extract_flags_and_size(stream)
else
{flags, size, stream}
end
if working_parts == :error do
if byte_size(stream) == 0 do
{nil, nil, stream, frame_bodies}
else
{flags, size, stream, frame_bodies}
end
else
{flags, size, stream} = working_parts
if byte_size(stream) >= size do
<<frame_body::binary-size(size), stream::binary>> = stream
frame_bodies = :queue.in({flags, frame_body}, frame_bodies)
extract_frame_body(nil, nil, stream, frame_bodies)
else
{flags, size, stream, frame_bodies}
end
end
end
end
| 27.08642 | 95 | 0.648587 |
733f8ae3e333ba28d208113a1a9a2c93867911fa | 6,987 | ex | Elixir | server/bitcoinex/lib/secp256k1/math.ex | runcitadel/bitfeed | 2169704007610b02ab9a94b03bfd1c2fa797d80f | [
"MIT"
] | 27 | 2022-01-01T14:58:22.000Z | 2022-03-22T06:11:18.000Z | server/bitcoinex/lib/secp256k1/math.ex | runcitadel/bitfeed | 2169704007610b02ab9a94b03bfd1c2fa797d80f | [
"MIT"
] | 22 | 2022-01-01T04:26:10.000Z | 2022-03-22T00:02:31.000Z | server/bitcoinex/lib/secp256k1/math.ex | runcitadel/bitfeed | 2169704007610b02ab9a94b03bfd1c2fa797d80f | [
"MIT"
] | 9 | 2022-01-03T07:34:36.000Z | 2022-03-14T23:19:56.000Z | defmodule Bitcoinex.Secp256k1.Math do
@moduledoc """
Contains math utilities when dealing with secp256k1 curve points and scalars.
All of the addition and multiplication uses the secp256k1 curve paramaters.
Several of the jacobian multiplication and addition functions are borrowed heavily from https://github.com/starkbank/ecdsa-elixir/.
"""
alias Bitcoinex.Secp256k1.{Params, Point}
import Bitcoinex.Secp256k1.Point
use Bitwise, only_operators: true
@doc """
pow performs integer pow,
where x is raised to the power of y.
"""
# Integer.pow/2 was added since 1.12.0. This function_exported? can be removed when we decide
# to only support >= 1.12.0 in the future
if function_exported?(Integer, :pow, 2) do
defdelegate pow(base, exponent), to: Integer
else
# copy from https://github.com/elixir-lang/elixir/blob/master/lib/elixir/lib/integer.ex#L104
@spec pow(integer, non_neg_integer) :: integer
def pow(base, exponent) when is_integer(base) and is_integer(exponent) and exponent >= 0 do
guarded_pow(base, exponent)
end
# https://en.wikipedia.org/wiki/Exponentiation_by_squaring
defp guarded_pow(_, 0), do: 1
defp guarded_pow(b, 1), do: b
defp guarded_pow(b, e) when (e &&& 1) == 0, do: guarded_pow(b * b, e >>> 1)
defp guarded_pow(b, e), do: b * guarded_pow(b * b, e >>> 1)
end
@doc """
Inv performs the Extended Euclidean Algorithm to to find
the inverse of a number x mod n.
"""
@spec inv(integer, pos_integer) :: integer
def inv(x, n) when is_integer(x) and is_integer(n) and n >= 1 do
do_inv(x, n)
end
defp do_inv(x, _n) when x == 0, do: 0
defp do_inv(x, n), do: do_inv(1, 0, modulo(x, n), n) |> modulo(n)
defp do_inv(lm, hm, low, high) when low > 1 do
r = div(high, low)
do_inv(
hm - lm * r,
lm,
high - low * r,
low
)
end
defp do_inv(lm, _hm, _low, _high) do
lm
end
@spec modulo(integer, integer) :: integer
def modulo(x, n) when is_integer(x) and is_integer(n) do
r = rem(x, n)
if r < 0, do: r + n, else: r
end
@doc """
multiply accepts a point P and scalar n and,
does jacobian multiplication to return resulting point.
"""
def multiply(p, n) when is_point(p) and is_integer(n) do
p
|> toJacobian()
|> jacobianMultiply(n)
|> fromJacobian()
end
@doc """
add accepts points p and q and,
does jacobian addition to return resulting point.
"""
def add(p, q) when is_point(p) and is_point(q) do
jacobianAdd(toJacobian(p), toJacobian(q))
|> fromJacobian()
end
# Convert our point P to jacobian coordinates.
defp toJacobian(p) do
%Point{x: p.x, y: p.y, z: 1}
end
# Convert our jacobian coordinates to a point P on secp256k1 curve.
defp fromJacobian(p) do
z = inv(p.z, Params.curve().p)
%Point{
x:
modulo(
p.x * pow(z, 2),
Params.curve().p
),
y:
modulo(
p.y * pow(z, 3),
Params.curve().p
)
}
end
# double Point P to get point P + P
# We use the dbl-1998-cmo-2 doubling formula.
# For reference, http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html.
defp jacobianDouble(p) do
if p.y == 0 do
%Point{x: 0, y: 0, z: 0}
else
# XX = X1^2
xsq =
pow(p.x, 2)
|> modulo(Params.curve().p)
# YY = Y1^2
ysq =
pow(p.y, 2)
|> modulo(Params.curve().p)
# S = 4 * X1 * YY
s =
(4 * p.x * ysq)
|> modulo(Params.curve().p)
# M = 3 * XX + a * Z1^4
m =
(3 * xsq + Params.curve().a * pow(p.z, 4))
|> modulo(Params.curve().p)
# T = M^2 - 2 * S
t =
(pow(m, 2) - 2 * s)
|> modulo(Params.curve().p)
# X3 = T
nx = t
# Y3 = M * (S - T) - 8 * YY^2
ny =
(m * (s - t) - 8 * pow(ysq, 2))
|> modulo(Params.curve().p)
# Z3 = 2 * Y1 * Z1
nz =
(2 * p.y * p.z)
|> modulo(Params.curve().p)
%Point{x: nx, y: ny, z: nz}
end
end
# add points P and Q to get P + Q
# We use the add-1998-cmo-2 addition formula.
# For reference, http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html.
defp jacobianAdd(p, q) do
if p.y == 0 do
q
else
if q.y == 0 do
p
else
# U1 = X1 * Z2^2
u1 =
(p.x * pow(q.z, 2))
|> modulo(Params.curve().p)
# U2 = X2 * Z2^2
u2 =
(q.x * pow(p.z, 2))
|> modulo(Params.curve().p)
# S1 = Y1 * Z2^3
s1 =
(p.y * pow(q.z, 3))
|> modulo(Params.curve().p)
# S2 = y2 * Z1^3
s2 =
(q.y * pow(p.z, 3))
|> modulo(Params.curve().p)
if u1 == u2 do
if s1 != s2 do
%Point{x: 0, y: 0, z: 1}
else
jacobianDouble(p)
end
else
# H = U2 - U1
h = u2 - u1
# r = S2 - S1
r = s2 - s1
# HH = H^2
h2 =
(h * h)
|> modulo(Params.curve().p)
# HHH = H * HH
h3 =
(h * h2)
|> modulo(Params.curve().p)
# V = U1 * HH
v =
(u1 * h2)
|> modulo(Params.curve().p)
# X3 = 42 - HHH - 2 * V
nx =
(pow(r, 2) - h3 - 2 * v)
|> modulo(Params.curve().p)
# Y3 = r * (V - X3) - S1 * HHH
ny =
(r * (v - nx) - s1 * h3)
|> modulo(Params.curve().p)
# Z3 = Z1 * Z2 * H
nz =
(h * p.z * q.z)
|> modulo(Params.curve().p)
%Point{x: nx, y: ny, z: nz}
end
end
end
end
# multply point P with scalar n
defp jacobianMultiply(_p, n) when n == 0 do
%Point{x: 0, y: 0, z: 1}
end
defp jacobianMultiply(p, n) when n == 1 do
if p.y == 0 do
%Point{x: 0, y: 0, z: 1}
else
p
end
end
defp jacobianMultiply(p, n)
# This integer is n, the integer order of G for secp256k1.
# Unfortunately cannot call Params.curve.n to get the curve order integer,
# so instead, it is pasted it here.
# In the future we should move it back to Params.
when n < 0 or
n >
115_792_089_237_316_195_423_570_985_008_687_907_852_837_564_279_074_904_382_605_163_141_518_161_494_337 do
if p.y == 0 do
%Point{x: 0, y: 0, z: 1}
else
jacobianMultiply(p, modulo(n, Params.curve().n))
end
end
defp jacobianMultiply(p, n) when rem(n, 2) == 0 do
if p.y == 0 do
%Point{x: 0, y: 0, z: 1}
else
jacobianMultiply(p, div(n, 2))
|> jacobianDouble()
end
end
defp jacobianMultiply(p, n) do
if p.y == 0 do
%Point{x: 0, y: 0, z: 1}
else
jacobianMultiply(p, div(n, 2))
|> jacobianDouble()
|> jacobianAdd(p)
end
end
end
| 24.43007 | 133 | 0.523114 |
733fa8c19485558c8d7896c15e27f1f7ab1ab3ff | 7,957 | ex | Elixir | clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/app_engine_http_request.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/app_engine_http_request.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/app_engine_http_request.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudTasks.V2beta2.Model.AppEngineHttpRequest do
@moduledoc """
App Engine HTTP request. The message defines the HTTP request that is sent to an App Engine app when the task is dispatched. This proto can only be used for tasks in a queue which has app_engine_http_target set. Using AppEngineHttpRequest requires [`appengine.applications.get`](https://cloud.google.com/appengine/docs/admin-api/access-control) Google IAM permission for the project and the following scope: `https://www.googleapis.com/auth/cloud-platform` The task will be delivered to the App Engine app which belongs to the same project as the queue. For more information, see [How Requests are Routed](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed) and how routing is affected by [dispatch files](https://cloud.google.com/appengine/docs/python/config/dispatchref). Traffic is encrypted during transport and never leaves Google datacenters. Because this traffic is carried over a communication mechanism internal to Google, you cannot explicitly set the protocol (for example, HTTP or HTTPS). The request to the handler, however, will appear to have used the HTTP protocol. The AppEngineRouting used to construct the URL that the task is delivered to can be set at the queue-level or task-level: * If set, app_engine_routing_override is used for all tasks in the queue, no matter what the setting is for the task-level app_engine_routing. The `url` that the task will be sent to is: * `url =` host `+` relative_url Tasks can be dispatched to secure app handlers, unsecure app handlers, and URIs restricted with [`login: admin`](https://cloud.google.com/appengine/docs/standard/python/config/appref). Because tasks are not run as any user, they cannot be dispatched to URIs restricted with [`login: required`](https://cloud.google.com/appengine/docs/standard/python/config/appref) Task dispatches also do not follow redirects. The task attempt has succeeded if the app's request handler returns an HTTP response code in the range [`200` - `299`]. 
The task attempt has failed if the app's handler returns a non-2xx response code or Cloud Tasks does not receive response before the deadline. Failed tasks will be retried according to the retry configuration. `503` (Service Unavailable) is considered an App Engine system error instead of an application error and will cause Cloud Tasks' traffic congestion control to temporarily throttle the queue's dispatches. Unlike other types of task targets, a `429` (Too Many Requests) response from an app handler does not cause traffic congestion control to throttle the queue.
## Attributes
- appEngineRouting (AppEngineRouting): Task-level setting for App Engine routing. If set, app_engine_routing_override is used for all tasks in the queue, no matter what the setting is for the task-level app_engine_routing. Defaults to: `null`.
- headers (%{optional(String.t) => String.t}): HTTP request headers. This map contains the header field names and values. Headers can be set when the task is created. Repeated headers are not supported but a header value can contain commas. Cloud Tasks sets some headers to default values: * `User-Agent`: By default, this header is `\"AppEngine-Google; (+http://code.google.com/appengine)\"`. This header can be modified, but Cloud Tasks will append `\"AppEngine-Google; (+http://code.google.com/appengine)\"` to the modified `User-Agent`. If the task has a payload, Cloud Tasks sets the following headers: * `Content-Type`: By default, the `Content-Type` header is set to `\"application/octet-stream\"`. The default can be overridden by explicitly setting `Content-Type` to a particular media type when the task is created. For example, `Content-Type` can be set to `\"application/json\"`. * `Content-Length`: This is computed by Cloud Tasks. This value is output only. It cannot be changed. The headers below cannot be set or overridden: * `Host` * `X-Google-*` * `X-AppEngine-*` In addition, Cloud Tasks sets some headers when the task is dispatched, such as headers containing information about the task; see [request headers](https://cloud.google.com/appengine/docs/python/taskqueue/push/creating-handlers#reading_request_headers). These headers are set only when the task is dispatched, so they are not visible when the task is returned in a Cloud Tasks response. Although there is no specific limit for the maximum number of headers or the size, there is a limit on the maximum size of the Task. For more information, see the CreateTask documentation. Defaults to: `null`.
- httpMethod (String.t): The HTTP method to use for the request. The default is POST. The app's request handler for the task's target URL must be able to handle HTTP requests with this http_method, otherwise the task attempt will fail with error code 405 (Method Not Allowed). See [Writing a push task request handler](https://cloud.google.com/appengine/docs/java/taskqueue/push/creating-handlers#writing_a_push_task_request_handler) and the documentation for the request handlers in the language your app is written in e.g. [Python Request Handler](https://cloud.google.com/appengine/docs/python/tools/webapp/requesthandlerclass). Defaults to: `null`.
- Enum - one of [HTTP_METHOD_UNSPECIFIED, POST, GET, HEAD, PUT, DELETE]
- payload (binary()): Payload. The payload will be sent as the HTTP message body. A message body, and thus a payload, is allowed only if the HTTP method is POST or PUT. It is an error to set a data payload on a task with an incompatible HttpMethod. Defaults to: `null`.
- relativeUrl (String.t): The relative URL. The relative URL must begin with \"/\" and must be a valid HTTP relative URL. It can contain a path and query string arguments. If the relative URL is empty, then the root path \"/\" will be used. No spaces are allowed, and the maximum length allowed is 2083 characters. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:appEngineRouting => GoogleApi.CloudTasks.V2beta2.Model.AppEngineRouting.t(),
:headers => map(),
:httpMethod => any(),
:payload => any(),
:relativeUrl => any()
}
field(:appEngineRouting, as: GoogleApi.CloudTasks.V2beta2.Model.AppEngineRouting)
field(:headers, type: :map)
field(:httpMethod)
field(:payload)
field(:relativeUrl)
end
defimpl Poison.Decoder, for: GoogleApi.CloudTasks.V2beta2.Model.AppEngineHttpRequest do
  # JSON decoding is delegated to the model's own decode/2, generated by ModelBase.
  def decode(value, options),
    do: GoogleApi.CloudTasks.V2beta2.Model.AppEngineHttpRequest.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CloudTasks.V2beta2.Model.AppEngineHttpRequest do
  # Encoding is shared across all generated models via the generic ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 130.442623 | 2,713 | 0.760085 |
733fa99a4c64984de7fdfb4de48e71f18b11555f | 740 | ex | Elixir | lib/hello_hipster_stack_web/gettext.ex | jmarca/hello_hipster_stack | 1391b8c9ba16f80d753a5694ed7c0c9d9b494bb4 | [
"Apache-2.0"
] | null | null | null | lib/hello_hipster_stack_web/gettext.ex | jmarca/hello_hipster_stack | 1391b8c9ba16f80d753a5694ed7c0c9d9b494bb4 | [
"Apache-2.0"
] | null | null | null | lib/hello_hipster_stack_web/gettext.ex | jmarca/hello_hipster_stack | 1391b8c9ba16f80d753a5694ed7c0c9d9b494bb4 | [
"Apache-2.0"
] | null | null | null | defmodule HelloHipsterStackWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import HelloHipsterStackWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :hello_hipster_stack
end
| 29.6 | 72 | 0.694595 |
733fac2fad3ef83964adb3ec2816ee12502bb748 | 634 | exs | Elixir | priv/repo/migrations/20181224155641_create_abilities.exs | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | priv/repo/migrations/20181224155641_create_abilities.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | priv/repo/migrations/20181224155641_create_abilities.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Data.Repo.Migrations.CreateAbilities do
use Ecto.Migration
  def change do
    # Lookup table of proficiencies; name and type are mandatory.
    create table(:proficiencies) do
      add(:name, :string, null: false)
      add(:type, :string, null: false)
      add(:description, :string)
      timestamps()
    end
    # Join table assigning proficiencies to classes at a given level and rank count.
    create table(:class_proficiencies) do
      add(:class_id, references(:classes), null: false)
      add(:proficiency_id, references(:proficiencies), null: false)
      add(:level, :integer, null: false)
      add(:ranks, :integer, null: false)
      timestamps()
    end
    # A class may hold each proficiency at most once.
    create index(:class_proficiencies, [:class_id, :proficiency_id], unique: true)
  end
end
| 25.36 | 82 | 0.664038 |
733fb1859d5b542d7ad23fc51a7bb108a55b0cbd | 512 | ex | Elixir | apps/evm/lib/evm/operation/metadata/exchange.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 152 | 2018-10-27T04:52:03.000Z | 2022-03-26T10:34:00.000Z | apps/evm/lib/evm/operation/metadata/exchange.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 270 | 2018-04-14T07:34:57.000Z | 2018-10-25T18:10:45.000Z | apps/evm/lib/evm/operation/metadata/exchange.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 25 | 2018-10-27T12:15:13.000Z | 2022-01-25T20:31:14.000Z | defmodule EVM.Operation.Metadata.Exchange do
@operations for n <- 1..17,
do: %EVM.Operation.Metadata{
# 0x90..0x9e
id: n + 0x8F,
description: "Exchange #{n}st and #{n + 1}nd stack items.",
sym: :"swap#{n}",
fun: :swap,
input_count: n + 1,
output_count: 2,
group: :exchange
}
def operations, do: @operations
end
| 32 | 79 | 0.417969 |
73400049cd9bc98ba840096dd6e2d7677dd51a04 | 3,520 | ex | Elixir | lib/sparql/language/parse_helper.ex | marcelotto/sparql-ex | 7bf939a2b0eec7e1096f6fdb999b07757995c145 | [
"MIT"
] | 23 | 2018-09-25T21:09:35.000Z | 2020-05-14T16:28:22.000Z | lib/sparql/language/parse_helper.ex | rdf-elixir/sparql-ex | 138141797a1993ecfb50ddac6c1d2977601a11ff | [
"MIT"
] | 2 | 2018-06-01T20:47:48.000Z | 2019-03-05T23:20:34.000Z | lib/sparql/language/parse_helper.ex | marcelotto/sparql-ex | 7bf939a2b0eec7e1096f6fdb999b07757995c145 | [
"MIT"
] | 2 | 2019-12-13T19:20:54.000Z | 2019-12-20T08:23:21.000Z | defmodule SPARQL.Language.ParseHelper do
alias RDF.Literal
def variable('?' ++ name), do: List.to_string(name)
def variable('$' ++ name), do: List.to_string(name)
  # TODO: Literal construction should not happen in the lexer, but during parsing;
  # grammars and RDF.Serialization.ParseHelper should be rewritten accordingly
  # Pulls the literal value out of a 3-element lexer token `{type, line, literal}`.
  def extract_literal({_, _, literal}), do: literal
def strip_sign(%Literal{} = literal) do
{_sign, number} = literal |> Literal.lexical() |> String.split_at(1)
Literal.update(literal, fn _ -> number end)
end
  # Finalizes a pending RPN accumulator into an AST node; any other value is
  # already a complete expression and passes through. Clause order matters.
  def arithmetic_expr({:rpn, rpn}), do: rpn_to_ast(rpn)
  def arithmetic_expr(expr), do: expr
  # Two-argument form. Clause order is significant: RPN-shaped arguments must
  # be intercepted before the generic AST-building clause.
  # Right operand is still an RPN accumulator: collapse it to an AST first.
  def arithmetic_expr(left, [operator, {:rpn, rpn}]) do
    arithmetic_expr(left, [operator, rpn_to_ast(rpn)])
  end
  # Left operand is still an RPN accumulator: collapse it to an AST first.
  def arithmetic_expr({:rpn, rpn}, [operator, right]) do
    arithmetic_expr(rpn_to_ast(rpn), [operator, right])
  end
  # Multiplicative operator on the left of an existing RPN tail: push operand
  # and operator onto the RPN list so precedence is resolved when collapsing.
  def arithmetic_expr([{left_operator, _}, left], {:rpn, rpn})
      when left_operator in ~w[* /]a do
    {:rpn, [left, left_operator | rpn]}
  end
  # Two adjacent multiplicative sub-expressions start a fresh RPN list.
  def arithmetic_expr([{left_operator, _}, left], [{right_operator, _}, right])
      when left_operator in ~w[* /]a and right_operator in ~w[* /]a do
    {:rpn, [left, left_operator, right, right_operator]}
  end
  # Plain binary expression: build the builtin-function-call AST node directly.
  def arithmetic_expr(left, [operator, right]) do
    {:builtin_function_call, arithmetic_operator(operator), [left, right]}
  end
  # A left operand in front of a pending RPN tail: prepend it and collapse.
  def arithmetic_expr(left, {:rpn, rpn}) do
    rpn_to_ast([left | rpn])
  end
  # Three-argument form carrying an explicit leading operator token.
  # Clause order matters: the RPN-shaped third argument is intercepted first.
  def arithmetic_expr(left_operator, left, [right_operator, {:rpn, rpn}]) do
    arithmetic_expr(left_operator, left, [right_operator, rpn_to_ast(rpn)])
  end
  # Start a fresh RPN list from operand/operator pairs.
  def arithmetic_expr(left_operator, left, [right_operator, right]) do
    {:rpn, [left, arithmetic_operator(left_operator), right, arithmetic_operator(right_operator)]}
  end
  # Prepend operand and operator to an already-pending RPN tail.
  def arithmetic_expr(left_operator, left, {:rpn, rpn}) do
    {:rpn, [left, arithmetic_operator(left_operator) | rpn]}
  end
  # Grammar quirk: a sign token lexed as part of the following term (e.g. in
  # `a - b * c`). Emits RPN so operator precedence is preserved when the list
  # is later collapsed by rpn_to_ast/1.
  def arithmetic_quirk_expr(sign, left, {:rpn, rpn}, [additive_operator, right]) do
    {:rpn,
     [left | rpn] ++ [
       sign,
       right,
       arithmetic_operator(additive_operator)
     ]
    }
  end
  # Same quirk with an explicit multiplicative pair instead of an RPN tail:
  # build the multiplicative sub-expression eagerly, then emit RPN around it.
  def arithmetic_quirk_expr(sign, left,
        [{multiplicative_operator, _}, middle],
        [additive_operator, right]) do
    {:rpn,
     [
       arithmetic_expr(left, [multiplicative_operator, middle]),
       sign,
       right,
       arithmetic_operator(additive_operator)
     ]
    }
  end
  # Merges a multiplicative operator into an existing RPN tail (first clause)
  # or starts a new RPN list (second clause). Clause order matters.
  def multiplicative_expr({left_operator, _}, left, {:rpn, [right | rpn]}) do
    {:rpn, [left, right, left_operator | rpn]}
  end
  def multiplicative_expr({left_operator, _}, left, right) do
    {:rpn, [left, right, left_operator]}
  end
  # Signed-term quirk for multiplicative expressions: append the sign to an
  # existing RPN tail, or start a new RPN list from an operator pair.
  def multiplicative_quirk_expr(sign, left, {:rpn, rpn}) do
    {:rpn, [left | rpn] ++ [sign]}
  end
  def multiplicative_quirk_expr(sign, left, [{right_operator, _}, right]) do
    {:rpn, [left, right, right_operator, sign]}
  end
  # Operator tokens arrive either as `{op, line}` lexer tuples or as bare
  # atoms; normalize both shapes to the atom.
  defp arithmetic_operator({operator, _}), do: operator
  defp arithmetic_operator(operator) when is_atom(operator), do: operator
  # Collapses a pending RPN list into a single AST node. The match on `[]`
  # asserts the list encodes exactly one well-formed expression.
  defp rpn_to_ast(rpn) do
    {function_call, []} = rpn |> Enum.reverse() |> do_rpn_to_ast()
    function_call
  end
  # Standard RPN evaluation over the reversed list: an operator consumes the
  # two sub-expressions that follow it. The operator clause must come first so
  # operator atoms are not mistaken for plain arguments.
  defp do_rpn_to_ast([operator | rest]) when operator in ~w[+ - * /]a do
    {right, rest} = do_rpn_to_ast(rest)
    {left, rest} = do_rpn_to_ast(rest)
    {{:builtin_function_call, operator, [left, right]}, rest}
  end
  defp do_rpn_to_ast([arg | rest]), do: {arg, rest}
end
| 30.608696 | 98 | 0.659659 |
734005dad4d28c3763dfec58ae6346e803230c7f | 1,279 | ex | Elixir | base/fc_base/lib/fc_base.ex | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 46 | 2018-10-13T23:18:13.000Z | 2021-08-07T07:46:51.000Z | base/fc_base/lib/fc_base.ex | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 25 | 2018-10-14T00:56:07.000Z | 2019-12-23T19:41:02.000Z | base/fc_base/lib/fc_base.ex | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 5 | 2018-12-16T04:39:51.000Z | 2020-10-01T12:17:03.000Z | defmodule FCBase do
  # Quoted block injected by `use FCBase, :policy`: shared role lists plus the
  # default authorization clauses every command handler starts from.
  def policy do
    quote do
      import FCBase.Policy
      @admin_roles ["owner", "administrator"]
      @dev_roles @admin_roles ++ ["developer"]
      @customer_management_roles @dev_roles ++ ["manager", "support_specialist"]
      @goods_management_roles @dev_roles ++ ["manager", "goods_specialist"]
      def authorize(%{requester_role: "sysdev"} = cmd, _), do: {:ok, cmd}
      def authorize(%{requester_role: "system"} = cmd, _), do: {:ok, cmd}
      def authorize(%{requester_role: "appdev"} = cmd, _), do: {:ok, cmd}
      # Deny commands that target a different account than the requester's.
      def authorize(%{account_id: c_aid} = cmd, %{account_id: s_aid}) when (not is_nil(c_aid)) and (not is_nil(s_aid)) and (c_aid != s_aid), do: {:error, :access_denied}
      # NOTE(review): "unkown" looks like a typo for "unknown" — confirm against
      # wherever client_type is assigned before renaming; this string must match
      # the producer side exactly.
      def authorize(%{client_type: "unkown"}, _), do: {:error, :access_denied}
    end
  end
  # Quoted block injected by `use FCBase, :aggregate`: changeset/struct helpers
  # shared by all aggregate modules.
  def aggregate do
    quote do
      import FCSupport.{Changeset, Struct}
    end
  end
  # Quoted block injected by `use FCBase, :command_handler`: OK-pipeline macros,
  # control-flow helpers and aliases shared by command handlers.
  def command_handler do
    quote do
      use OK.Pipe
      import FCSupport.{ControlFlow, Struct}
      import FCBase.CommandHandler
      alias FCSupport.Translation
    end
  end
  # Quoted block injected by `use FCBase, :event`: events are typed structs
  # that serialize to JSON via Jason.
  def event do
    quote do
      use TypedStruct
      @derive Jason.Encoder
    end
  end
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 26.645833 | 169 | 0.645035 |
73400f28c5cb21d09bc8ee1744075e9b775a269a | 1,044 | ex | Elixir | lib/oli/publishing/tracker.ex | ChristianMurphy/oli-torus | ffeee4996b66b7c6c6eb3e0082d030b8cc6cea97 | [
"MIT"
] | null | null | null | lib/oli/publishing/tracker.ex | ChristianMurphy/oli-torus | ffeee4996b66b7c6c6eb3e0082d030b8cc6cea97 | [
"MIT"
] | null | null | null | lib/oli/publishing/tracker.ex | ChristianMurphy/oli-torus | ffeee4996b66b7c6c6eb3e0082d030b8cc6cea97 | [
"MIT"
] | null | null | null | defmodule Oli.Publishing.ChangeTracker do
alias Oli.Publishing
alias Oli.Publishing.AuthoringResolver
  @doc """
  Tracks the creation of a new revision for the current
  unpublished publication. If `changes` argument is
  supplied it treats the `revision` argument as a base
  revision and creates a new revision from this base with
  the applied changes. If `changes` argument is not supplied or
  is nil, then the `revision` argument is assumed to be an
  already new revision.
  """
  def track_revision(project_slug, revision, changes \\ nil) do
    process_change(
      project_slug,
      revision,
      # Only invoked when `changes` is non-nil; derives a new revision from `revision`.
      &Oli.Resources.create_revision_from_previous/2,
      changes
    )
  end
defp process_change(project_slug, revision, processor, changes) do
publication = AuthoringResolver.publication(project_slug)
{:ok, resultant_revision} =
case changes do
nil -> {:ok, revision}
c -> processor.(revision, c)
end
Publishing.upsert_published_resource(publication, resultant_revision)
end
end
| 29.828571 | 73 | 0.724138 |
73402607491753ce7f0c366bb8efe680ed874b6e | 1,671 | exs | Elixir | test/expwd_test.exs | sergeypopol/expwd | 774f13fd13d81b179ce955ac47e96b349e8f9c6b | [
"Apache-2.0"
] | null | null | null | test/expwd_test.exs | sergeypopol/expwd | 774f13fd13d81b179ce955ac47e96b349e8f9c6b | [
"Apache-2.0"
] | null | null | null | test/expwd_test.exs | sergeypopol/expwd | 774f13fd13d81b179ce955ac47e96b349e8f9c6b | [
"Apache-2.0"
] | null | null | null | defmodule ExpwdTest do
use ExUnit.Case
doctest Expwd
  # Generous timeout: the timing measurement below performs 1.6M comparisons.
  @tag timeout: 60 * 60000
  test "Constant compare" do
    pwd1 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
    pwd2 = "a_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
    pwd3 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa_a"
    pwd4 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaa_aaaaaaaaaaaaaaaaaaaaaaaaaaa"
    # Every ordered pair (including equal ones): if comparison time depended on
    # the position of the first mismatch, the per-pair averages would diverge.
    combos = [
      {pwd1, pwd1},
      {pwd1, pwd2},
      {pwd1, pwd3},
      {pwd1, pwd4},
      {pwd2, pwd1},
      {pwd2, pwd2},
      {pwd2, pwd3},
      {pwd2, pwd4},
      {pwd3, pwd1},
      {pwd3, pwd2},
      {pwd3, pwd3},
      {pwd3, pwd4},
      {pwd4, pwd1},
      {pwd4, pwd2},
      {pwd4, pwd3},
      {pwd4, pwd4}
    ]
    # Wall-clock duration of 100k comparisons for each pair.
    compare_times = Enum.map(
      combos,
      fn {pwda, pwdb} ->
        start_time = :os.system_time()
        secure_compare_n(pwda, pwdb, 100_000)
        end_time = :os.system_time()
        end_time - start_time
      end
    )
    avg = Enum.sum(compare_times) / Enum.count(compare_times)
    assert Enum.max(compare_times) / avg < 1.1 # less than 10% differences
    assert Enum.min(compare_times) / avg > 0.9
  end
defp secure_compare_n(_, _, 0), do: :ok
defp secure_compare_n(a, b, n) do
Expwd.secure_compare(a, b)
secure_compare_n(a, b, n - 1)
end
  test "Secure compare" do
    assert Expwd.secure_compare("scgdrfsxzswteztgsderxtgzdgsxtgtsdtzsapkiok",
                                "scgdrfsxzswteztgsderxtgzdgsxtgtsdtzsapkiok")
    # NOTE(review): the second operand below contains a Cyrillic "с" —
    # presumably intentional, to exercise multi-byte input; confirm.
    refute Expwd.secure_compare("scgdrfsxzswteztgsderxtgzdgsxtgtsdtzsapkiok",
                                "scgdrfsxzswteztqpaorсreqqgsxtgtsdtzsapkiok")
  end
end
| 26.951613 | 77 | 0.636146 |
73405b768a953f11c3c13153f37684f309e128fd | 2,991 | ex | Elixir | apps/esperanto/lib/trybe/esperanto/parsers/generics/enclosing_tag.ex | betrybe/esperanto | 65e40c28914397ef77f5d2c4c29001330beba19f | [
"Apache-2.0"
] | 6 | 2021-07-19T20:00:22.000Z | 2021-11-03T03:27:40.000Z | apps/esperanto/lib/trybe/esperanto/parsers/generics/enclosing_tag.ex | betrybe/esperanto | 65e40c28914397ef77f5d2c4c29001330beba19f | [
"Apache-2.0"
] | null | null | null | apps/esperanto/lib/trybe/esperanto/parsers/generics/enclosing_tag.ex | betrybe/esperanto | 65e40c28914397ef77f5d2c4c29001330beba19f | [
"Apache-2.0"
] | null | null | null | defmodule Esperanto.Parsers.Generics.EnclosingTag do
alias Esperanto.Parsers.TopLevel
alias Esperanto.ParserUtility
alias Esperanto.Walker
@doc """
opts
* :start_delimiter
* :barrier
* :enclosing_tag
* :attrs
"""
@moduledoc """
Simple enclose the contents between `:start_delimiter` and `:barrier`
with the `enclosing_tag` and `attrs` specified
It's possible to surround all siblings together with the `surround` tag if specified
"""
defmacro __using__(options) do
start_delimiter = Keyword.get(options, :start_delimiter)
barrier = Keyword.get(options, :barrier)
tag = Keyword.get(options, :enclosing_tag)
surrounding_tag = Keyword.get(options, :surrounding_tag, nil)
create_node_bloc =
if surrounding_tag do
quote do
parent = NaryTree.get(tree, parent_id)
tree =
case find_surrounding(parent, tree) do
nil ->
surrounding = NaryTree.Node.new(@surrounding_tag, {:empty, @surrounding_attrs})
tree
|> NaryTree.add_child(surrounding, parent_id)
|> NaryTree.add_child(node, surrounding.id)
surrounding ->
NaryTree.add_child(tree, node, surrounding.id)
end
end
else
quote do
tree = NaryTree.add_child(tree, node, parent_id)
end
end
quote do
require Logger
@behaviour Esperanto.Parser
@start_delimiter unquote(start_delimiter)
@barrier unquote(barrier)
@tag unquote(tag)
@surrounding_tag unquote(surrounding_tag)
@attrs Keyword.get(unquote(options), :attrs, %{})
@surrounding_attrs Keyword.get(unquote(options), :surrounding_attrs, %{})
@impl Esperanto.Parser
def parse(walker, tree, parent_id, opts) do
ParserUtility.ensure_has_matched(walker, @start_delimiter)
node = NaryTree.Node.new(@tag, {:empty, @attrs})
unquote(create_node_bloc)
{tree, walker} =
walker
|> Walker.consume_input_matching_regex(@start_delimiter)
|> Walker.with_barrier(@barrier)
|> TopLevel.parse(tree, node.id, opts)
{tree, Walker.destroy_barrier(walker)}
end
@impl Esperanto.Parser
def should_parse(%Walker{input: input}, _, _, opts) do
ParserUtility.match(input, @start_delimiter)
end
defp find_surrounding(parent, tree),
do:
ParserUtility.find_sibiling(parent, tree)
|> find_surrounding(tree, parent.id)
# node is arealdy surrounded with the desire tag
defp find_surrounding(
%NaryTree.Node{name: @surrounding_tag, content: {:empty, _attrs}} = surrouding,
tree,
_parent_id
) do
surrouding
end
defp find_surrounding(
_sibiling,
_tree,
_parent_id
) do
nil
end
end
end
end
| 28.216981 | 95 | 0.616516 |
73405fb02a826f3223c20ef8e6f195f5dfc22ca6 | 962 | ex | Elixir | clients/discovery/lib/google_api/discovery/v1/request_builder.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/discovery/lib/google_api/discovery/v1/request_builder.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/discovery/lib/google_api/discovery/v1/request_builder.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Discovery.V1.RequestBuilder do
  @moduledoc """
  Helper functions for building Tesla requests.
  This module is no longer used. Please use GoogleApi.Gax.Request instead.
  """
  # Intentionally empty: kept so code referencing this module name still compiles.
end
| 37 | 77 | 0.764033 |
73407d65923e3afbf1104cf5aa33ce96eac11a95 | 1,320 | exs | Elixir | mix.exs | lessrest/chroxy | e4d0ec7a694c8d2ebbdb277056977f0cbb261a99 | [
"MIT"
] | 201 | 2018-05-02T16:12:27.000Z | 2022-03-18T16:53:19.000Z | mix.exs | lessrest/chroxy | e4d0ec7a694c8d2ebbdb277056977f0cbb261a99 | [
"MIT"
] | 41 | 2018-06-09T11:48:19.000Z | 2022-03-03T17:10:19.000Z | mix.exs | lessrest/chroxy | e4d0ec7a694c8d2ebbdb277056977f0cbb261a99 | [
"MIT"
] | 26 | 2018-06-05T12:33:36.000Z | 2021-12-29T18:15:44.000Z | defmodule Chroxy.MixProject do
use Mix.Project
  # Mix project definition: app identity, compiler settings, Hex metadata and docs.
  def project do
    [
      app: :chroxy,
      version: "0.7.0",
      elixir: "~> 1.7",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description: description(),
      package: package(),
      docs: [main: "Chroxy", logo: "logo.png", extras: ["README.md"]]
    ]
  end
  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      # :erlexec/:exexec must be running before Chroxy spawns Chrome OS processes.
      extra_applications: [:logger, :erlexec, :exexec],
      mod: {Chroxy.Application, []}
    ]
  end
  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:plug, "~> 1.10.0"},
      {:plug_cowboy, "~> 2.4.0"},
      {:cowboy, "~> 2.8.0"},
      {:jason, "~> 1.1"},
      # OS-process management, used to launch and supervise Chrome instances.
      {:erlexec, "~> 1.10.0"},
      {:exexec, "~> 0.2"},
      {:chrome_remote_interface, "~> 0.3.0"},
      {:ex_doc, "~> 0.20", only: :dev, runtime: false}
    ]
  end
defp description() do
"Chrome Proxy Service enabling scalable remote debug protocol connections to managed Headless Chrome instances."
end
  # Hex package metadata.
  defp package() do
    [
      name: "chroxy",
      files: ["config", "lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Steven Holdsworth (@holsee)"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/holsee/chroxy"}
    ]
  end
end
| 24.90566 | 116 | 0.553788 |
7340abbbd1eb8ca81273b0d81581659da4a90978 | 1,813 | ex | Elixir | lib/new_relic/util/error.ex | alejandrodnm/elixir_agent | b2264d8f54244d136950cadd02f2533967a8e7cb | [
"Apache-2.0"
] | null | null | null | lib/new_relic/util/error.ex | alejandrodnm/elixir_agent | b2264d8f54244d136950cadd02f2533967a8e7cb | [
"Apache-2.0"
] | null | null | null | lib/new_relic/util/error.ex | alejandrodnm/elixir_agent | b2264d8f54244d136950cadd02f2533967a8e7cb | [
"Apache-2.0"
] | null | null | null | defmodule NewRelic.Util.Error do
# Helper functions for normalizing and formatting errors
@moduledoc false
def normalize(kind, exception, stacktrace, initial_call \\ nil)
def normalize(kind, exception, stacktrace, initial_call) do
normalized_error = Exception.normalize(kind, exception, stacktrace)
exception_type = format_type(kind, normalized_error)
exception_reason = format_reason(kind, normalized_error)
exception_stacktrace = format_stacktrace(stacktrace, initial_call)
{exception_type, exception_reason, exception_stacktrace}
end
  # Human-readable error type. Clause order matters: the ErlangError-with-MFA
  # shape must be matched before the generic struct clause.
  def format_type(:error, %ErlangError{original: {_reason, {module, function, args}}}),
    do: Exception.format_mfa(module, function, length(args))
  def format_type(:error, %{__struct__: struct}), do: inspect(struct)
  def format_type(:exit, _reason), do: "EXIT"
  # Human-readable error reason. Clause order matters: specific shapes
  # (ErlangError with MFA, exit tuples, exit-with-exception) before catch-alls.
  def format_reason(:error, %ErlangError{original: {reason, {module, function, args}}}),
    do: "(" <> Exception.format_mfa(module, function, length(args)) <> ") " <> inspect(reason)
  # Generic errors: use the standard banner with the "** " prefix stripped.
  def format_reason(:error, error),
    do:
      :error
      |> Exception.format_banner(error)
      |> String.replace("** ", "")
  def format_reason(:exit, {reason, {module, function, args}}),
    do: "(" <> Exception.format_mfa(module, function, length(args)) <> ") " <> inspect(reason)
  # An exit whose payload is an exception is formatted like an error.
  def format_reason(:exit, %{__exception__: true} = error), do: format_reason(:error, error)
  def format_reason(:exit, reason), do: inspect(reason)
  # Formats each stacktrace entry as a string, appending the process' initial
  # call (when captured) so the trace shows where the process started.
  def format_stacktrace(stacktrace, initial_call),
    do:
      stacktrace
      |> prepend_initial_call(initial_call)
      |> Enum.map(&Exception.format_stacktrace_entry/1)
defp prepend_initial_call(stacktrace, {mod, fun, args}),
do: stacktrace ++ [{mod, fun, args, []}]
defp prepend_initial_call(stacktrace, _), do: stacktrace
end
| 36.26 | 94 | 0.710425 |
7340bc48a26af14b1dd3e99c80824a261bcb8ed6 | 4,300 | ex | Elixir | clients/cloud_support/lib/google_api/cloud_support/v2beta/api/attachments.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_support/lib/google_api/cloud_support/v2beta/api/attachments.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_support/lib/google_api/cloud_support/v2beta/api/attachments.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudSupport.V2beta.Api.Attachments do
  @moduledoc """
  API calls for all endpoints tagged `Attachments`.
  """
  alias GoogleApi.CloudSupport.V2beta.Connection
  alias GoogleApi.Gax.{Request, Response}
  # Embedded in the User-Agent/headers of outgoing requests at compile time.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")
  @doc """
  Create a file attachment on a case or Cloud resource.
  ## Parameters
  * `connection` (*type:* `GoogleApi.CloudSupport.V2beta.Connection.t`) - Connection to server
  * `v2beta_id` (*type:* `String.t`) - Part of `parent`. Required. The resource name of the case to which attachment should be attached.
  * `v2beta_id1` (*type:* `String.t`) - Part of `parent`. See documentation of `v2betaId`.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
  * `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
  * `:access_token` (*type:* `String.t`) - OAuth access token.
  * `:alt` (*type:* `String.t`) - Data format for response.
  * `:callback` (*type:* `String.t`) - JSONP
  * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
  * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
  * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
  * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
  * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
  * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
  * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
  * `:body` (*type:* `GoogleApi.CloudSupport.V2beta.Model.CreateAttachmentRequest.t`) -
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.CloudSupport.V2beta.Model.Attachment{}}` on success
  * `{:error, info}` on failure
  """
  @spec cloudsupport_attachments_create(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.CloudSupport.V2beta.Model.Attachment.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def cloudsupport_attachments_create(
        connection,
        v2beta_id,
        v2beta_id1,
        optional_params \\ [],
        opts \\ []
      ) do
    # Declares where each supported optional parameter belongs in the request.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }
    # Assemble the POST request; path segments are percent-encoded.
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v2beta/{v2betaId}/{v2betaId1}/attachments", %{
        "v2betaId" => URI.encode(v2beta_id, &URI.char_unreserved?/1),
        "v2betaId1" => URI.encode(v2beta_id1, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    # Execute and decode the response body into the Attachment model.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudSupport.V2beta.Model.Attachment{}])
  end
end
| 41.346154 | 196 | 0.637442 |
7340d2ec8cb1cb0ae6904d3cb3050922a85e1f1e | 1,194 | exs | Elixir | mix.exs | KeenMate/dynamic_template | e5fa61889feaf8eb780fe0bdf45c1dd85372aad0 | [
"MIT"
] | null | null | null | mix.exs | KeenMate/dynamic_template | e5fa61889feaf8eb780fe0bdf45c1dd85372aad0 | [
"MIT"
] | null | null | null | mix.exs | KeenMate/dynamic_template | e5fa61889feaf8eb780fe0bdf45c1dd85372aad0 | [
"MIT"
] | null | null | null | defmodule DynamicTemplate.MixProject do
use Mix.Project
  # Mix project definition for the dynamic_template Hex package.
  def project do
    [
      app: :dynamic_template,
      version: "0.1.2",
      elixir: "~> 1.10",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description: description(),
      package: package(),
      name: "DynamicTemplate",
      source_url: "https://github.com/KeenMate/dynamic_template.git"
    ]
  end
  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end
defp description() do
"This module allows you to dynamically compile and load EEx templates into your application as separate Elixir modules"
end
  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:phoenix_html, "~> 2.14.2"}
    ]
  end
  # Hex package metadata.
  defp package() do
    [
      # This option is only needed when you don't want to use the OTP application name
      name: "dynamic_template",
      # These are the default files included in the package
      files: ~w(lib .formatter.exs mix.exs README.md LICENSE),
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/KeenMate/dynamic_template"}
    ]
  end
end
| 25.404255 | 123 | 0.638191 |
7340e3d8f1f8952605d72a8d00c6210dd4947909 | 426 | exs | Elixir | test/shitty_linq_ex_test.exs | chr1sto/shitty_linq_ex | 4ba4e102b3e8820889ed945da0750f97e14c92fd | [
"Unlicense"
] | null | null | null | test/shitty_linq_ex_test.exs | chr1sto/shitty_linq_ex | 4ba4e102b3e8820889ed945da0750f97e14c92fd | [
"Unlicense"
] | null | null | null | test/shitty_linq_ex_test.exs | chr1sto/shitty_linq_ex | 4ba4e102b3e8820889ed945da0750f97e14c92fd | [
"Unlicense"
] | null | null | null | defmodule ShittyLinqExTest do
use ExUnit.Case, async: true
doctest ShittyLinqEx
alias ShittyLinqEx
  # Edge case: reversing an empty list yields an empty list.
  test "reverse of empty list" do
    assert ShittyLinqEx.reverse([]) == []
  end
  test "reverse of normal list" do
    assert ShittyLinqEx.reverse([1, 3, 5, 7]) == [7, 5, 3, 1]
  end
  # Large input: the reversed ascending range equals the descending range.
  test "reverse of big list" do
    assert ShittyLinqEx.reverse(Enum.to_list(1..1_000_000)) == Enum.to_list(1_000_000..1)
  end
end
| 22.421053 | 89 | 0.687793 |
7340f140cb433b784b7e5547674d623a21a76afb | 4,480 | ex | Elixir | lib/broadway/topology/subscriber.ex | nathanl/broadway | 261660c4bb810b16942b623f99b367ea81490852 | [
"Apache-2.0"
] | null | null | null | lib/broadway/topology/subscriber.ex | nathanl/broadway | 261660c4bb810b16942b623f99b367ea81490852 | [
"Apache-2.0"
] | null | null | null | lib/broadway/topology/subscriber.ex | nathanl/broadway | 261660c4bb810b16942b623f99b367ea81490852 | [
"Apache-2.0"
] | null | null | null | defmodule Broadway.Topology.Subscriber do
# This modules defines conveniences for subscribing to producers
# and how to resubscribe to them in case of crashes.
#
# In practice, only the first layer resubscribers in case of crashes
# as the remaining ones are shutdown via the supervision tree which
# is set as one_for_all and max_restarts of 0 to the inner most
# supervisor while the outer most is rest for one. This guarantees
# that either all processess are running or none of them.
#
# For graceful shutdowns, we rely on cancellations with the help
# of the terminator.
@moduledoc false
@behaviour GenStage
def start_link(module, names, options, subscriptions_options, stage_options) do
GenStage.start_link(
__MODULE__,
{module, names, options, subscriptions_options},
stage_options
)
end
  @impl true
  def init({module, names, options, subscription_options}) do
    # The wrapped stage module decides the stage type and its own state.
    {type, state, init_options} = module.init(options)
    terminator = Keyword.fetch!(options, :terminator)
    resubscribe = Keyword.fetch!(options, :resubscribe)
    partition = Keyword.fetch!(options, :partition)
    # Temporary cancellation: crashes don't auto-resubscribe; we manage that.
    subscription_options =
      subscription_options
      |> Keyword.put(:partition, partition)
      |> Keyword.put_new(:cancel, :temporary)
    state =
      Map.merge(state, %{
        callback: module,
        # Only consumers report to the terminator during graceful shutdown.
        terminator: if(type == :consumer, do: terminator),
        resubscribe: resubscribe,
        producers: %{},
        consumers: [],
        subscription_options: subscription_options
      })
    # We always subscribe in random order so the load is balanced across consumers.
    names |> Enum.shuffle() |> Enum.each(&subscribe(&1, state))
    extra_options = if type == :consumer, do: [], else: [buffer_size: :infinity]
    {type, state, extra_options ++ init_options}
  end
@impl true
def handle_events(events, from, %{callback: callback} = state) do
callback.handle_events(events, from, state)
end
  @impl true
  # Producer side: remember the producer's registered name under the
  # subscription ref so we can resubscribe if it goes away.
  def handle_subscribe(:producer, opts, {_, ref}, state) do
    process_name = Keyword.fetch!(opts, :name)
    {:automatic, put_in(state.producers[ref], process_name)}
  end
  # Consumer side: track downstream subscriptions so shutdown can cancel them.
  def handle_subscribe(:consumer, _, from, state) do
    {:automatic, update_in(state.consumers, &[from | &1])}
  end
  @impl true
  def handle_cancel(_, {_, ref} = from, state) do
    case pop_in(state.producers[ref]) do
      # Unknown producer ref: it was a downstream consumer cancelling.
      {nil, _} ->
        {:noreply, [], update_in(state.consumers, &List.delete(&1, from))}
      # A producer went away: maybe schedule a resubscribe, and cancel
      # consumers if we are shutting down with no producers left.
      {process_name, state} ->
        maybe_resubscribe(process_name, state)
        maybe_cancel(state)
        {:noreply, [], state}
    end
  end
  @impl true
  # The terminator announced shutdown: never resubscribe again and, if no
  # producers remain, start cancelling downstream consumers.
  def handle_info(:will_terminate, state) do
    state = %{state | resubscribe: :never}
    maybe_cancel(state)
    {:noreply, [], state}
  end
  # Consumer stage: hand the "done" signal to the terminator process instead
  # of cancelling anything ourselves.
  def handle_info(:cancel_consumers, %{terminator: terminator} = state) when terminator != nil do
    if pid = Process.whereis(terminator) do
      send(pid, {:done, self()})
    end
    {:noreply, [], state}
  end
  # Producer/processor stage: flush remaining events via the callback, then
  # ask each downstream consumer to cancel once nothing is left to emit.
  def handle_info(:cancel_consumers, %{callback: callback} = state) do
    case callback.handle_info(:cancel_consumers, state) do
      # If there are no events to emit we are done
      {:noreply, [], state} ->
        for from <- state.consumers do
          send(self(), {:"$gen_producer", from, {:cancel, :shutdown}})
        end
        {:noreply, [], state}
      # Otherwise we will try again later
      other ->
        GenStage.async_info(self(), :cancel_consumers)
        other
    end
  end
  # A previously-lost producer may be back: try subscribing again.
  def handle_info({:resubscribe, process_name}, state) do
    subscribe(process_name, state)
    {:noreply, [], state}
  end
  # Everything else belongs to the wrapped stage module.
  def handle_info(message, %{callback: callback} = state) do
    callback.handle_info(message, state)
  end
## Helpers

# Subscribe to the producer registered under `process_name`. Returns `true`
# when a subscription was requested; `false` when the process is not alive
# (in which case a retry may be scheduled, depending on `:resubscribe`).
# The `:name` option is read back in `handle_subscribe/4`.
defp subscribe(process_name, state) do
  if pid = Process.whereis(process_name) do
    opts = [to: pid, name: process_name] ++ state.subscription_options
    GenStage.async_subscribe(self(), opts)
    true
  else
    maybe_resubscribe(process_name, state)
    false
  end
end
# Schedules a delayed `{:resubscribe, name}` message to ourselves when the
# state carries an integer resubscribe interval (in milliseconds); any other
# configuration disables retries.
defp maybe_resubscribe(name, %{resubscribe: interval}) when is_integer(interval) do
  Process.send_after(self(), {:resubscribe, name}, interval)
  true
end

defp maybe_resubscribe(_name, _state), do: false
# Kicks off consumer cancellation, but only once resubscription has been
# disabled (`:never`) and no producer subscriptions remain.
defp maybe_cancel(%{resubscribe: :never, producers: prods}) when prods == %{} do
  GenStage.async_info(self(), :cancel_consumers)
  true
end

defp maybe_cancel(_state), do: false
end
| 29.866667 | 97 | 0.670089 |
7340f4e4b4c5230d97a95e637d25c989bce086e7 | 502 | ex | Elixir | web/controllers/csv_export_controller.ex | zombalo/cgrates_web_jsonapi | 47845be4311839fe180cc9f2c7c6795649da4430 | [
"MIT"
] | null | null | null | web/controllers/csv_export_controller.ex | zombalo/cgrates_web_jsonapi | 47845be4311839fe180cc9f2c7c6795649da4430 | [
"MIT"
] | null | null | null | web/controllers/csv_export_controller.ex | zombalo/cgrates_web_jsonapi | 47845be4311839fe180cc9f2c7c6795649da4430 | [
"MIT"
defmodule CgratesWebJsonapi.CsvExportController do
  use CgratesWebJsonapi.Web, :controller

  # Only plain SQL identifiers are accepted as table names. The name is
  # interpolated into a COPY statement below, so anything else would open
  # this endpoint up to SQL injection via the "table" request parameter.
  @table_name_format ~r/\A[a-zA-Z_][a-zA-Z0-9_]*\z/

  @doc """
  Streams the contents of `table` to the client as a CSV download.

  The output of PostgreSQL's `COPY ... TO STDOUT CSV HEADER` is forwarded
  chunk by chunk, so the table is never held in memory at once. Requests
  whose table name is not a simple SQL identifier are rejected with 400.
  """
  def index(conn, %{"table" => table}) do
    if Regex.match?(@table_name_format, table) do
      conn =
        conn
        |> put_resp_content_type("text/csv")
        |> put_resp_header("content-disposition", "attachment; filename=export.csv")
        |> send_chunked(200)

      # Ecto requires streamed SQL to run inside a transaction.
      Repo.transaction(fn ->
        Repo
        |> Ecto.Adapters.SQL.stream("COPY #{table} TO STDOUT CSV HEADER")
        |> Stream.map(&chunk(conn, &1.rows))
        |> Stream.run()
      end)

      conn
    else
      send_resp(conn, 400, "invalid table name")
    end
  end
end
| 25.1 | 80 | 0.653386 |
734126b6453ca7057bb1174dee2c3b21ff1302f7 | 1,309 | exs | Elixir | test/changelog_web/controllers/podcast_controller_test.exs | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | 1 | 2020-05-20T16:58:17.000Z | 2020-05-20T16:58:17.000Z | test/changelog_web/controllers/podcast_controller_test.exs | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | null | null | null | test/changelog_web/controllers/podcast_controller_test.exs | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.PodcastControllerTest do
  # Controller tests for the public podcast pages. `insert/1,2`, the Routes
  # helper, and the `conn` in the test context come in via ConnCase.
  use ChangelogWeb.ConnCase

  test "getting the podcasts index", %{conn: conn} do
    p1 = insert(:podcast)
    p2 = insert(:podcast)
    conn = get(conn, Routes.podcast_path(conn, :index))
    assert conn.status == 200
    assert conn.resp_body =~ p1.name
    assert conn.resp_body =~ p2.name
  end

  # Draft podcasts must not be publicly reachable.
  test "getting a draft podcast page", %{conn: conn} do
    p = insert(:podcast, status: :draft)

    assert_raise Ecto.NoResultsError, fn ->
      get(conn, Routes.podcast_path(conn, :show, p.slug))
    end
  end

  test "getting a podcast page", %{conn: conn} do
    p = insert(:podcast)
    conn = get(conn, Routes.podcast_path(conn, :show, p.slug))
    assert html_response(conn, 200) =~ p.name
  end

  test "getting a podcast page with a published episode", %{conn: conn} do
    p = insert(:podcast)
    e = insert(:published_episode, podcast: p)
    # News item for the episode; its headline is asserted on below.
    i = episode_news_item(e) |> insert()
    conn = get(conn, Routes.podcast_path(conn, :show, p.slug))
    assert html_response(conn, 200) =~ p.name
    assert String.contains?(conn.resp_body, i.headline)
  end

  test "getting a podcast page that doesn't exist", %{conn: conn} do
    assert_raise Ecto.NoResultsError, fn ->
      get conn, Routes.podcast_path(conn, :show, "bad-show")
    end
  end
end
| 30.44186 | 74 | 0.670741 |
73413798a93310505615aef7a7e5e963bb008454 | 2,017 | ex | Elixir | lib/distributed/replicator/gen_server.ex | ertgl/distributed | cb2ccb61069f9b86999e8fcfc1834f1a5537bffb | [
"MIT"
] | 24 | 2017-06-24T01:41:18.000Z | 2021-11-05T20:26:27.000Z | lib/distributed/replicator/gen_server.ex | ertgl/distributed | cb2ccb61069f9b86999e8fcfc1834f1a5537bffb | [
"MIT"
] | null | null | null | lib/distributed/replicator/gen_server.ex | ertgl/distributed | cb2ccb61069f9b86999e8fcfc1834f1a5537bffb | [
"MIT"
defmodule Distributed.Replicator.GenServer do
  @moduledoc """
  The functions in `Distributed.Replicator.GenServer` module helps to replicate an event by processing it on the all nodes in the network.
  In `Distributed.Replicator.GenServer`, functions execute processes in parallel.

  **Note**: Since this module is only a wrapper for `GenServer` module, there is no need to write a detailed documentation for this module.
  Please check documentation of the `GenServer` module; you can basically think that the functions of the module run on every single node
  without specifying nodes, and you will be replied with a list of results of the processes.
  """

  use GenServer

  @doc false
  def start_link() do
    GenServer.start_link(__MODULE__, [], name: __MODULE__.process_id())
  end

  @doc false
  def init(_opts \\ []) do
    {:ok, %{}}
  end

  @doc false
  def process_id() do
    Distributed.Replicator.GenServer
  end

  @doc """
  Sends `msg` to the given `dest` on every node and returns a list of
  `{node_name, msg}` tuples (`send/2` itself returns the message).
  See `Kernel.send/2`.
  """
  @spec info(dest :: pid | port | atom, msg :: any, opts :: [any]) :: [{node, any}]
  def info(dest, msg, opts \\ []) do
    Distributed.Parallel.map(Distributed.Node.list(opts), fn node_name ->
      {node_name, send({dest, node_name}, msg)}
    end)
  end

  @doc """
  Makes synchronous calls to the servers on nodes and waits for their replies.
  Returns a list of `{node_name, reply}` tuples. Accepts `:timeout` in `opts`
  (defaults to `:infinity`). See `GenServer.call/3`.
  """
  @spec call(server :: atom, term, opts :: [any]) :: [{node, term}]
  def call(server, term, opts \\ []) do
    timeout = Keyword.get(opts, :timeout, :infinity)

    Distributed.Parallel.map(Distributed.Node.list(opts), fn node_name ->
      {node_name, GenServer.call({server, node_name}, term, timeout)}
    end)
  end

  @doc """
  Sends asynchronous requests to the servers on nodes. Returns a list of
  `{node_name, :ok}` tuples (`GenServer.cast/2` always returns `:ok`).
  See `GenServer.cast/2`.
  """
  @spec cast(server :: atom, term :: term, opts :: [any]) :: [{node, term}]
  def cast(server, term, opts \\ []) do
    Distributed.Parallel.map(Distributed.Node.list(opts), fn node_name ->
      {node_name, GenServer.cast({server, node_name}, term)}
    end)
  end
end
| 33.616667 | 138 | 0.708478 |
734140c7f49fac28057a11831723790dca48ae0e | 491 | ex | Elixir | lib/booking_web/views/error_view.ex | mattiaslundberg/booking | 469d1469f306b2ab62ce1ee971a825101af6fc7e | [
"MIT"
] | null | null | null | lib/booking_web/views/error_view.ex | mattiaslundberg/booking | 469d1469f306b2ab62ce1ee971a825101af6fc7e | [
"MIT"
] | 2 | 2021-03-10T16:49:07.000Z | 2021-05-11T12:50:22.000Z | lib/booking_web/views/error_view.ex | mattiaslundberg/booking | 469d1469f306b2ab62ce1ee971a825101af6fc7e | [
"MIT"
defmodule BookingWeb.ErrorView do
  use BookingWeb, :view

  # Individual status codes can still be special-cased per format by adding
  # render clauses here, e.g.:
  #
  #     def render("500.html", _assigns) do
  #       "Internal Server Error"
  #     end

  # Fallback used by Phoenix when no error template matches: derive the
  # response text from the template name itself ("404.html" -> "Not Found").
  def template_not_found(template_name, _assigns) do
    Phoenix.Controller.status_message_from_template(template_name)
  end
end
| 28.882353 | 61 | 0.735234 |
7341a4fb02086d281232140b1bff2eaed3422edf | 1,067 | ex | Elixir | priv/catalogue/surface/components/form/example01.ex | EddyLane/surface | 1f13259cbdf81b5a4740ee13349a48f8b6c54bb5 | [
"MIT"
] | 1 | 2021-04-30T14:28:08.000Z | 2021-04-30T14:28:08.000Z | priv/catalogue/surface/components/form/example01.ex | EddyLane/surface | 1f13259cbdf81b5a4740ee13349a48f8b6c54bb5 | [
"MIT"
] | null | null | null | priv/catalogue/surface/components/form/example01.ex | EddyLane/surface | 1f13259cbdf81b5a4740ee13349a48f8b6c54bb5 | [
"MIT"
] | null | null | null | defmodule Surface.Components.Form.Example01 do
  # Catalogue example showing a basic `Surface.Components.Form` with two
  # text inputs bound to the `:user` assign.
  use Surface.Catalogue.Example,
    catalogue: Surface.Components.Catalogue,
    subject: Surface.Components.Form,
    height: "350px",
    direction: "vertical"

  alias Surface.Components.Form.{TextInput, Label, Field}

  # Form state; keys are strings because they mirror incoming form params.
  data user, :map, default: %{"name" => "", "email" => ""}

  def render(assigns) do
    ~H"""
    <Form for={{ :user }} change="change" submit="submit" opts={{ autocomplete: "off" }}>
      <Field class="field" name="name">
        <Label class="label"/>
        <div class="control">
          <TextInput class="input" value={{ @user["name"] }}/>
        </div>
      </Field>
      <Field class="field" name="email">
        <Label class="label">E-mail</Label>
        <div class="control">
          <TextInput class="input" value={{ @user["email"] }}/>
        </div>
      </Field>
    </Form>
    <pre>@user = {{ Jason.encode!(@user, pretty: true) }}</pre>
    """
  end

  # Keep the `:user` assign in sync with the form on every change event.
  def handle_event("change", %{"user" => params}, socket) do
    {:noreply, assign(socket, :user, params)}
  end
end
| 28.837838 | 89 | 0.578257 |
7341cdab69902cd41f1021a6e6154e6a304d3c28 | 4,789 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/floodlight_activity_group.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/floodlight_activity_group.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/floodlight_activity_group.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V28.Model.FloodlightActivityGroup do
  @moduledoc """
  Contains properties of a Floodlight activity group.

  ## Attributes

  - accountId (String.t): Account ID of this floodlight activity group. This is a read-only field that can be left blank. Defaults to: `null`.
  - advertiserId (String.t): Advertiser ID of this floodlight activity group. If this field is left blank, the value will be copied over either from the floodlight configuration's advertiser or from the existing activity group's advertiser. Defaults to: `null`.
  - advertiserIdDimensionValue (DimensionValue): Dimension value for the ID of the advertiser. This is a read-only, auto-generated field. Defaults to: `null`.
  - floodlightConfigurationId (String.t): Floodlight configuration ID of this floodlight activity group. This is a required field. Defaults to: `null`.
  - floodlightConfigurationIdDimensionValue (DimensionValue): Dimension value for the ID of the floodlight configuration. This is a read-only, auto-generated field. Defaults to: `null`.
  - id (String.t): ID of this floodlight activity group. This is a read-only, auto-generated field. Defaults to: `null`.
  - idDimensionValue (DimensionValue): Dimension value for the ID of this floodlight activity group. This is a read-only, auto-generated field. Defaults to: `null`.
  - kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"dfareporting#floodlightActivityGroup\". Defaults to: `null`.
  - name (String.t): Name of this floodlight activity group. This is a required field. Must be less than 65 characters long and cannot contain quotes. Defaults to: `null`.
  - subaccountId (String.t): Subaccount ID of this floodlight activity group. This is a read-only field that can be left blank. Defaults to: `null`.
  - tagString (String.t): Value of the type= parameter in the floodlight tag, which the ad servers use to identify the activity group that the activity belongs to. This is optional: if empty, a new tag string will be generated for you. This string must be 1 to 8 characters long, with valid characters being [a-z][A-Z][0-9][-][ _ ]. This tag string must also be unique among activity groups of the same floodlight configuration. This field is read-only after insertion. Defaults to: `null`.
  - type (String.t): Type of the floodlight activity group. This is a required field that is read-only after insertion. Defaults to: `null`.
    - Enum - one of [COUNTER, SALE]
  """

  # The field/1,2 macros below are presumably injected by this `use` — this
  # file is swagger-generated (see NOTE above), so regenerate, don't hand-edit.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :accountId => any(),
          :advertiserId => any(),
          :advertiserIdDimensionValue => GoogleApi.DFAReporting.V28.Model.DimensionValue.t(),
          :floodlightConfigurationId => any(),
          :floodlightConfigurationIdDimensionValue =>
            GoogleApi.DFAReporting.V28.Model.DimensionValue.t(),
          :id => any(),
          :idDimensionValue => GoogleApi.DFAReporting.V28.Model.DimensionValue.t(),
          :kind => any(),
          :name => any(),
          :subaccountId => any(),
          :tagString => any(),
          :type => any()
        }

  # Field names intentionally keep the API's lowerCamelCase JSON names.
  field(:accountId)
  field(:advertiserId)
  field(:advertiserIdDimensionValue, as: GoogleApi.DFAReporting.V28.Model.DimensionValue)
  field(:floodlightConfigurationId)

  field(
    :floodlightConfigurationIdDimensionValue,
    as: GoogleApi.DFAReporting.V28.Model.DimensionValue
  )

  field(:id)
  field(:idDimensionValue, as: GoogleApi.DFAReporting.V28.Model.DimensionValue)
  field(:kind)
  field(:name)
  field(:subaccountId)
  field(:tagString)
  field(:type)
end
# Forwards decoding to the model module's own `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V28.Model.FloodlightActivityGroup do
  def decode(value, options) do
    GoogleApi.DFAReporting.V28.Model.FloodlightActivityGroup.decode(value, options)
  end
end
# Encodes the struct via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V28.Model.FloodlightActivityGroup do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 54.420455 | 495 | 0.738985 |
7341e66c1d01476a655a5256ca81bc06e2b06686 | 79 | exs | Elixir | server/config/config.exs | ludovicm67/poc-chat | dd710b36d6dee7b009bdc98f8cb911ed601583d3 | [
"MIT"
] | null | null | null | server/config/config.exs | ludovicm67/poc-chat | dd710b36d6dee7b009bdc98f8cb911ed601583d3 | [
"MIT"
] | null | null | null | server/config/config.exs | ludovicm67/poc-chat | dd710b36d6dee7b009bdc98f8cb911ed601583d3 | [
"MIT"
] | null | null | null | use Mix.Config
# Minimal app config: port the chat server listens on.
config :chat, port: 4242
# import_config "#{Mix.env}.exs"
| 13.166667 | 36 | 0.658228 |
7341f386ca00eaf6ab9cc9d00a9a387c4b02e0b9 | 2,103 | ex | Elixir | priv/templates/brando.install/lib/application_name_web/router.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 1 | 2020-04-26T09:53:02.000Z | 2020-04-26T09:53:02.000Z | priv/templates/brando.install/lib/application_name_web/router.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 198 | 2019-08-20T16:16:07.000Z | 2020-07-03T15:42:07.000Z | priv/templates/brando.install/lib/application_name_web/router.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | null | null | null | defmodule <%= application_module %>Web.Router do
  use <%= application_module %>Web, :router

  import Brando.Plug.I18n
  import Brando.Router
  import Phoenix.LiveDashboard.Router
  import Plug.BasicAuth
  import BrandoAdmin.UserAuth

  # Resolved at compile time so the E2E sandbox endpoint below only exists
  # when the :sql_sandbox config is set (i.e. in test builds).
  @sql_sandbox Application.compile_env(:<%= application_name %>, :sql_sandbox) || false

  # Pipeline for regular HTML page requests.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug RemoteIp
    plug :put_locale
    plug :protect_from_forgery
    plug :put_secure_browser_headers
    plug :put_extra_secure_browser_headers
    plug PlugHeartbeat
    plug Brando.Plug.Identity
    plug Brando.Plug.Navigation, key: "main", as: :navigation
    plug Brando.Plug.Fragment, parent_key: "partials", as: :partials
    # plug :put_meta, %{
    #   "google-site-verification" => "GSV"
    # }
  end

  # Lighter pipeline for HTML-responding API endpoints.
  pipeline :browser_api do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :protect_from_forgery
    plug :put_secure_browser_headers
    # plug :put_extra_secure_browser_headers
  end

  # Basic auth guarding the live dashboard; password generated at install time.
  pipeline :basic_httpauth do
    plug :basic_auth, username: "admin", password: "<%= Brando.Utils.random_string(10) %>"
  end

  if @sql_sandbox do
    forward "/__e2e", Brando.Plug.E2ETest
  end

  scope "/__dashboard" do
    pipe_through [:browser, :basic_httpauth]
    live_dashboard "/", metrics: <%= application_module %>Web.Telemetry
  end

  admin_routes do
    live "/", <%= application_module %>Admin.DashboardLive
    # live "/projects", <%= application_module %>Admin.Projects.ProjectListLive
    # live "/projects/create", <%= application_module %>Admin.Projects.ProjectCreateLive
    # live "/projects/update/:entry_id", <%= application_module %>Admin.Projects.ProjectUpdateLive
  end

  scope "/coming-soon", <%= application_module %>Web do
    get "/", LockdownController, :index
    post "/", LockdownController, :post_password
  end

  scope "/api", <%= application_module %>Web do
    pipe_through :browser_api
    # get "/projects/all/:page", PostController, :api_get
  end

  # Catch-all public pages; must come last.
  scope "/" do
    pipe_through :browser
    page_routes()
  end
end
| 28.808219 | 98 | 0.704232 |
734204993de74fec2ca2d980f993bf958e2b92b7 | 6,464 | ex | Elixir | lib/ex_dav/dav_provider.ex | SteffenDE/ex_dav | 7f3648520e78479021e4269c4714f2069214e311 | [
"MIT"
] | null | null | null | lib/ex_dav/dav_provider.ex | SteffenDE/ex_dav | 7f3648520e78479021e4269c4714f2069214e311 | [
"MIT"
] | 1 | 2021-04-14T10:40:54.000Z | 2021-04-14T10:40:54.000Z | lib/ex_dav/dav_provider.ex | SteffenDE/ex_dav | 7f3648520e78479021e4269c4714f2069214e311 | [
"MIT"
defmodule ExDav.DavProvider do
  @moduledoc """
  This module defines the basic callbacks for an ExDAV Dav Provider.
  """

  # Opaque, provider-chosen handle for a resource; passed back into every callback.
  @type ref :: any()

  @doc """
  The main function that is called for any incoming request.
  Used to resolve the request into an opaque resource reference (`ref`).
  """
  @callback resolve(conn :: Plug.Conn.t(), opts :: list()) :: ref()

  @doc """
  Used to format the resource into the `ExDav.DavResource` struct.
  A default implementation is provided when using this module.
  """
  @callback to_dav_struct(ref()) :: ExDav.DavResource.t()

  @doc """
  Defines whether the provider is read-only. Currently we only support read-only providers.
  By default, always returns `true`.
  """
  @callback read_only() :: boolean()

  @doc """
  Defines whether the resource supports range requests.
  If `true` the provider MUST handle the range option in `get_content/2` / `get_stream/2`.
  By default, always returns `false`.
  """
  @callback supports_ranges(ref()) :: boolean()

  @doc """
  Defines whether the resource is streamed.
  If `true` the provider MUST implement the `get_stream/2` function.
  By default, always returns `false`.
  """
  @callback supports_streaming(ref()) :: boolean()

  @doc """
  Defines whether the resource supports the Content-Length header.
  If `true` the provider MUST implement the `get_content_length/1` function.
  If `false` the response in sent without a Content-Length header in chunked encoding.
  By default, always returns `false`.
  """
  @callback supports_content_length(ref()) :: boolean()

  @doc """
  Defines whether the resource supports the Etag header.
  If `true` the provider MUST implement the `get_etag/1` function.
  By default, always returns `false`.
  """
  @callback supports_etag(ref()) :: boolean()

  @doc """
  Used to check if the opaque reference is a collection.
  Providers MUST implement this callback.
  """
  @callback is_collection(ref()) :: boolean()

  @doc """
  Used to set the `displayname` property.
  By default, returns the last path segment.
  """
  @callback get_display_name(ref()) :: String.t()

  @doc """
  Used to get the members of a collection.
  Only called for collections.
  Providers MUST implement this callback if they server any collection.
  """
  @callback get_members(ref()) :: [ref()]

  @doc """
  Used to set the href location of a resource. Resources can have many locations, as paths are case sensitive.
  This method should return the preferred path, e.g. a downcased version of the path.
  Providers SHOULD implement this callback.
  """
  @callback get_preferred_path(ref()) :: String.t()

  @doc """
  Used to get the creation date.
  Providers SHOULD implement this callback, especially for non-collections.
  """
  @callback get_creation_date(ref()) :: NaiveDateTime.t()

  @doc """
  Used to get the modification date.
  Providers SHOULD implement this callback, especially for non-collections.
  """
  @callback get_last_modified(ref()) :: NaiveDateTime.t()

  @doc """
  Used to get the Content-Length header.
  Providers SHOULD implement this callback, especially for non-collections.
  """
  @callback get_content_length(ref()) :: non_neg_integer() | nil

  @doc """
  Used to get the Content-Type header.
  Providers SHOULD implement this callback, especially for non-collections.
  Defaults to `application/octet-stream`.
  """
  @callback get_content_type(ref()) :: String.t()

  @doc """
  Used to get the Etag header.
  Providers SHOULD implement this callback, especially for non-collections.
  """
  @callback get_etag(ref()) :: String.t() | nil

  @doc """
  Used to get the response body for GET requests.
  Providers MUST implement this callback unless `supports_streaming/1` is `true` for the resource.
  """
  @callback get_content(ref(), opts :: Keyword.t()) :: binary()

  @doc """
  Used to get the response body for GET requests in a memory efficient way.
  Providers MUST only implement this callback if `supports_streaming/1` is `true` for the resource.
  Providers SHOULD implement this callback when serving large files.
  """
  @callback get_stream(ref(), opts :: Keyword.t()) :: Enumerable.t()

  defmacro __using__(_) do
    module = __CALLER__.module

    quote do
      @behaviour ExDav.DavProvider

      @impl true
      def read_only, do: true

      @impl true
      def supports_ranges(_), do: false

      @impl true
      def supports_streaming(_), do: false

      @impl true
      def supports_content_length(_), do: false

      @impl true
      def supports_etag(_), do: false

      @impl true
      def get_display_name(ref) do
        path = unquote(module).get_preferred_path(ref)
        List.last(String.split(path, "/"))
      end

      @impl true
      def get_etag(_), do: nil

      @impl true
      def get_content_length(_), do: nil

      @impl true
      def get_content_type(ref) do
        if unquote(module).is_collection(ref) do
          "application/x-directory"
        else
          "application/octet-stream"
        end
      end

      defp map_ref(nil, _), do: nil

      # Recursively builds the DavResource tree; children are only expanded
      # one level deep (depth 0), matching a PROPFIND with Depth: 1.
      defp map_ref(ref, depth) do
        props = %{
          creationdate:
            unquote(module).get_creation_date(ref)
            |> Calendar.strftime("%a, %d %b %Y %H:%M:%S GMT"),
          displayname: unquote(module).get_display_name(ref),
          getcontentlength: unquote(module).get_content_length(ref),
          getcontenttype: unquote(module).get_content_type(ref),
          getetag: unquote(module).get_etag(ref),
          getlastmodified:
            unquote(module).get_last_modified(ref)
            |> Calendar.strftime("%a, %d %b %Y %H:%M:%S GMT")
        }

        if unquote(module).is_collection(ref) do
          props = Map.put(props, :resourcetype, "<collection/>")

          %ExDav.DavResource{
            href: unquote(module).get_preferred_path(ref) |> URI.encode(),
            props: props,
            children:
              if depth == 0 do
                unquote(module).get_members(ref)
                |> Enum.map(fn mem -> map_ref(mem, depth + 1) end)
              else
                []
              end
          }
        else
          %ExDav.DavResource{
            href: unquote(module).get_preferred_path(ref) |> URI.encode(),
            props: props,
            children: nil
          }
        end
      end

      @impl true
      def to_dav_struct(ref) do
        map_ref(ref, 0)
      end

      defoverridable ExDav.DavProvider
    end
  end
end
| 28.227074 | 110 | 0.647432 |
73421446a308a5d4025c04e7bd1e71640dcaa183 | 547 | exs | Elixir | mix.exs | ckampfe/regex_rs | 2b5de81bff201c849cbf17d5b6dac5c6d9abab71 | [
"BSD-3-Clause"
] | 1 | 2021-01-15T02:05:22.000Z | 2021-01-15T02:05:22.000Z | mix.exs | ckampfe/regex_rs | 2b5de81bff201c849cbf17d5b6dac5c6d9abab71 | [
"BSD-3-Clause"
] | null | null | null | mix.exs | ckampfe/regex_rs | 2b5de81bff201c849cbf17d5b6dac5c6d9abab71 | [
"BSD-3-Clause"
] | null | null | null | defmodule RegexRs.MixProject do
  use Mix.Project

  def project do
    [
      app: :regex_rs,
      version: "0.1.0",
      elixir: "~> 1.11",
      # :bench is treated like :prod so benchmark runs use permanent builds.
      start_permanent: Mix.env() == :prod || Mix.env() == :bench,
      deps: deps()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # benchee is only pulled in for dev and the :bench environment
      {:benchee, "~> 1.0", only: [:dev, :bench]},
      {:rustler, "~> 0.22"}
    ]
  end
end
| 18.862069 | 65 | 0.552102 |
7342301895a5826278c2b1e1750fbf99b962dcc9 | 284 | ex | Elixir | lib/janus.ex | couchemar/elixir-janus | 50dd60ff056ea93dd282fb8ea050d1addf4bfc5d | [
"MIT"
] | 41 | 2016-12-21T13:28:14.000Z | 2021-09-11T14:48:05.000Z | lib/janus.ex | meetupcall/elixir-janus | af24ac04ef61d40ea0a582c0d182826d2075eb72 | [
"MIT"
] | 11 | 2017-01-04T20:22:16.000Z | 2020-09-07T06:29:22.000Z | lib/janus.ex | meetupcall/elixir-janus | af24ac04ef61d40ea0a582c0d182826d2075eb72 | [
"MIT"
] | 13 | 2016-12-21T13:28:16.000Z | 2021-05-15T07:48:50.000Z | import Janus.Util
defmodule Janus do
  @moduledoc """
  Client for the [Janus REST API](https://janus.conf.meetecho.com/docs/rest.html).
  """

  @doc """
  Fetches server details from the Janus instance reachable at `url`.
  """
  def info(url) do
    # `get/1` is imported from Janus.Util at the top of this file.
    get("#{url}/info")
  end
end
| 17.75 | 100 | 0.658451 |
734238fbf7051f5d7951c87f54a8748c8125b24c | 1,903 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/channel_statistics.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/channel_statistics.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/channel_statistics.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.ChannelStatistics do
  @moduledoc """
  Statistics about a channel: number of subscribers, number of videos in the channel, etc.

  ## Attributes

  - commentCount (String.t): The number of comments for the channel. Defaults to: `null`.
  - hiddenSubscriberCount (boolean()): Whether or not the number of subscribers is shown for this user. Defaults to: `null`.
  - subscriberCount (String.t): The number of subscribers that the channel has. Defaults to: `null`.
  - videoCount (String.t): The number of videos uploaded to the channel. Defaults to: `null`.
  - viewCount (String.t): The number of times the channel has been viewed. Defaults to: `null`.
  """

  # Field names intentionally keep the API's lowerCamelCase JSON names;
  # this file is swagger-generated (see NOTE above), regenerate rather than edit.
  defstruct [
    :commentCount,
    :hiddenSubscriberCount,
    :subscriberCount,
    :videoCount,
    :viewCount
  ]
end
# Decoding is the identity: the statistics struct needs no post-processing.
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ChannelStatistics do
  def decode(value, _options), do: value
end
# Serializes the struct via the shared helper, which (per its name) skips
# nil fields so they are omitted from the JSON output.
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.ChannelStatistics do
  def encode(value, options) do
    GoogleApi.YouTube.V3.Deserializer.serialize_non_nil(value, options)
  end
end
| 36.596154 | 124 | 0.748292 |
734265519417218181fb40698f371f808de93874 | 1,187 | exs | Elixir | mix.exs | Userpilot/locksmith | 33b4e9abbe4b472c3e7dab246202d84400719d94 | [
"MIT"
] | null | null | null | mix.exs | Userpilot/locksmith | 33b4e9abbe4b472c3e7dab246202d84400719d94 | [
"MIT"
] | null | null | null | mix.exs | Userpilot/locksmith | 33b4e9abbe4b472c3e7dab246202d84400719d94 | [
"MIT"
] | null | null | null | defmodule Locksmith.MixProject do
  use Mix.Project

  # Canonical repository URL, reused for docs metadata and package links.
  @scm_url "https://github.com/userpilot/locksmith"

  def project do
    [
      app: :locksmith,
      version: "1.0.0",
      elixir: "~> 1.11",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description:
        "Queue-free/gen_server-free/process-free locking mechanism built for high concurrency.",
      package: package(),
      # ExDoc configurations
      name: "Locksmith",
      source_url: @scm_url,
      homepage_url: @scm_url,
      docs: [
        # main: "README",
        extras: ["README.md"]
      ]
    ]
  end

  def application do
    [
      mod: {Locksmith.Application, []},
      extra_applications: [:logger]
    ]
  end

  defp deps do
    [
      {:eternal, "~> 1.2"},
      # Development dependencies
      {:credo, "~> 1.5", only: :dev, runtime: false},
      {:ex_doc, "~> 0.24", only: :dev, runtime: false},
      {:dialyxir, "~> 1.0", only: :dev, runtime: false}
    ]
  end

  # Hex package metadata.
  defp package do
    [
      maintainers: ["Ameer A."],
      licenses: ["MIT"],
      links: %{
        "GitHub" => @scm_url,
        "Userpilot" => "https://userpilot.com"
      }
    ]
  end
end
| 20.824561 | 96 | 0.539174 |
734279b26b1595dc17a55757ed9fcb8bf8937127 | 74,345 | ex | Elixir | lib/ecto/schema.ex | barthez/ecto | 747ea7e207132f7b72451c9d3942acfe5db0aa7b | [
"Apache-2.0"
] | null | null | null | lib/ecto/schema.ex | barthez/ecto | 747ea7e207132f7b72451c9d3942acfe5db0aa7b | [
"Apache-2.0"
] | null | null | null | lib/ecto/schema.ex | barthez/ecto | 747ea7e207132f7b72451c9d3942acfe5db0aa7b | [
"Apache-2.0"
] | 1 | 2018-09-21T16:05:29.000Z | 2018-09-21T16:05:29.000Z | defmodule Ecto.Schema do
@moduledoc ~S"""
Defines a schema.
An Ecto schema is used to map any data source into an Elixir struct.
One of such use cases is to map data coming from a repository,
usually a table, into Elixir structs.
## Example
defmodule User do
use Ecto.Schema
schema "users" do
field :name, :string
field :age, :integer, default: 0
has_many :posts, Post
end
end
By default, a schema will automatically generate a primary key which is named
`id` and of type `:integer`. The `field` macro defines a field in the schema
with given name and type. `has_many` associates many posts with the user
schema.
Note that the name of the database table does not need to correlate to your
module name. For example, if you are working with a legacy database, you can
reference the table name when you define your schema:
defmodule User do
use Ecto.Schema
schema "legacy_users" do
# ... fields ...
end
end
## Schema attributes
Supported attributes, to be set beforehand, for configuring the defined schema.
These attributes are:
* `@primary_key` - configures the schema primary key. It expects
a tuple `{field_name, type, options}` with the primary key field
name, type (typically `:id` or `:binary_id`, but can be any type) and
options. Defaults to `{:id, :id, autogenerate: true}`. When set
to `false`, does not define a primary key in the schema unless
composite keys are defined using the options of `field`.
* `@schema_prefix` - configures the schema prefix. Defaults to `nil`,
which generates structs and queries without prefix. When set, the
prefix will be used by every built struct and on queries whenever
the schema is used in a `from` or a `join`. In PostgreSQL, the prefix
is called "SCHEMA" (typically set via Postgres' `search_path`).
In MySQL the prefix points to databases.
* `@foreign_key_type` - configures the default foreign key type
used by `belongs_to` associations. Defaults to `:id`;
* `@timestamps_opts` - configures the default timestamps type
used by `timestamps`. Defaults to `[type: :naive_datetime]`;
* `@derive` - the same as `@derive` available in `Kernel.defstruct/1`
as the schema defines a struct behind the scenes;
* `@field_source_mapper` - a function that receives the current field name
and returns the mapping of this field name in the underlying source.
In other words, it is a mechanism to automatically generate the `:source`
option for the `field` macro. It defaults to `fn x -> x end`, where no
field transformation is done;
The advantage of configuring the schema via those attributes is
that they can be set with a macro to configure application wide
defaults.
For example, if your database does not support autoincrementing
primary keys and requires something like UUID or a RecordID, you
can configure and use `:binary_id` as your primary key type as follows:
# Define a module to be used as base
defmodule MyApp.Schema do
defmacro __using__(_) do
quote do
use Ecto.Schema
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
end
end
end
# Now use MyApp.Schema to define new schemas
defmodule MyApp.Comment do
use MyApp.Schema
schema "comments" do
belongs_to :post, MyApp.Post
end
end
Any schemas using `MyApp.Schema` will get the `:id` field with type
`:binary_id` as the primary key. We explain what the `:binary_id` type
entails in the next section.
The `belongs_to` association on `MyApp.Comment` will also define
a `:post_id` field with `:binary_id` type that references the `:id`
field of the `MyApp.Post` schema.
## Primary keys
Ecto supports two ID types, called `:id` and `:binary_id`, which are
often used as the type for primary keys and associations.
The `:id` type is used when the primary key is an integer while the
`:binary_id` is used for primary keys in particular binary formats,
which may be `Ecto.UUID` for databases like PostgreSQL and MySQL,
or some specific ObjectID or RecordID often imposed by NoSQL databases.
In both cases, both types have their semantics specified by the
underlying adapter/database. If you use the `:id` type with
`:autogenerate`, it means the database will be responsible for
auto-generation of the id. This is often the case for primary keys
in relational databases which are auto-incremented.
Similarly, the `:binary_id` type may be generated in the adapter
for cases like UUID but it may also be handled by the database if
required. In any case, both scenarios are handled transparently by
Ecto.
Besides `:id` and `:binary_id`, which are often used by primary
and foreign keys, Ecto provides a huge variety of types to be used
by any column.
Ecto also supports composite primary keys.
If your primary key is not named "id" (e.g. if you are working with a
legacy database), you can use the `@primary_key` attribute to configure
your key name using the `source` option. For example, the following
attribute defines an integer primary key named `legacy_id` which is
automatically incremented by the database:
@primary_key {:id, :integer, autogenerate: false, source: :legacy_id}
## Types and casting
When defining the schema, types need to be given. Types are split
into two categories, primitive types and custom types.
### Primitive types
The primitive types are:
Ecto type | Elixir type | Literal syntax in query
:---------------------- | :---------------------- | :---------------------
`:id` | `integer` | 1, 2, 3
`:binary_id` | `binary` | `<<int, int, int, ...>>`
`:integer` | `integer` | 1, 2, 3
`:float` | `float` | 1.0, 2.0, 3.0
`:boolean` | `boolean` | true, false
`:string` | UTF-8 encoded `string` | "hello"
`:binary` | `binary` | `<<int, int, int, ...>>`
`{:array, inner_type}` | `list` | `[value, value, value, ...]`
`:map` | `map` |
`{:map, inner_type}` | `map` |
`:decimal` | [`Decimal`](https://github.com/ericmj/decimal) |
`:date` | `Date` |
`:time` | `Time` |
`:naive_datetime` | `NaiveDateTime` |
`:naive_datetime_usec` | `NaiveDateTime` |
`:utc_datetime` | `DateTime` |
`:utc_datetime_usec` | `DateTime` |
**Note:** For the `{:array, inner_type}` and `{:map, inner_type}` type,
replace `inner_type` with one of the valid types, such as `:string`.
### Custom types
Besides providing primitive types, Ecto allows custom types to be
implemented by developers, allowing Ecto behaviour to be extended.
A custom type is a module that implements the `Ecto.Type` behaviour.
By default, Ecto provides the following custom types:
Custom type | Database type | Elixir type
:---------------------- | :---------------------- | :---------------------
`Ecto.UUID` | `:uuid` | `uuid-string`
Read the `Ecto.Type` documentation for more information on implementing
your own types.
Finally, schemas can also have virtual fields by passing the
`virtual: true` option. These fields are not persisted to the database
and can optionally not be type checked by declaring type `:any`.
### The datetime types
Four different datetime primitive types are available:
* `naive_datetime` - has a precision of seconds and casts values
to Elixir's `NaiveDateTime` struct which has no timezone information.
* `naive_datetime_usec` - has a default precision of microseconds and
also casts values to `NaiveDateTime` with no timezone information.
  * `utc_datetime` - has a precision of seconds and casts values to
    Elixir's `DateTime` struct and expects the time zone to be set to UTC.
* `utc_datetime_usec` has a default precision of microseconds and also
casts values to `DateTime` expecting the time zone be set to UTC.
Having these different types allows developers to choose a type that will
be compatible with the database and your project's precision requirements.
For example, some older versions of MySQL do not support microseconds in
datetime fields.
When choosing what datetime type to work with, keep in mind that Elixir
functions like `NaiveDateTime.utc_now/0` have a default precision of 6.
Casting a value with a precision greater than 0 to a non-`usec` type will
truncate all microseconds and set the precision to 0.
### The map type
The map type allows developers to store an Elixir map directly
in the database:
# In your migration
create table(:users) do
add :data, :map
end
# In your schema
field :data, :map
# Now in your code
user = Repo.insert! %User{data: %{"foo" => "bar"}}
Keep in mind that we advise the map keys to be strings or integers
instead of atoms. Atoms may be accepted depending on how maps are
serialized but the database will always return atom keys as strings
due to security reasons.
In order to support maps, different databases may employ different
techniques. For example, PostgreSQL will store those values in jsonb
fields, allowing you to just query parts of it. MySQL and MSSQL, on
the other hand, do not yet provide a JSON type, so the value will be
stored in a text field.
For maps to work in such databases, Ecto will need a JSON library.
By default Ecto will use [Jason](http://github.com/michalmuskala/jason)
which needs to be added to your deps in `mix.exs`:
{:jason, "~> 1.0"}
You can however configure the adapter to use another library. For example,
if using Postgres:
config :postgrex, :json_library, YourLibraryOfChoice
Or if using MySQL:
config :mariaex, :json_library, YourLibraryOfChoice
If changing the JSON library, remember to recompile the adapter afterwards
by cleaning the current build:
mix deps.clean --build postgrex
### Casting
When directly manipulating the struct, it is the responsibility of
the developer to ensure the field values have the proper type. For
example, you can create a user struct with an invalid value
for `age`:
iex> user = %User{age: "0"}
iex> user.age
"0"
However, if you attempt to persist the struct above, an error will
be raised since Ecto validates the types when sending them to the
adapter/database.
Therefore, when working with and manipulating external data, it is
recommended to use `Ecto.Changeset`'s that are able to filter
and properly cast external data:
changeset = Ecto.Changeset.cast(%User{}, %{"age" => "0"}, [:age])
user = Repo.insert!(changeset)
**You can use Ecto schemas and changesets to cast and validate any kind
of data, regardless if the data will be persisted to an Ecto repository
or not**.
## Reflection
Any schema module will generate the `__schema__` function that can be
used for runtime introspection of the schema:
* `__schema__(:source)` - Returns the source as given to `schema/2`;
* `__schema__(:prefix)` - Returns optional prefix for source provided by
`@schema_prefix` schema attribute;
* `__schema__(:primary_key)` - Returns a list of primary key fields (empty if there is none);
* `__schema__(:fields)` - Returns a list of all non-virtual field names;
* `__schema__(:field_source, field)` - Returns the alias of the given field;
* `__schema__(:type, field)` - Returns the type of the given non-virtual field;
* `__schema__(:associations)` - Returns a list of all association field names;
* `__schema__(:association, assoc)` - Returns the association reflection of the given assoc;
* `__schema__(:embeds)` - Returns a list of all embedded field names;
* `__schema__(:embed, embed)` - Returns the embedding reflection of the given embed;
* `__schema__(:read_after_writes)` - Non-virtual fields that must be read back
from the database after every write (insert or update);
* `__schema__(:autogenerate_id)` - Primary key that is auto generated on insert;
Furthermore, both `__struct__` and `__changeset__` functions are
defined so structs and changeset functionalities are available.
"""
@type source :: String.t
@type prefix :: String.t | nil
@type schema :: %{optional(atom) => any, __struct__: atom, __meta__: Ecto.Schema.Metadata.t}
@type embedded_schema :: %{optional(atom) => any, __struct__: atom}
@type t :: schema | embedded_schema
defmodule Metadata do
@moduledoc """
Stores metadata of a struct.
## State
The state of the schema is stored in the `:state` field and allows
following values:
* `:built` - the stuct was constructed in memory and is not persisted
to database yet;
* `:loaded` - the struct was loaded from database and represents
persisted data;
* `:deleted` - the struct was deleted and no longer represents persisted
data.
## Source
The `:source` tracks the (table or collection) where the struct is or should
be persisted to.
## Prefix
Tracks the source prefix in the data storage.
## Context
The `:context` field represents additional state some databases require
for proper updates of data. It is not used by the built-in adapters of
`Ecto.Adapters.Postres` and `Ecto.Adapters.MySQL`.
## Schema
The `:schema` field refers the module name for the schema this metadata belongs to.
"""
defstruct [:state, :source, :context, :schema, :prefix]
@type state :: :built | :loaded | :deleted
@type t :: %__MODULE__{
context: any,
prefix: Ecto.Schema.prefix,
schema: module,
source: Ecto.Schema.source,
state: state,
}
defimpl Inspect do
import Inspect.Algebra
def inspect(metadata, opts) do
%{source: source, prefix: prefix, state: state, context: context} = metadata
entries =
for entry <- [state, prefix, source, context],
entry != nil,
do: to_doc(entry, opts)
concat ["#Ecto.Schema.Metadata<"] ++ Enum.intersperse(entries, ", ") ++ [">"]
end
end
end
@doc false
defmacro __using__(_) do
quote do
import Ecto.Schema, only: [schema: 2, embedded_schema: 1]
@primary_key nil
@timestamps_opts []
@foreign_key_type :id
@schema_prefix nil
@field_source_mapper fn x -> x end
Module.register_attribute(__MODULE__, :ecto_primary_keys, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_fields, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_field_sources, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_assocs, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_embeds, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_raw, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_autogenerate, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_autoupdate, accumulate: true)
Module.put_attribute(__MODULE__, :ecto_autogenerate_id, nil)
end
end
@doc """
Defines an embedded schema.
An embedded schema does not require a source name
and it does not include a metadata field.
Embedded schemas by default set the primary key type
to `:binary_id` but such can be configured with the
`@primary_key` attribute.
Embedded schemas don't define the `__meta__` field.
"""
defmacro embedded_schema([do: block]) do
schema(nil, false, :binary_id, block)
end
@doc """
Defines a schema struct with a source name and field definitions.
An additional field called `__meta__` is added to the struct for storing
internal Ecto state. This field always has a `Ecto.Schema.Metadata` struct
as value and can be manipulated with the `Ecto.put_meta/2` function.
"""
defmacro schema(source, [do: block]) do
schema(source, true, :id, block)
end
defp schema(source, meta?, type, block) do
prelude =
quote do
@after_compile Ecto.Schema
Module.register_attribute(__MODULE__, :changeset_fields, accumulate: true)
Module.register_attribute(__MODULE__, :struct_fields, accumulate: true)
meta? = unquote(meta?)
source = unquote(source)
prefix = @schema_prefix
# Those module attributes are accessed only dynamically
# so we explicitly reference them here to avoid warnings.
_ = @foreign_key_type
_ = @timestamps_opts
if meta? do
unless is_binary(source) do
raise ArgumentError, "schema source must be a string, got: #{inspect source}"
end
meta = %Metadata{state: :built, source: source, prefix: prefix, schema: __MODULE__}
Module.put_attribute(__MODULE__, :struct_fields, {:__meta__, meta})
end
if @primary_key == nil do
@primary_key {:id, unquote(type), autogenerate: true}
end
primary_key_fields =
case @primary_key do
false ->
[]
{name, type, opts} ->
Ecto.Schema.__field__(__MODULE__, name, type, [primary_key: true] ++ opts)
[name]
other ->
raise ArgumentError, "@primary_key must be false or {name, type, opts}"
end
try do
import Ecto.Schema
unquote(block)
after
:ok
end
end
postlude =
quote unquote: false do
primary_key_fields = @ecto_primary_keys |> Enum.reverse
autogenerate = @ecto_autogenerate |> Enum.reverse
autoupdate = @ecto_autoupdate |> Enum.reverse
fields = @ecto_fields |> Enum.reverse
field_sources = @ecto_field_sources |> Enum.reverse
assocs = @ecto_assocs |> Enum.reverse
embeds = @ecto_embeds |> Enum.reverse
defstruct @struct_fields
def __changeset__ do
%{unquote_splicing(Macro.escape(@changeset_fields))}
end
def __schema__(:prefix), do: unquote(prefix)
def __schema__(:source), do: unquote(source)
def __schema__(:fields), do: unquote(Enum.map(fields, &elem(&1, 0)))
def __schema__(:primary_key), do: unquote(primary_key_fields)
def __schema__(:hash), do: unquote(:erlang.phash2({primary_key_fields, fields}))
def __schema__(:read_after_writes), do: unquote(Enum.reverse(@ecto_raw))
def __schema__(:autogenerate_id), do: unquote(Macro.escape(@ecto_autogenerate_id))
def __schema__(:autogenerate), do: unquote(Macro.escape(autogenerate))
def __schema__(:autoupdate), do: unquote(Macro.escape(autoupdate))
def __schema__(:query) do
%Ecto.Query{
from: %Ecto.Query.FromExpr{
source: {unquote(source), __MODULE__},
prefix: unquote(prefix)
}
}
end
for clauses <- Ecto.Schema.__schema__(fields, field_sources, assocs, embeds),
{args, body} <- clauses do
def __schema__(unquote_splicing(args)), do: unquote(body)
end
end
quote do
unquote(prelude)
unquote(postlude)
end
end
## API
@doc """
Defines a field on the schema with given name and type.
The field name will be used as is to read and write to the database
by all of the built-in adapters unless overridden with the `:source`
option.
## Options
* `:default` - Sets the default value on the schema and the struct.
The default value is calculated at compilation time, so don't use
expressions like `DateTime.utc_now` or `Ecto.UUID.generate` as
they would then be the same for all records.
* `:source` - Defines the name that is to be used in database for this field.
This is useful when attaching to an existing database. The value should be
an atom.
* `:autogenerate` - a `{module, function, args}` tuple for a function
to call to generate the field value before insertion if value is not set.
A shorthand value of `true` is equivalent to `{type, :autogenerate, []}`.
* `:read_after_writes` - When true, the field is always read back
from the database after insert and updates.
For relational databases, this means the RETURNING option of those
statements is used. For this reason, MySQL does not support this
option and will raise an error if a schema is inserted/updated with
read after writes fields.
* `:virtual` - When true, the field is not persisted to the database.
Notice virtual fields do not support `:autogenerate` nor
`:read_after_writes`.
* `:primary_key` - When true, the field is used as part of the
composite primary key
"""
defmacro field(name, type \\ :string, opts \\ []) do
quote do
Ecto.Schema.__field__(__MODULE__, unquote(name), unquote(type), unquote(opts))
end
end
@doc """
Generates `:inserted_at` and `:updated_at` timestamp fields.
The fields generated by this macro will automatically be set to
the current time when inserting and updating values in a repository.
## Options
* `:type` - the timestamps type, defaults to `:naive_datetime`.
* `:inserted_at` - the name of the column for insertion times or `false`
* `:updated_at` - the name of the column for update times or `false`
* `:autogenerate` - a module-function-args tuple used for generating
both `inserted_at` and `updated_at` timestamps
All options can be pre-configured by setting `@timestamps_opts`.
"""
defmacro timestamps(opts \\ []) do
quote bind_quoted: binding() do
timestamps =
[inserted_at: :inserted_at, updated_at: :updated_at, type: :naive_datetime]
|> Keyword.merge(@timestamps_opts)
|> Keyword.merge(opts)
type = Keyword.fetch!(timestamps, :type)
autogen = timestamps[:autogenerate] || {Ecto.Schema, :__timestamps__, [type]}
if inserted_at = Keyword.fetch!(timestamps, :inserted_at) do
Ecto.Schema.field(inserted_at, type, [])
Module.put_attribute(__MODULE__, :ecto_autogenerate, {inserted_at, autogen})
end
if updated_at = Keyword.fetch!(timestamps, :updated_at) do
Ecto.Schema.field(updated_at, type, [])
Module.put_attribute(__MODULE__, :ecto_autogenerate, {updated_at, autogen})
Module.put_attribute(__MODULE__, :ecto_autoupdate, {updated_at, autogen})
end
end
end
@doc ~S"""
Indicates a one-to-many association with another schema.
The current schema has zero or more records of the other schema. The other
schema often has a `belongs_to` field with the reverse association.
## Options
* `:foreign_key` - Sets the foreign key, this should map to a field on the
other schema, defaults to the underscored name of the current schema
suffixed by `_id`
* `:references` - Sets the key on the current schema to be used for the
association, defaults to the primary key on the schema
* `:through` - Allow this association to be defined in terms of existing
associations. Read the section on `:through` associations for more info
* `:on_delete` - The action taken on associations when parent record
is deleted. May be `:nothing` (default), `:nilify_all` and `:delete_all`.
Notice `:on_delete` may also be set in migrations when creating a
reference. If supported, relying on the database via migrations
is preferred. `:nilify_all` and `:delete_all` will not cascade to child
records unless set via database migrations.
* `:on_replace` - The action taken on associations when the record is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, `:nilify`, or `:delete`.
See `Ecto.Changeset`'s section on related data for more info.
* `:defaults` - Default values to use when building the association
* `:where` - A filter for the association. See "Filtering associations" below.
It does not apply to `:through` associations.
## Examples
defmodule Post do
use Ecto.Schema
schema "posts" do
has_many :comments, Comment
end
end
# Get all comments for a given post
post = Repo.get(Post, 42)
comments = Repo.all assoc(post, :comments)
# The comments can come preloaded on the post struct
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :comments))
post.comments #=> [%Comment{...}, ...]
`has_many` can be used to define hierachical relationships within a single
schema, for example threaded comments.
defmodule Comment do
use Ecto.Schema
schema "comments" do
field :content, :string
field :parent_id, :integer
belongs_to :parent, Comment, foreign_key: :id, references: :parent_id, define_field: false
has_many :children, Comment, foreign_key: :parent_id, references: :id
end
end
## Filtering associations
A query can also be given instead of a schema. Querying, joining or preloading
the association will use the given query. Currently only where clauses can be
provided in queries. Let's see some examples:
defmodule Post do
use Ecto.Schema
schema "posts" do
has_many :public_comments, Comment,
where: [public: true]
has_many :deleted_comments, Comment,
where: dynamic([c], not(is_nil(comment.deleted_at)))
end
end
The `:where` option may receive a dynamic query, a keyword list or a MFA
(a tuple with a module, function and args to invoke). The MFA is especially
useful to avoid duplication in those definitions:
defmodule Comment do
def deleted_filter do
dynamic([c], not(is_nil(comment.deleted_at)))
end
end
defmodule Post do
use Ecto.Schema
schema "posts" do
has_many :deleted_comments, Comment,
where: {Comment, :deleted_filter, []}
end
end
**Important!** Please use this feature only when strictly necessary,
otherwise it is very easy to end-up with large schemas with dozens of
different associations polluting your schema and affecting your
application performance. For instance, if you are using associations
only for different querying purposes, then it is preferrable to build
and compose queries, rather than defining multiple associations:
posts
|> Ecto.assoc(:comments)
|> Comment.deleted()
Or when preloading:
from posts, preload: [comments: ^Comment.deleted()]
## has_many/has_one :through
Ecto also supports defining associations in terms of other associations
via the `:through` option. Let's see an example:
defmodule Post do
use Ecto.Schema
schema "posts" do
has_many :comments, Comment
has_one :permalink, Permalink
# In the has_many :through example below, the `:comments`
# in the list [:comments, :author] refers to the
# `has_many :comments` in the Post own schema and the
# `:author` refers to the `belongs_to :author` of the
# Comment's schema (the module below).
# (see the description below for more details)
has_many :comments_authors, through: [:comments, :author]
# Specify the association with custom source
has_many :tags, {"posts_tags", Tag}
end
end
defmodule Comment do
use Ecto.Schema
schema "comments" do
belongs_to :author, Author
belongs_to :post, Post
has_one :post_permalink, through: [:post, :permalink]
end
end
In the example above, we have defined a `has_many :through` association
named `:comments_authors`. A `:through` association always expects a list
and the first element of the list must be a previously defined association
in the current module. For example, `:comments_authors` first points to
`:comments` in the same module (Post), which then points to `:author` in
the next schema, `Comment`.
This `:through` association will return all authors for all comments
that belongs to that post:
# Get all comments authors for a given post
post = Repo.get(Post, 42)
authors = Repo.all assoc(post, :comments_authors)
Although we used the `:through` association in the example above, Ecto
also allows developers to dynamically build the through associations using
the `Ecto.assoc/2` function:
assoc(post, [:comments, :author])
In fact, given `:through` associations are read-only, **using the `Ecto.assoc/2`
format is the preferred mechanism for working with through associations**. Use
the schema-based one only if you need to store the through data alongside of
the parent struct, in specific cases such as preloading.
`:through` associations can also be preloaded. In such cases, not only
the `:through` association is preloaded but all intermediate steps are
preloaded too:
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :comments_authors))
post.comments_authors #=> [%Author{...}, ...]
# The comments for each post will be preloaded too
post.comments #=> [%Comment{...}, ...]
# And the author for each comment too
hd(post.comments).author #=> %Author{...}
When the `:through` association is expected to return one or zero items,
`has_one :through` should be used instead, as in the example at the beginning
of this section:
# How we defined the association above
has_one :post_permalink, through: [:post, :permalink]
# Get a preloaded comment
[comment] = Repo.all(Comment) |> Repo.preload(:post_permalink)
comment.post_permalink #=> %Permalink{...}
"""
defmacro has_many(name, queryable, opts \\ []) do
queryable = expand_alias(queryable, __CALLER__)
quote do
Ecto.Schema.__has_many__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
end
end
@doc ~S"""
Indicates a one-to-one association with another schema.
The current schema has zero or one records of the other schema. The other
schema often has a `belongs_to` field with the reverse association.
## Options
* `:foreign_key` - Sets the foreign key, this should map to a field on the
other schema, defaults to the underscored name of the current module
suffixed by `_id`
* `:references` - Sets the key on the current schema to be used for the
association, defaults to the primary key on the schema
* `:through` - If this association must be defined in terms of existing
associations. Read the section in `has_many/3` for more information
* `:on_delete` - The action taken on associations when parent record
is deleted. May be `:nothing` (default), `:nilify_all` and `:delete_all`.
Notice `:on_delete` may also be set in migrations when creating a
reference. If supported, relying on the database via migrations
is preferred. `:nilify_all` and `:delete_all` will not cascade to child
records unless set via database migrations.
* `:on_replace` - The action taken on associations when the record is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, `:nilify`, `:update`, or
`:delete`. See `Ecto.Changeset`'s section on related data for more info.
* `:defaults` - Default values to use when building the association
* `:where` - A filter for the association. See "Filtering associations"
in `has_many/3`. It does not apply to `:through` associations.
## Examples
defmodule Post do
use Ecto.Schema
schema "posts" do
has_one :permalink, Permalink
# Specify the association with custom source
has_one :category, {"posts_categories", Category}
end
end
# The permalink can come preloaded on the post struct
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :permalink))
post.permalink #=> %Permalink{...}
## Using Queries as Associations
A query can also be given instead of a schema. Querying, joining or preloading the association will
use the given query. Currently only where clauses can be provided in queries. Let's see an example:
defmodule Post do
...
def active() do
from post in Post,
where: post.active
end
end
defmodule Comment do
use Ecto.Schema
schema "comments" do
has_one :post, Post.active()
end
end
Note: building the association does not consider the query filters.
For example, if the given query requires the active field of the associated records to be true,
building such association won't automatically set the active field to true.
"""
defmacro has_one(name, queryable, opts \\ []) do
queryable = expand_alias(queryable, __CALLER__)
quote do
Ecto.Schema.__has_one__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
end
end
@doc ~S"""
Indicates a one-to-one or many-to-one association with another schema.
The current schema belongs to zero or one records of the other schema. The other
schema often has a `has_one` or a `has_many` field with the reverse association.
You should use `belongs_to` in the table that contains the foreign key. Imagine
a company <-> employee relationship. If the employee contains the `company_id` in
the underlying database table, we say the employee belongs to company.
In fact, when you invoke this macro, a field with the name of foreign key is
automatically defined in the schema for you.
## Options
* `:foreign_key` - Sets the foreign key field name, defaults to the name
of the association suffixed by `_id`. For example, `belongs_to :company`
will define foreign key of `:company_id`
* `:references` - Sets the key on the other schema to be used for the
association, defaults to: `:id`
* `:define_field` - When false, does not automatically define a `:foreign_key`
field, implying the user is defining the field manually elsewhere
* `:type` - Sets the type of automatically defined `:foreign_key`.
Defaults to: `:integer` and can be set per schema via `@foreign_key_type`
* `:on_replace` - The action taken on associations when the record is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, `:nilify`, `:update`, or `:delete`.
See `Ecto.Changeset`'s section on related data for more info.
* `:defaults` - Default values to use when building the association
* `:primary_key` - If the underlying belongs_to field is a primary key
* `:source` - The source for the underlying field
* `:where` - A filter for the association. See "Filtering associations"
in `has_many/3`.
## Examples
defmodule Comment do
use Ecto.Schema
schema "comments" do
belongs_to :post, Post
end
end
# The post can come preloaded on the comment record
[comment] = Repo.all(from(c in Comment, where: c.id == 42, preload: :post))
comment.post #=> %Post{...}
If you need custom options on the underlying field, you can define the
field explicitly and then pass `define_field: false` to `belongs_to`:
defmodule Comment do
use Ecto.Schema
schema "comments" do
field :post_id, :integer, ... # custom options
belongs_to :post, Post, define_field: false
end
end
## Using Queries as Associations
A query can also be given instead of a schema. Querying, joining or preloading the association will
use the given query. Currently only where clauses can be provided in queries. Let's see an example:
defmodule Post do
...
def active() do
from post in Post,
where: post.active
end
end
defmodule Comment do
use Ecto.Schema
schema "posts" do
belongs_to :post, Post.active()
end
end
Note: building the association does not consider the query filters.
For example, if the given query requires the active field of the associated records to be true,
building such association won't automatically set the active field to true.
## Polymorphic associations
One common use case for belongs to associations is to handle
polymorphism. For example, imagine you have defined a Comment
schema and you wish to use it for commenting on both tasks and
posts.
Some abstractions would force you to define some sort of
polymorphic association with two fields in your database:
* commentable_type
* commentable_id
The problem with this approach is that it breaks references in
the database. You can't use foreign keys and it is very inefficient,
both in terms of query time and storage.
In Ecto, we have three ways to solve this issue. The simplest
is to define multiple fields in the Comment schema, one for each
association:
* task_id
* post_id
Unless you have dozens of columns, this is simpler for the developer,
more DB friendly and more efficient in all aspects.
Alternatively, because Ecto does not tie a schema to a given table,
we can use separate tables for each association. Let's start over
and define a new Comment schema:
defmodule Comment do
use Ecto.Schema
schema "abstract table: comments" do
# This will be used by associations on each "concrete" table
field :assoc_id, :integer
end
end
Notice we have changed the table name to "abstract table: comments".
You can choose whatever name you want, the point here is that this
particular table will never exist.
Now in your Post and Task schemas:
defmodule Post do
use Ecto.Schema
schema "posts" do
has_many :comments, {"posts_comments", Comment}, foreign_key: :assoc_id
end
end
defmodule Task do
use Ecto.Schema
schema "tasks" do
has_many :comments, {"tasks_comments", Comment}, foreign_key: :assoc_id
end
end
Now each association uses its own specific table, "posts_comments"
and "tasks_comments", which must be created on migrations. The
advantage of this approach is that we never store unrelated data
together, also ensuring we keep database references fast and correct.
When using this technique, the only limitation is that you cannot
build comments directly. For example, the command below
Repo.insert!(%Comment{})
will attempt to use the abstract table. Instead, one should use
Repo.insert!(build_assoc(post, :comments))
leveraging the `Ecto.build_assoc/3` function. You can also
use `Ecto.assoc/2` and `Ecto.Query.assoc/2` in the query syntax
to easily retrieve associated comments to a given post or
task:
# Fetch all comments associated with the given task
Repo.all(assoc(task, :comments))
Or all comments in a given table:
Repo.all from(c in {"posts_comments", Comment}), ...)
The third and final option is to use `many_to_many/3` to
define the relationships between the resources. In this case,
the comments table won't have the foreign key, instead there
  is an intermediary table responsible for associating the entries:
defmodule Comment do
use Ecto.Schema
schema "comments" do
# ...
end
end
In your posts and tasks:
defmodule Post do
use Ecto.Schema
schema "posts" do
many_to_many :comments, Comment, join_through: "posts_comments"
end
end
defmodule Task do
use Ecto.Schema
schema "tasks" do
many_to_many :comments, Comment, join_through: "tasks_comments"
end
end
See `many_to_many/3` for more information on this particular approach.
"""
  defmacro belongs_to(name, queryable, opts \\ []) do
    # Expand the alias at definition time so the stored association
    # reference is fully qualified (see expand_alias/2).
    queryable = expand_alias(queryable, __CALLER__)
    quote do
      # All validation/registration happens in __belongs_to__/4 when the
      # enclosing schema module is compiled.
      Ecto.Schema.__belongs_to__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
    end
  end
@doc ~S"""
Indicates a many-to-many association with another schema.
The association happens through a join schema or source, containing
foreign keys to the associated schemas. For example, the association
below:
# from MyApp.Post
many_to_many :tags, MyApp.Tag, join_through: "posts_tags"
is backed by relational databases through a join table as follows:
[Post] <-> [posts_tags] <-> [Tag]
id <-- post_id
tag_id --> id
More information on the migration for creating such a schema is shown
below.
## Options
* `:join_through` - specifies the source of the associated data.
It may be a string, like "posts_tags", representing the
underlying storage table or an atom, like `MyApp.PostTag`,
representing a schema. This option is required.
* `:join_keys` - specifies how the schemas are associated. It
expects a keyword list with two entries, the first being how
the join table should reach the current schema and the second
how the join table should reach the associated schema. In the
example above, it defaults to: `[post_id: :id, tag_id: :id]`.
The keys are inflected from the schema names.
* `:on_delete` - The action taken on associations when the parent record
is deleted. May be `:nothing` (default) or `:delete_all`.
`:delete_all` will only remove data from the join source, never the
associated records. Notice `:on_delete` may also be set in migrations
when creating a reference. If supported, relying on the database via
migrations is preferred. `:nilify_all` and `:delete_all` will not cascade
to child records unless set via database migrations.
* `:on_replace` - The action taken on associations when the record is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, or `:delete`.
`:delete` will only remove data from the join source, never the
associated records. See `Ecto.Changeset`'s section on related data
for more info.
* `:defaults` - Default values to use when building the association
* `:unique` - When true, checks if the associated entries are unique.
This is done by checking the primary key of the associated entries during
repository operations. Keep in mind this does not guarantee uniqueness at the
database level. For such it is preferred to set a unique index in the database.
For example: `create unique_index(:posts_tags, [:post_id, :tag_id])`
* `:where` - A filter for the association. See "Filtering associations"
in `has_many/3`
* `:join_through_where` - A filter for the join through association.
See "Filtering associations" in `has_many/3`
## Removing data
If you attempt to remove associated `many_to_many` data, **Ecto will
always remove data from the join schema and never from the target
associations** be it by setting `:on_replace` to `:delete`, `:on_delete`
to `:delete_all` or by using changeset functions such as
`Ecto.Changeset.put_assoc/3`. For example, if a `Post` has a many to many
relationship with `Tag`, setting `:on_delete` to `:delete_all` will
only delete entries from the "posts_tags" table in case `Post` is
deleted.
## Migration
How your migration should be structured depends on the value you pass
in `:join_through`. If `:join_through` is simply a string, representing
a table, you may define a table without primary keys and you must not
include any further columns, as those values won't be set by Ecto:
create table(:posts_tags, primary_key: false) do
add :post_id, references(:posts)
add :tag_id, references(:tags)
end
However, if your `:join_through` is a schema, like `MyApp.PostTag`, your
join table may be structured as any other table in your codebase,
including timestamps:
create table(:posts_tags) do
add :post_id, references(:posts)
add :tag_id, references(:tags)
timestamps()
end
Because `:join_through` contains a schema, in such cases, autogenerated
values and primary keys will be automatically handled by Ecto.
## Examples
defmodule Post do
use Ecto.Schema
schema "posts" do
many_to_many :tags, Tag, join_through: "posts_tags"
end
end
# Let's create a post and a tag
post = Repo.insert!(%Post{})
tag = Repo.insert!(%Tag{name: "introduction"})
# We can associate at any time post and tags together using changesets
post
|> Repo.preload(:tags) # Load existing data
|> Ecto.Changeset.change() # Build the changeset
|> Ecto.Changeset.put_assoc(:tags, [tag]) # Set the association
|> Repo.update!
# In a later moment, we may get all tags for a given post
post = Repo.get(Post, 42)
tags = Repo.all(assoc(post, :tags))
# The tags may also be preloaded on the post struct for reading
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :tags))
post.tags #=> [%Tag{...}, ...]
## Join Schema Example
You may prefer to use a join schema to handle many_to_many associations. The
decoupled nature of Ecto allows us to create a "join" struct which
`belongs_to` both sides of the many to many association.
In our example, a User has and belongs to many Organizations
defmodule UserOrganization do
use Ecto.Schema
@primary_key false
schema "users_organizations" do
belongs_to :user, User
belongs_to :organization, Organization
timestamps() # Added bonus, a join schema will also allow you to set timestamps
end
def changeset(struct, params \\ %{}) do
struct
|> Ecto.Changeset.cast(params, [:user_id, :organization_id])
|> Ecto.Changeset.validate_required([:user_id, :organization_id])
# Maybe do some counter caching here!
end
end
defmodule User do
use Ecto.Schema
schema "users" do
many_to_many :organizations, Organization, join_through: UserOrganization
end
end
defmodule Organization do
use Ecto.Schema
schema "organizations" do
many_to_many :users, User, join_through: UserOrganization
end
end
# Then to create the association, pass in the ID's of an existing
# User and Organization to UserOrganization.changeset
changeset = UserOrganization.changeset(%UserOrganization{}, %{user_id: id, organization_id: id})
case Repo.insert(changeset) do
{:ok, assoc} -> # Assoc was created!
{:error, changeset} -> # Handle the error
end
## Using Queries as Associations
A query can also be given instead of a schema, both for the join_through and the destination.
Querying, joining or preloading the association will use the given query. Currently only where
clauses can be provided in queries. Let's see an example:
defmodule UserOrganization do
use Ecto.Schema
@primary_key false
schema "users_organizations" do
belongs_to :user, User
belongs_to :organization, Organization
field :deleted, :boolean
timestamps() # Added bonus, a join schema will also allow you to set timestamps
end
def active() do
from user_organization in UserOrganization,
where: is_nil(user_organization.deleted)
end
end
defmodule User do
use Ecto.Schema
schema "users" do
many_to_many :organizations, Organization, join_through: UserOrganization
field :banned, :boolean
end
def not_banned() do
from user in User,
where: not(user.banned)
end
end
defmodule Organization do
use Ecto.Schema
schema "organizations" do
many_to_many :users, User.not_banned(), join_through: UserOrganization.active()
end
end
Note: building the association does not consider the query filters.
For example, if the given query requires the active field of the associated records to be true,
building such association won't automatically set the active field to true.
"""
  defmacro many_to_many(name, queryable, opts \\ []) do
    # Expand the alias at definition time so the stored association
    # reference is fully qualified (see expand_alias/2).
    queryable = expand_alias(queryable, __CALLER__)
    quote do
      # All validation/registration happens in __many_to_many__/4 when the
      # enclosing schema module is compiled.
      Ecto.Schema.__many_to_many__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
    end
  end
## Embeds
@doc ~S"""
Indicates an embedding of a schema.
The current schema has zero or one records of the other schema embedded
inside of it. It uses a field similar to the `:map` type for storage,
but allows embeds to have all the things regular schema can.
You must declare your `embeds_one/3` field with type `:map` at the
database level.
  The embedded may or may not have a primary key. Ecto uses the primary keys
  to detect if an embed is being updated or not. If a primary key is not present,
`:on_replace` should be set to either `:update` or `:delete` if there is a
desire to either update or delete the current embed when a new one is set.
## Options
* `:on_replace` - The action taken on associations when the embed is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, `:update`, or `:delete`.
See `Ecto.Changeset`'s section on related data for more info.
## Examples
defmodule Order do
use Ecto.Schema
schema "orders" do
embeds_one :item, Item
end
end
defmodule Item do
use Ecto.Schema
embedded_schema do
field :title
end
end
# The item is loaded with the order
order = Repo.get!(Order, 42)
order.item #=> %Item{...}
Adding and removal of embeds can only be done via the `Ecto.Changeset`
API so Ecto can properly track the embed life-cycle:
order = Repo.get!(Order, 42)
item = %Item{title: "Soap"}
# Generate a changeset
changeset = Ecto.Changeset.change(order)
# Put a new embed to the changeset
changeset = Ecto.Changeset.put_embed(changeset, :item, item)
# Update the order, and fetch the item
item = Repo.update!(changeset).item
# Item is generated with a unique identification
item
# => %Item{id: "20a97d94-f79b-4e63-a875-85deed7719b7", title: "Soap"}
## Inline embedded schema
The schema module can be defined inline in the parent schema in simple
cases:
defmodule Parent do
use Ecto.Schema
schema "parents" do
field :name, :string
embeds_one :child, Child do
field :name, :string
field :age, :integer
end
end
end
When defining an inline embed, the `:primary_key` option may be given to
customize the embed primary key type.
Defining embedded schema in such a way will define a `Parent.Child` module
  with the appropriate struct. In order to properly cast the embedded schema,
  when casting the inline-defined embedded schemas you need to use the `:with`
option of `cast_embed/3` to provide the proper function to do the casting.
For example:
def changeset(schema, params) do
schema
|> cast(params, [:name])
|> cast_embed(:child, with: &child_changeset/2)
end
defp child_changeset(schema, params) do
schema
|> cast(params, [:name, :age])
end
## Encoding and decoding
Because many databases do not support direct encoding and decoding
of embeds, it is often emulated by Ecto by using specific encoding
and decoding rules.
For example, PostgreSQL will store embeds on top of JSONB columns,
which means types in embedded schemas won't go through the usual
dump->DB->load cycle but rather encode->DB->decode->cast. This means
that, when using embedded schemas with databases like PG or MySQL,
make sure all of your types can be JSON encoded/decoded correctly.
Ecto provides this guarantee for all built-in types.
"""
  # Bodiless head declaring the default for `opts`; the two clauses below
  # distinguish an inline `do` block from a plain options list.
  defmacro embeds_one(name, schema, opts \\ [])
  defmacro embeds_one(name, schema, do: block) do
    quote do
      # Inline definition: delegate to embeds_one/4, which builds the
      # child module from the block.
      embeds_one(unquote(name), unquote(schema), [], do: unquote(block))
    end
  end
  defmacro embeds_one(name, schema, opts) do
    # Expand the alias now so the stored schema reference is fully qualified.
    schema = expand_alias(schema, __CALLER__)
    quote do
      Ecto.Schema.__embeds_one__(__MODULE__, unquote(name), unquote(schema), unquote(opts))
    end
  end
@doc """
Indicates an embedding of a schema.
For options and examples see documentation of `embeds_one/3`.
"""
defmacro embeds_one(name, schema, opts, do: block) do
quote do
{schema, opts} = Ecto.Schema.__embeds_module__(__ENV__, unquote(schema), unquote(opts), unquote(Macro.escape(block)))
Ecto.Schema.__embeds_one__(__MODULE__, unquote(name), schema, opts)
end
end
@doc ~S"""
Indicates an embedding of many schemas.
The current schema has zero or more records of the other schema embedded
inside of it. Embeds have all the things regular schemas have.
It is recommended to declare your `embeds_many/3` field with type `:map`
and a default of `"[]"` (although Ecto will also automatically translate
`nil` values from the database into empty lists).
  The embedded may or may not have a primary key. Ecto uses the primary keys
  to detect if an embed is being updated or not. If a primary key is not present
and you still want the list of embeds to be updated, `:on_replace` must be
set to `:delete`, forcing all current embeds to be deleted and replaced by
new ones whenever a new list of embeds is set.
For encoding and decoding of embeds, please read the docs for
`embeds_one/3`.
## Options
* `:on_replace` - The action taken on associations when the embed is
replaced when casting or manipulating parent changeset. May be
`:raise` (default), `:mark_as_invalid`, or `:delete`.
See `Ecto.Changeset`'s section on related data for more info.
## Examples
defmodule Order do
use Ecto.Schema
schema "orders" do
embeds_many :items, Item
end
end
defmodule Item do
use Ecto.Schema
embedded_schema do
field :title
end
end
# The items are loaded with the order
order = Repo.get!(Order, 42)
order.items #=> [%Item{...}, ...]
Adding and removal of embeds can only be done via the `Ecto.Changeset`
API so Ecto can properly track the embed life-cycle:
# Order has no items
order = Repo.get!(Order, 42)
order.items
# => []
items = [%Item{title: "Soap"}]
# Generate a changeset
changeset = Ecto.Changeset.change(order)
# Put a one or more new items
changeset = Ecto.Changeset.put_embed(changeset, :items, items)
# Update the order and fetch items
items = Repo.update!(changeset).items
# Items are generated with a unique identification
items
# => [%Item{id: "20a97d94-f79b-4e63-a875-85deed7719b7", title: "Soap"}]
Updating of embeds must be done using a changeset for each changed embed.
      # Order has existing items
order = Repo.get!(Order, 42)
order.items
# => [%Item{id: "20a97d94-f79b-4e63-a875-85deed7719b7", title: "Soap"}]
# Generate a changeset
changeset = Ecto.Changeset.change(order)
# Put the updated item as a changeset
current_item = List.first(order.items)
item_changeset = Ecto.Changeset.change(current_item, title: "Mujju's Soap")
order_changeset = Ecto.Changeset.put_embed(changeset, :items, [item_changeset])
# Update the order and fetch items
items = Repo.update!(order_changeset).items
# Item has the updated title
items
# => [%Item{id: "20a97d94-f79b-4e63-a875-85deed7719b7", title: "Mujju's Soap"}]
## Inline embedded schema
The schema module can be defined inline in the parent schema in simple
cases:
defmodule Parent do
use Ecto.Schema
schema "parents" do
field :name, :string
embeds_many :children, Child do
field :name, :string
field :age, :integer
end
end
end
When defining an inline embed, the `:primary_key` option may be given to
customize the embed primary key type.
Defining embedded schema in such a way will define a `Parent.Child` module
  with the appropriate struct. In order to properly cast the embedded schema,
  when casting the inline-defined embedded schemas you need to use the `:with`
option of `cast_embed/3` to provide the proper function to do the casting.
For example:
def changeset(schema, params) do
schema
|> cast(params, [:name])
|> cast_embed(:children, with: &child_changeset/2)
end
defp child_changeset(schema, params) do
schema
|> cast(params, [:name, :age])
end
"""
  # Bodiless head declaring the default for `opts`; the two clauses below
  # distinguish an inline `do` block from a plain options list.
  defmacro embeds_many(name, schema, opts \\ [])
  defmacro embeds_many(name, schema, do: block) do
    quote do
      # Inline definition: delegate to embeds_many/4, which builds the
      # child module from the block.
      embeds_many(unquote(name), unquote(schema), [], do: unquote(block))
    end
  end
  defmacro embeds_many(name, schema, opts) do
    # Expand the alias now so the stored schema reference is fully qualified.
    schema = expand_alias(schema, __CALLER__)
    quote do
      Ecto.Schema.__embeds_many__(__MODULE__, unquote(name), unquote(schema), unquote(opts))
    end
  end
@doc """
Indicates an embedding of many schemas.
For options and examples see documentation of `embeds_many/3`.
"""
defmacro embeds_many(name, schema, opts, do: block) do
quote do
{schema, opts} = Ecto.Schema.__embeds_module__(__ENV__, unquote(schema), unquote(opts), unquote(Macro.escape(block)))
Ecto.Schema.__embeds_many__(__MODULE__, unquote(name), schema, opts)
end
end
@doc """
Internal function for integrating associations into schemas.
This function exists as an extension point for libraries to
add new types of associations to Ecto. For the existing APIs,
see `belongs_to/3`, `has_many/3`, `has_one/3` and `many_to_many/3`.
This function expects the current schema, the association cardinality,
the association name, the association module (that implements
`Ecto.Association` callbacks) and a keyword list of options.
"""
@spec association(module, :one | :many, atom(), module, Keyword.t) :: Ecto.Association.t
def association(schema, cardinality, name, association, opts) do
not_loaded = %Ecto.Association.NotLoaded{
__owner__: schema,
__field__: name,
__cardinality__: cardinality
}
put_struct_field(schema, name, not_loaded)
opts = [cardinality: cardinality] ++ opts
struct = association.struct(schema, name, opts)
Module.put_attribute(schema, :ecto_assocs, {name, struct})
struct
end
## Callbacks
@doc false
def __timestamps__(:naive_datetime) do
%{NaiveDateTime.utc_now() | microsecond: {0, 0}}
end
def __timestamps__(:naive_datetime_usec) do
NaiveDateTime.utc_now()
end
def __timestamps__(:utc_datetime) do
DateTime.from_unix!(System.system_time(:second), :second)
end
def __timestamps__(:utc_datetime_usec) do
DateTime.from_unix!(System.system_time(:microsecond), :microsecond)
end
def __timestamps__(type) do
type.from_unix!(System.system_time(:microsecond), :microsecond)
end
@doc false
# Loads data into struct by assumes fields are properly
# named and belongs to the struct. Types and values are
# zipped together in one pass as they are loaded.
def __safe_load__(struct, types, values, prefix, source, loader) do
zipped = safe_load_zip(types, values, struct, loader)
case Map.merge(struct, Map.new(zipped)) do
%{__meta__: %Metadata{} = metadata} = struct ->
metadata = %{metadata | state: :loaded, source: source, prefix: prefix}
Map.put(struct, :__meta__, metadata)
map ->
map
end
end
defp safe_load_zip([{field, type} | fields], [value | values], struct, loader) do
[{field, load!(struct, field, type, value, loader)} |
safe_load_zip(fields, values, struct, loader)]
end
defp safe_load_zip([], [], _struct, _loader) do
[]
end
@doc false
# Assumes data does not all belongs to schema/struct
# and that it may also require source-based renaming.
def __unsafe_load__(schema, data, loader) do
types = schema.__schema__(:load)
struct = schema.__struct__()
case __unsafe_load__(struct, types, data, loader) do
%{__meta__: %Metadata{} = metadata} = struct ->
Map.put(struct, :__meta__, %{metadata | state: :loaded})
map ->
map
end
end
@doc false
def __unsafe_load__(struct, types, map, loader) when is_map(map) do
Enum.reduce(types, struct, fn pair, acc ->
{field, source, type} = field_source_and_type(pair)
case fetch_string_or_atom_field(map, source) do
{:ok, value} -> Map.put(acc, field, load!(struct, field, type, value, loader))
:error -> acc
end
end)
end
@compile {:inline, field_source_and_type: 1, fetch_string_or_atom_field: 2}
defp field_source_and_type({field, {:source, source, type}}) do
{field, source, type}
end
defp field_source_and_type({field, type}) do
{field, field, type}
end
defp fetch_string_or_atom_field(map, field) when is_atom(field) do
case Map.fetch(map, Atom.to_string(field)) do
{:ok, value} -> {:ok, value}
:error -> Map.fetch(map, field)
end
end
defp load!(struct, field, type, value, loader) do
case loader.(type, value) do
{:ok, value} ->
value
:error ->
raise ArgumentError, "cannot load `#{inspect value}` as type #{inspect type} " <>
"for field `#{field}`#{error_data(struct)}"
end
end
defp error_data(%{__struct__: atom}) do
" in schema #{inspect atom}"
end
defp error_data(other) when is_map(other) do
""
end
  @doc false
  # Entry point used by the `field/3` macro: validates the declared type
  # and then registers the field on the schema module.
  def __field__(mod, name, type, opts) do
    check_field_type!(name, type, opts)
    define_field(mod, name, type, opts)
  end
  # Registers a field on the schema module by writing to the module
  # attributes the compile-time callbacks later read (:ecto_fields,
  # :changeset_fields, :struct_fields, autogenerate metadata, ...).
  defp define_field(mod, name, type, opts) do
    virtual? = opts[:virtual] || false
    pk? = opts[:primary_key] || false
    Module.put_attribute(mod, :changeset_fields, {name, type})
    put_struct_field(mod, name, Keyword.get(opts, :default))
    # Virtual fields live only in the struct: no source mapping, no
    # autogenerate handling, and they are not listed in :ecto_fields.
    unless virtual? do
      # The explicit :source option wins over the schema-wide mapper.
      source = opts[:source] || Module.get_attribute(mod, :field_source_mapper).(name)
      if name != source do
        Module.put_attribute(mod, :ecto_field_sources, {name, source})
      end
      # `raw` keeps the truthy option value for the conflict check below.
      if raw = opts[:read_after_writes] do
        Module.put_attribute(mod, :ecto_raw, name)
      end
      case gen = opts[:autogenerate] do
        {_, _, _} ->
          # {module, function, args} generator.
          store_mfa_autogenerate!(mod, name, type, gen)
        true ->
          # Generation is delegated to the type (or ID handling for PKs).
          store_type_autogenerate!(mod, name, source || name, type, pk?)
        _ ->
          :ok
      end
      if raw && gen do
        raise ArgumentError, "cannot mark the same field as autogenerate and read_after_writes"
      end
      if pk? do
        Module.put_attribute(mod, :ecto_primary_keys, name)
      end
      Module.put_attribute(mod, :ecto_fields, {name, type})
    end
  end
@valid_has_options [:foreign_key, :references, :through, :on_delete, :defaults, :on_replace, :where]
@doc false
def __has_many__(mod, name, queryable, opts) do
check_options!(opts, @valid_has_options, "has_many/3")
if is_list(queryable) and Keyword.has_key?(queryable, :through) do
association(mod, :many, name, Ecto.Association.HasThrough, queryable)
else
struct =
association(mod, :many, name, Ecto.Association.Has, [queryable: queryable] ++ opts)
Module.put_attribute(mod, :changeset_fields, {name, {:assoc, struct}})
end
end
@doc false
def __has_one__(mod, name, queryable, opts) do
check_options!(opts, @valid_has_options, "has_one/3")
if is_list(queryable) and Keyword.has_key?(queryable, :through) do
association(mod, :one, name, Ecto.Association.HasThrough, queryable)
else
struct =
association(mod, :one, name, Ecto.Association.Has, [queryable: queryable] ++ opts)
Module.put_attribute(mod, :changeset_fields, {name, {:assoc, struct}})
end
end
  # :primary_key is valid here to support associative entity
  # https://en.wikipedia.org/wiki/Associative_entity
  @valid_belongs_to_options [:foreign_key, :references, :define_field, :type,
                             :on_replace, :defaults, :primary_key, :source, :where]
  @doc false
  # Registers a belongs_to association: defines the foreign-key field
  # (unless :define_field is false) and stores the association reflection.
  def __belongs_to__(mod, name, queryable, opts) do
    check_options!(opts, @valid_belongs_to_options, "belongs_to/3")
    # The foreign key defaults to "<association name>_id".
    opts = Keyword.put_new(opts, :foreign_key, :"#{name}_id")
    foreign_key_type = opts[:type] || Module.get_attribute(mod, :foreign_key_type)
    if name == Keyword.get(opts, :foreign_key) do
      raise ArgumentError, "foreign_key #{inspect name} must be distinct from corresponding association name"
    end
    if Keyword.get(opts, :define_field, true) do
      # The FK field receives the full opts, so e.g. :primary_key and
      # :source apply to it as well.
      __field__(mod, opts[:foreign_key], foreign_key_type, opts)
    end
    struct =
      association(mod, :one, name, Ecto.Association.BelongsTo, [queryable: queryable] ++ opts)
    Module.put_attribute(mod, :changeset_fields, {name, {:assoc, struct}})
  end
@valid_many_to_many_options [:join_through, :join_keys, :on_delete, :defaults, :on_replace,
:unique, :where, :join_through_where]
@doc false
def __many_to_many__(mod, name, queryable, opts) do
check_options!(opts, @valid_many_to_many_options, "many_to_many/3")
struct =
association(mod, :many, name, Ecto.Association.ManyToMany, [queryable: queryable] ++ opts)
Module.put_attribute(mod, :changeset_fields, {name, {:assoc, struct}})
end
  @valid_embeds_one_options [:strategy, :on_replace, :source]
  @doc false
  # Registers a single embedded schema on the current module.
  def __embeds_one__(mod, name, schema, opts) do
    check_options!(opts, @valid_embeds_one_options, "embeds_one/3")
    embed(mod, :one, name, schema, opts)
  end
  @valid_embeds_many_options [:strategy, :on_replace, :source]
  @doc false
  # Registers a list of embedded schemas on the current module.
  def __embeds_many__(mod, name, schema, opts) do
    check_options!(opts, @valid_embeds_many_options, "embeds_many/3")
    # embeds_many always defaults to an empty list; Keyword.put overrides
    # any user-supplied :default.
    opts = Keyword.put(opts, :default, [])
    embed(mod, :many, name, schema, opts)
  end
  @doc false
  # Builds the inline embedded schema module for embeds_one/4 and
  # embeds_many/4, returning {module, remaining_opts}.
  def __embeds_module__(env, name, opts, block) do
    # Inline embeds default to an autogenerated :binary_id primary key,
    # overridable through the :primary_key option.
    {pk, opts} = Keyword.pop(opts, :primary_key, {:id, :binary_id, autogenerate: true})
    block =
      quote do
        use Ecto.Schema
        @primary_key unquote(Macro.escape(pk))
        embedded_schema do
          unquote(block)
        end
      end
    # The child module is namespaced under the parent schema module.
    module = Module.concat(env.module, name)
    Module.create(module, block, env)
    {module, opts}
  end
## Quoted callbacks
  @doc false
  # Validates association targets after the module is compiled, emitting
  # a warning for every invalid association instead of raising.
  def __after_compile__(%{module: module} = env, _) do
    # If we are compiling code, we can validate associations now,
    # as the Elixir compiler will solve dependencies.
    #
    # TODO: This is a hack, don't do this at home, it may break any time.
    # Instead Elixir should provide a proper API to check for compilation.
    if Process.info(self(), :error_handler) == {:error_handler, Kernel.ErrorHandler} do
      for name <- module.__schema__(:associations) do
        assoc = module.__schema__(:association, name)
        case assoc.__struct__.after_compile_validation(assoc, env) do
          :ok ->
            :ok
          {:error, message} ->
            IO.warn "invalid association `#{assoc.field}` in schema #{inspect module}: #{message}",
                    Macro.Env.stacktrace(env)
        end
      end
    end
    :ok
  end
@doc false
def __schema__(fields, field_sources, assocs, embeds) do
load =
for {name, type} <- fields do
if alias = field_sources[name] do
{name, {:source, alias, type}}
else
{name, type}
end
end
dump =
for {name, type} <- fields do
{name, {field_sources[name] || name, type}}
end
field_sources_quoted =
for {name, _type} <- fields do
{[:field_source, name], field_sources[name] || name}
end
types_quoted =
for {name, type} <- fields do
{[:type, name], Macro.escape(type)}
end
source_types_quoted =
for {name, type} <- fields do
{[:source_type, field_sources[name] || name], Macro.escape(type)}
end
assoc_quoted =
for {name, refl} <- assocs do
{[:association, name], Macro.escape(refl)}
end
assoc_names = Enum.map(assocs, &elem(&1, 0))
embed_quoted =
for {name, refl} <- embeds do
{[:embed, name], Macro.escape(refl)}
end
embed_names = Enum.map(embeds, &elem(&1, 0))
single_arg = [
{[:dump], dump |> Map.new() |> Macro.escape()},
{[:load], load |> Macro.escape()},
{[:associations], assoc_names},
{[:embeds], embed_names}
]
catch_all = [
{[:field_source, quote(do: _)], nil},
{[:type, quote(do: _)], nil},
{[:source_type, quote(do: _)], nil},
{[:association, quote(do: _)], nil},
{[:embed, quote(do: _)], nil}
]
[
single_arg,
field_sources_quoted,
types_quoted,
source_types_quoted,
assoc_quoted,
embed_quoted,
catch_all
]
end
## Private
  # Shared implementation for embeds_one/embeds_many: defines the backing
  # field with an {:embed, struct} type and records the embed reflection.
  defp embed(mod, cardinality, name, schema, opts) do
    opts = [cardinality: cardinality, related: schema] ++ opts
    struct = Ecto.Embedded.struct(mod, name, opts)
    define_field(mod, name, {:embed, struct}, opts)
    Module.put_attribute(mod, :ecto_embeds, {name, struct})
  end
defp put_struct_field(mod, name, assoc) do
fields = Module.get_attribute(mod, :struct_fields)
if List.keyfind(fields, name, 0) do
raise ArgumentError, "field/association #{inspect name} is already set on schema"
end
Module.put_attribute(mod, :struct_fields, {name, assoc})
end
defp check_options!(opts, valid, fun_arity) do
case Enum.find(opts, fn {k, _} -> not(k in valid) end) do
{k, _} -> raise ArgumentError, "invalid option #{inspect k} for #{fun_arity}"
nil -> :ok
end
end
defp check_field_type!(name, :datetime, _opts) do
raise ArgumentError, "invalid type :datetime for field #{inspect name}. " <>
"You probably meant to choose one between :naive_datetime " <>
"(no time zone information) or :utc_datetime (time zone is set to UTC)"
end
defp check_field_type!(name, {:embed, _}, _opts) do
raise ArgumentError, "cannot declare field #{inspect name} as embed. Use embeds_one/many instead"
end
defp check_field_type!(name, type, opts) do
cond do
type == :any and !opts[:virtual] ->
raise ArgumentError, "only virtual fields can have type :any, " <>
"invalid type for field #{inspect name}"
Ecto.Type.primitive?(type) ->
type
is_atom(type) and Code.ensure_compiled?(type) and function_exported?(type, :type, 0) ->
type
is_atom(type) and function_exported?(type, :__schema__, 1) ->
raise ArgumentError,
"schema #{inspect type} is not a valid type for field #{inspect name}." <>
" Did you mean to use belongs_to, has_one, has_many, embeds_one, or embeds_many instead?"
true ->
raise ArgumentError, "invalid or unknown type #{inspect type} for field #{inspect name}"
end
end
defp store_mfa_autogenerate!(mod, name, type, mfa) do
if autogenerate_id(type) do
raise ArgumentError, ":autogenerate with {m, f, a} not supported by ID types"
else
Module.put_attribute(mod, :ecto_autogenerate, {name, mfa})
end
end
  # Validates and stores :autogenerate metadata when the generator is the
  # type itself (autogenerate: true) rather than an {m, f, a} tuple.
  defp store_type_autogenerate!(mod, name, source, type, pk?) do
    cond do
      id = autogenerate_id(type) ->
        # ID types (:id/:binary_id) are only allowed on a single
        # autogenerated primary key.
        cond do
          not pk? ->
            raise ArgumentError, "only primary keys allow :autogenerate for type #{inspect type}, " <>
                                 "field #{inspect name} is not a primary key"
          Module.get_attribute(mod, :ecto_autogenerate_id) ->
            raise ArgumentError, "only one primary key with ID type may be marked as autogenerated"
          true ->
            Module.put_attribute(mod, :ecto_autogenerate_id, {name, source, id})
        end
      Ecto.Type.primitive?(type) ->
        raise ArgumentError, "field #{inspect name} does not support :autogenerate because it uses a " <>
                             "primitive type #{inspect type}"
      # Note the custom type has already been loaded in check_type!/3
      not function_exported?(type, :autogenerate, 0) ->
        raise ArgumentError, "field #{inspect name} does not support :autogenerate because it uses a " <>
                             "custom type #{inspect type} that does not define autogenerate/0"
      true ->
        Module.put_attribute(mod, :ecto_autogenerate, {name, {type, :autogenerate, []}})
    end
  end
defp autogenerate_id(type) do
id = if Ecto.Type.primitive?(type), do: type, else: type.type
if id in [:id, :binary_id], do: id, else: nil
end
defp expand_alias({:__aliases__, _, _} = ast, env),
do: Macro.expand(ast, %{env | function: {:__schema__, 2}})
defp expand_alias(ast, _env),
do: ast
end
| 34.627387 | 123 | 0.666272 |
7342820dfb1dd5f5df7f48d5b02e157397000cb9 | 338 | ex | Elixir | lib/web/views/profile_view.ex | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 610 | 2017-08-09T15:20:25.000Z | 2022-03-27T15:49:07.000Z | lib/web/views/profile_view.ex | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 69 | 2017-09-23T04:02:30.000Z | 2022-03-19T21:08:21.000Z | lib/web/views/profile_view.ex | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 85 | 2017-09-23T04:07:11.000Z | 2021-11-20T06:44:56.000Z | defmodule Web.ProfileView do
use Web, :view
alias ExVenture.Users.Avatar
alias Web.FormView
def avatar?(user), do: user.avatar_key != nil
def avatar_img(user) do
link(to: Stein.Storage.url(Avatar.avatar_path(user, "original"))) do
img_tag(Stein.Storage.url(Avatar.avatar_path(user, "thumbnail")))
end
end
end
| 22.533333 | 72 | 0.710059 |
7342867647ff9e466f9cf66a6b9214950db4c787 | 5,546 | exs | Elixir | test/rfx/ops/credo/multi_alias_test.exs | pcorey/rfx | db5be95d93b7aba0cf9799db273d8583c21bfc26 | [
"MIT"
] | 31 | 2021-05-29T22:57:04.000Z | 2022-03-13T16:24:57.000Z | test/rfx/ops/credo/multi_alias_test.exs | pcorey/rfx | db5be95d93b7aba0cf9799db273d8583c21bfc26 | [
"MIT"
] | 4 | 2021-06-04T23:34:38.000Z | 2021-07-16T16:01:20.000Z | test/rfx/ops/credo/multi_alias_test.exs | pcorey/rfx | db5be95d93b7aba0cf9799db273d8583c21bfc26 | [
"MIT"
] | 4 | 2021-06-11T13:10:04.000Z | 2022-02-11T13:33:16.000Z | defmodule Rfx.Ops.Credo.MultiAliasTest do
  use ExUnit.Case

  alias Rfx.Ops.Credo.MultiAlias
  alias Rfx.Util.Source
  alias Rfx.Util.Tst

  # Input fixture: a single multi-alias line that the op should expand.
  @base_source """
  alias Foo.{Bar, Baz.Qux}
  """

  # Expected output after expansion into one alias per line.
  @base_expected """
  alias Foo.Bar
  alias Foo.Baz.Qux
  """

  # Unified ed-style diff that transforms @base_source into @base_expected.
  @base_diff """
  1c1,2
  < alias Foo.{Bar, Baz.Qux}
  ---
  > alias Foo.Bar
  > alias Foo.Baz.Qux
  """

  doctest MultiAlias
describe "#rfx_code" do
test "expected fields" do
[changereq | _] = MultiAlias.cl_code(@base_source)
refute changereq |> Map.get(:file_req)
assert changereq |> Map.get(:text_req)
assert changereq |> Map.get(:text_req) |> Map.get(:diff)
assert changereq |> Map.get(:text_req) |> Map.get(:input_text)
end
test "diff generation" do
[changereq | _] = MultiAlias.cl_code(@base_source)
diff = Map.get(changereq, :text_req) |> Map.get(:diff)
assert diff == @base_diff
end
test "patching" do
[changereq | _] = MultiAlias.cl_code(@base_source)
code = Map.get(changereq, :text_req) |> Map.get(:input_text)
diff = Map.get(changereq, :text_req) |> Map.get(:diff)
new_code = Source.patch(code, diff)
assert new_code == @base_expected
end
test "no change required source" do
changeset = MultiAlias.cl_code(@base_expected)
assert changeset == []
end
end
describe "#rfx_file! with source file" do
test "changeset length" do
file = Tst.gen_file(@base_source)
changeset = MultiAlias.cl_file(file)
assert [_single_item] = changeset
end
test "changereq fields" do
file = Tst.gen_file(@base_source)
[changereq| _ ] = MultiAlias.cl_file(file)
refute changereq |> Map.get(:file_req)
assert changereq |> Map.get(:text_req)
assert changereq |> Map.get(:text_req) |> Map.get(:diff)
assert changereq |> Map.get(:text_req) |> Map.get(:input_file)
end
test "diff generation" do
file = Tst.gen_file(@base_source)
[changereq | _] = MultiAlias.cl_file(file)
diff = Map.get(changereq, :text_req) |> Map.get(:diff)
assert diff == @base_diff
end
test "patching" do
file = Tst.gen_file(@base_source)
[changereq | _] = MultiAlias.cl_file(file)
code = Map.get(changereq, :text_req) |> Map.get(:input_file) |> File.read() |> elem(1)
diff = Map.get(changereq, :text_req) |> Map.get(:diff)
new_code = Source.patch(code, diff)
assert new_code == @base_expected
end
end
describe "#rfx_file! with keyword list" do
test "changeset length" do
file = Tst.gen_file(@base_source)
changeset = MultiAlias.cl_file(file)
assert [_single_item] = changeset
end
end
describe "#rfx_project!" do
test "changeset length" do
root_dir = Tst.gen_proj("mix new")
changeset = MultiAlias.cl_project(root_dir)
assert Enum.empty?(changeset)
end
test "changereq fields" do
file = Tst.gen_file(@base_source)
[changereq| _ ] = MultiAlias.cl_file(file)
refute changereq |> Map.get(:file_req)
assert changereq |> Map.get(:text_req)
assert changereq |> Map.get(:text_req) |> Map.get(:diff)
assert changereq |> Map.get(:text_req) |> Map.get(:input_file)
end
test "diff generation" do
file = Tst.gen_file(@base_source)
[changereq | _] = MultiAlias.cl_file(file)
diff = Map.get(changereq, :text_req) |> Map.get(:diff)
assert diff == @base_diff
end
test "patching" do
file = Tst.gen_file(@base_source)
[changereq | _] = MultiAlias.cl_file(file)
code = Map.get(changereq, :text_req) |> Map.get(:input_file) |> File.read() |> elem(1)
diff = Map.get(changereq, :text_req) |> Map.get(:diff)
new_code = Source.patch(code, diff)
assert new_code == @base_expected
end
end
describe "#rfx_tmpfile" do
test "expected fields for source file" do
file = Tst.gen_file(@base_source)
[changereq | _] = MultiAlias.cl_tmpfile(file)
refute changereq |> Map.get(:file_req)
assert changereq |> Map.get(:text_req)
assert changereq |> Map.get(:text_req) |> Map.get(:diff)
assert changereq |> Map.get(:text_req) |> Map.get(:input_file)
end
test "diff generation" do
file = Tst.gen_file(@base_source)
[changereq | _] = MultiAlias.cl_tmpfile(file)
diff = Map.get(changereq, :text_req) |> Map.get(:diff)
assert diff == @base_diff
end
test "patching" do
file = Tst.gen_file(@base_source)
[changereq | _] = MultiAlias.cl_tmpfile(file)
code = Map.get(changereq, :text_req) |> Map.get(:input_file) |> File.read() |> elem(1)
diff = Map.get(changereq, :text_req) |> Map.get(:diff)
new_code = Source.patch(code, diff)
assert new_code == @base_expected
end
test "no change required code" do
file = Tst.gen_file(@base_expected)
assert [] == MultiAlias.cl_tmpfile(file)
end
test "no change required ingested code" do
root = Tst.gen_proj("mix new")
proj = root |> String.split("/") |> Enum.reverse() |> Enum.at(0)
file = root <> "/lib/#{proj}.ex"
{:ok, code} = File.read(file)
assert [] == MultiAlias.cl_code(code)
end
test "no change required file" do
root_dir = Tst.gen_proj("mix new")
proj = root_dir |> String.split("/") |> Enum.reverse() |> Enum.at(0)
file = root_dir <> "/lib/#{proj}.ex"
changeset = MultiAlias.cl_tmpfile(file)
assert changeset == []
end
end
end
| 30.811111 | 92 | 0.626758 |
7342bdde1d669963bece0e20797d4e16e4a0db51 | 124 | ex | Elixir | lib/cain/endpoint/variable_instance.ex | pfitz/cain | c1038a9d80e51213ee665149c585231b16c11b3f | [
"Apache-2.0"
] | 6 | 2019-12-17T12:40:19.000Z | 2022-01-18T07:25:20.000Z | lib/cain/endpoint/variable_instance.ex | pfitz/cain | c1038a9d80e51213ee665149c585231b16c11b3f | [
"Apache-2.0"
] | 9 | 2019-12-18T13:01:11.000Z | 2021-02-12T14:24:43.000Z | lib/cain/endpoint/variable_instance.ex | pfitz/cain | c1038a9d80e51213ee665149c585231b16c11b3f | [
"Apache-2.0"
] | 1 | 2020-09-16T09:19:08.000Z | 2020-09-16T09:19:08.000Z | defmodule Cain.Endpoint.VariableInstance do
def get_list(query) do
{:get, "/variable-instance", query, %{}}
end
end
| 20.666667 | 44 | 0.701613 |
7342c300aaee9bbd8c7056e57f6bae74287ca68e | 2,140 | exs | Elixir | test/acceptance/html1/line_breaks_test.exs | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | test/acceptance/html1/line_breaks_test.exs | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | test/acceptance/html1/line_breaks_test.exs | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | defmodule Acceptance.Html1.LineBreaksTest do
  use ExUnit.Case, async: true

  import Support.Html1Helpers

  @moduletag :html1

  # Two or more trailing spaces before a newline must render as a <br>.
  describe "Forced Line Breaks" do
    test "with two spaces" do
      markdown = "The  \nquick"
      html = para([ "The", :br, "quick" ])
      messages = []

      assert to_html1(markdown) == {:ok, html, messages}
    end

    test "or more spaces" do
      markdown = "The    \nquick"
      html = para([ "The", :br, "quick" ])
      messages = []

      assert to_html1(markdown) == {:ok, html, messages}
    end

    test "or in some lines" do
      markdown = "The  \nquick  \nbrown"
      html = para(["The", :br, "quick", :br, "brown"])
      messages = []

      assert to_html1(markdown) == {:ok, html, messages}
    end

    test "and in list items" do
      # Forced breaks also apply inside list item content.
      markdown = "* The  \nquick"
      html = construct([
        :ul, :li, "The", :br, "quick" ])
      messages = []

      assert to_html1(markdown) == {:ok, html, messages}
    end
  end
describe "No Forced Line Breaks" do
test "with only one space" do
markdown = "The \nquick"
html = para("The \nquick")
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "or whitspace lines" do
markdown = "The\n \nquick"
html = construct([
{:p, nil, "The"},
{:p, nil, "quick"} ])
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "or inside the line" do
markdown = "The quick\nbrown"
html = para("The quick\nbrown")
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "or at the end of input" do
markdown = "The\nquick "
html = para("The\nquick ")
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "or in code blocks" do
markdown = "```\nThe \nquick\n```"
html = ~s{<pre><code class="">The \nquick</code></pre>\n}
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
end
end
# SPDX-License-Identifier: Apache-2.0
| 25.783133 | 68 | 0.548598 |
7342fb259289f97db70bd584154c2469fe088694 | 962 | ex | Elixir | apps/dockup_ui/web/services/delete_expired_deployments_service.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | apps/dockup_ui/web/services/delete_expired_deployments_service.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | apps/dockup_ui/web/services/delete_expired_deployments_service.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | defmodule DockupUi.DeleteExpiredDeploymentsService do
@moduledoc """
  This module is responsible for fetching all deployments older than a certain
  amount of time (as defined in config) and queueing them for deletion using
  DeleteDeploymentService.
"""
import Ecto.Query, only: [from: 2]
require Logger
alias DockupUi.{
DeleteDeploymentService,
Deployment,
Repo
}
  # Deletes all deployments older than the retention window.
  #
  # `service` is injectable for testing and must expose `run/1` taking a
  # deployment id. When `retention_days` is nil it is resolved from the
  # environment/config via get_retention_days/0.
  # NOTE(review): get_retention_days/0 may return a String (from the env var)
  # or a config value — confirm both are accepted by Ecto's ago/2 here.
  def run(service \\ DeleteDeploymentService, retention_days \\ nil) do
    Logger.info "Running DeleteExpiredDeploymentsService"
    retention_days = retention_days || get_retention_days()

    # Select only the ids of deployments past retention; deletion happens per id.
    query = from d in Deployment,
      where: d.inserted_at < ago(^retention_days, "day"),
      select: d.id

    # Flow fans the deletions out over multiple stages; Flow.run/1 blocks until done.
    query
    |> Repo.all
    |> Flow.from_enumerable()
    |> Flow.map(fn id -> service.run(id) end)
    |> Flow.run()
  end
defp get_retention_days() do
System.get_env("DOCKUP_RETENTION_DAYS") || Application.fetch_env!(:dockup_ui, :retention_days)
end
end
| 26 | 98 | 0.711019 |
73430cdf10074a226b1002182499b3fdbe6e12ce | 3,296 | exs | Elixir | test/gim/repo/table_test.exs | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | 4 | 2020-01-21T09:15:24.000Z | 2021-02-04T21:21:56.000Z | test/gim/repo/table_test.exs | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-04-06T05:20:09.000Z | 2020-06-09T09:56:20.000Z | test/gim/repo/table_test.exs | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2020-04-22T08:44:35.000Z | 2020-04-22T08:44:35.000Z | defmodule GimTest.TableTest do
  @moduledoc false
  use ExUnit.Case

  alias Gim.Repo.Table
  alias GimTest.Animal

  # Compile-time loop: generates one concrete test module per table backend,
  # so every backend listed here runs the same suite.
  for module <- [Table.Ets] do
    mod_name = Module.concat(__MODULE__, module)

    defmodule mod_name do
      @moduledoc false
      # async: false — backends share named ETS-style tables per module.
      use ExUnit.Case, async: false

      setup_all do
        # Build one table for the whole module and load every Animal fixture;
        # the tests below expect 473 records in total.
        module = unquote(module)
        table = module.new(__MODULE__, Animal)
        Enum.each(Animal.data(), &module.insert(table, &1))
        [table: table, module: module]
      end

      describe "#{module} -- tests" do
        @tag table: true
        test "basic testing", %{module: module, table: _table} do
          assert module == unquote(module)
        end

        @tag table: true
        @tag data_animal: true
        test "all", %{module: module, table: table} do
          # An empty filter list returns the full data set.
          assert {:ok, animals} = module.query(table, nil, [])
          assert 473 == length(animals)
        end

        @tag table: true
        @tag data_animal: true
        test "fetch", %{module: module, table: table} do
          assert {:ok, [%Animal{impound_no: "K12-000416"}]} =
                   module.query(table, nil, impound_no: "K12-000416")
        end

        @tag table: true
        @tag data_animal: true
        test "simple queries", %{module: module, table: table} do
          # Simple equality filters plus nested {:or, ...}/{:and, ...} trees.
          {:ok, dogs} = module.query(table, nil, animal_type: "Dog")

          for dog <- dogs do
            assert dog.animal_type == "Dog"
          end

          {:ok, cats} = module.query(table, nil, animal_type: "Cat")

          for cat <- cats do
            assert cat.animal_type == "Cat"
          end

          {:ok, cats_and_dogs} =
            module.query(table, nil, {:or, [animal_type: "Cat", animal_type: "Dog"]})

          {:ok, male_dogs_and_female_cats} =
            module.query(
              table,
              nil,
              {:or,
               [
                 {:and, [animal_type: "Cat", sex: :female]},
                 {:and, [animal_type: "Dog", sex: :male]}
               ]}
            )

          for animal <- male_dogs_and_female_cats do
            assert (animal.animal_type == "Cat" and animal.sex == :female) or
                     (animal.animal_type == "Dog" and animal.sex == :male)
          end

          {:ok, female_dogs_and_male_cats} =
            module.query(
              table,
              nil,
              {:or,
               [
                 {:and, [animal_type: "Cat", sex: :male]},
                 {:and, [animal_type: "Dog", sex: :female]}
               ]}
            )

          for animal <- female_dogs_and_male_cats do
            assert (animal.animal_type == "Dog" and animal.sex == :female) or
                     (animal.animal_type == "Cat" and animal.sex == :male)
          end

          # The complementary partitions must each cover the full 473 records.
          assert 473 == length(dogs) + length(cats)
          assert 473 == length(male_dogs_and_female_cats) + length(female_dogs_and_male_cats)
          assert 473 == length(cats_and_dogs)
        end

        @tag table: true
        @tag data_animal: true
        test "filter with function", %{module: module, table: table} do
          # A filter value may be a predicate function applied per field value.
          assert {:ok, animals} =
                   module.query(table, nil, impound_no: &String.ends_with?(&1, "1"))

          assert 45 == length(animals)
        end
      end
    end
  end
end
| 29.963636 | 93 | 0.519114 |
73431c06f2e7f16187e0463b48bf7c26b32fab34 | 1,675 | ex | Elixir | clients/gke_hub/lib/google_api/gke_hub/v1/model/identity_service_membership_spec.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/gke_hub/lib/google_api/gke_hub/v1/model/identity_service_membership_spec.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/gke_hub/lib/google_api/gke_hub/v1/model/identity_service_membership_spec.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GKEHub.V1.Model.IdentityServiceMembershipSpec do
  @moduledoc """
  **Anthos Identity Service**: Configuration for a single Membership.

  ## Attributes

  *   `authMethods` (*type:* `list(GoogleApi.GKEHub.V1.Model.IdentityServiceAuthMethod.t)`, *default:* `nil`) - A member may support multiple auth methods.
  """

  # Gax.ModelBase generates the struct, JSON decode/2, and the field/3 macro.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :authMethods => list(GoogleApi.GKEHub.V1.Model.IdentityServiceAuthMethod.t()) | nil
        }

  # Declares the struct key and how to decode its JSON value (a list of models).
  field(:authMethods, as: GoogleApi.GKEHub.V1.Model.IdentityServiceAuthMethod, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.GKEHub.V1.Model.IdentityServiceMembershipSpec do
  # `@for` is the protocol target module; decoding is handled by the model's
  # generated decode/2 (from Gax.ModelBase).
  def decode(value, options), do: @for.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.GKEHub.V1.Model.IdentityServiceMembershipSpec do
  # All generated models encode through the shared Gax ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.638298 | 155 | 0.757015 |
7343289c265fca0b336e1ee333b129c663bc7a26 | 218 | ex | Elixir | apps/heroes_web/lib/web/plugs/redirect.ex | miquecg/heroes-board-game | 2e8745c7577060a7fc80ad1e4c38c2c507a8c488 | [
"MIT"
] | 2 | 2020-04-09T06:42:15.000Z | 2022-03-03T15:52:38.000Z | apps/heroes_web/lib/web/plugs/redirect.ex | miquecg/heroes-board-game | 2e8745c7577060a7fc80ad1e4c38c2c507a8c488 | [
"MIT"
] | 5 | 2020-04-28T16:05:46.000Z | 2021-01-03T14:08:19.000Z | apps/heroes_web/lib/web/plugs/redirect.ex | miquecg/heroes-board-game | 2e8745c7577060a7fc80ad1e4c38c2c507a8c488 | [
"MIT"
] | null | null | null | defmodule Web.Plugs.Redirect do
@moduledoc """
Simple redirect Plug.
"""
def init(opts), do: opts
def call(conn, opts) do
conn
|> Phoenix.Controller.redirect(opts)
|> Plug.Conn.halt()
end
end
| 15.571429 | 40 | 0.637615 |
73435168928c81176f16a500c1ef8c357006422c | 355 | ex | Elixir | lib/blue_jet/app/storefront/external/balance_service.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | lib/blue_jet/app/storefront/external/balance_service.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | lib/blue_jet/app/storefront/external/balance_service.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJet.Storefront.BalanceService do
  # Resolved at compile time because defdelegate's :to must be a compile-time
  # module. NOTE(review): this freezes the configured module into the beam file;
  # changing config requires recompilation — confirm that is intended.
  @balance_service Application.get_env(:blue_jet, :storefront)[:balance_service]

  # Contract the configured balance service module must implement.
  @callback list_payment(map, map) :: list(map)
  @callback count_payment(map, map) :: integer

  # Thin pass-throughs to the configured implementation.
  defdelegate list_payment(fields, opts), to: @balance_service
  defdelegate count_payment(fields, opts), to: @balance_service
end | 39.444444 | 80 | 0.785915 |
73435dbded9ab638c414f4ef11d6af541bc7c85c | 5,999 | exs | Elixir | mix.exs | nicksen/livebook | f5afac94970d9c3bd3ad93f6c633e305e9bce30a | [
"Apache-2.0"
] | null | null | null | mix.exs | nicksen/livebook | f5afac94970d9c3bd3ad93f6c633e305e9bce30a | [
"Apache-2.0"
] | null | null | null | mix.exs | nicksen/livebook | f5afac94970d9c3bd3ad93f6c633e305e9bce30a | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.MixProject do
use Mix.Project
@elixir_requirement "~> 1.13"
@version "0.6.1"
@description "Interactive and collaborative code notebooks - made with Phoenix LiveView"
@app_elixir_version "1.13.4"
  # Mix project definition; deps are pinned to mix.lock via with_lock/1 so git
  # and escript installs get exactly the locked versions.
  def project do
    [
      app: :livebook,
      version: @version,
      elixir: @elixir_requirement,
      name: "Livebook",
      description: @description,
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: with_lock(target_deps(Mix.target()) ++ deps()),
      escript: escript(),
      package: package(),
      default_release: :livebook,
      releases: releases()
    ]
  end

  # OTP application config; extra apps vary with the Mix target (see below).
  def application do
    [
      mod: {Livebook.Application, []},
      extra_applications:
        [:logger, :runtime_tools, :os_mon, :inets, :ssl, :xmerl] ++
          extra_applications(Mix.target()),
      env: Application.get_all_env(:livebook)
    ]
  end

  # The desktop-app target additionally needs :wx for the native UI shell.
  defp extra_applications(:app), do: [:wx]
  defp extra_applications(_), do: []

  # Compile test support helpers only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Hex package metadata (also controls which files ship in the package).
  defp package do
    [
      licenses: ["Apache-2.0"],
      links: %{
        "GitHub" => "https://github.com/livebook-dev/livebook"
      },
      files:
        ~w(lib static config mix.exs mix.lock README.md LICENSE CHANGELOG.md iframe/priv/static/iframe)
    ]
  end

  # Developer shortcuts for asset setup/build and project-wide formatting.
  defp aliases do
    [
      "dev.setup": ["deps.get", "cmd npm install --prefix assets"],
      "dev.build": ["cmd npm run deploy --prefix ./assets"],
      "format.all": ["format", "cmd npm run format --prefix ./assets"]
    ]
  end

  # Escript entry point; app: nil so the escript does not auto-start :livebook.
  defp escript do
    [
      main_module: LivebookCLI,
      app: nil
    ]
  end
## Dependencies
# Although we use requirements here, the with_lock() function
# below ensures we only use the locked versions. This is important
# for two reasons:
#
# 1. because we bundle assets from phoenix, phoenix_live_view,
# and phoenix_html, we want to make sure we have those exact
# versions
#
# 2. we don't want users to potentially get a new dependency
# when installing from git or as an escript
#
# Therefore, to update any dependency, you must call before:
#
# mix deps.unlock foo bar baz
#
  # Declared with loose requirements; with_lock/1 below overrides each with the
  # exact version from mix.lock (see the comment block above).
  defp deps do
    [
      {:phoenix, "~> 1.5"},
      {:phoenix_html, "~> 3.0"},
      {:phoenix_live_view, "~> 0.17.3"},
      {:phoenix_live_dashboard, "~> 0.6.0"},
      {:telemetry_metrics, "~> 0.4"},
      {:telemetry_poller, "~> 1.0"},
      {:jason, "~> 1.0"},
      {:plug_cowboy, "~> 2.0"},
      {:earmark_parser, "~> 1.4"},
      {:castore, "~> 0.1.0"},
      {:aws_signature, "~> 0.2.0"},
      {:phoenix_live_reload, "~> 1.2", only: :dev},
      {:floki, ">= 0.27.0", only: :test},
      {:bypass, "~> 2.1", only: :test}
    ]
  end

  # Extra deps for the desktop-app target only.
  defp target_deps(:app), do: [{:app_builder, path: "app_builder"}]
  defp target_deps(_), do: []

  # Compile-time parse of mix.lock into %{dep_name => locked_version} for hex
  # entries; falls back to %{} when the lock file is absent or unreadable.
  @lock (with {:ok, contents} <- File.read("mix.lock"),
              {:ok, quoted} <- Code.string_to_quoted(contents, warn_on_unnecessary_quotes: false),
              {%{} = lock, _binding} <- Code.eval_quoted(quoted, []) do
           for {dep, hex} when elem(hex, 0) == :hex <- lock,
               do: {dep, elem(hex, 2)},
               into: %{}
         else
           _ -> %{}
         end)
defp with_lock(deps) do
for dep <- deps do
name = elem(dep, 0)
put_elem(dep, 1, @lock[name] || elem(dep, 1))
end
end
## Releases
  # Release definitions: a plain server release plus three desktop-app
  # variants; each desktop variant appends extra custom build steps.
  defp releases do
    [
      livebook: [
        include_executables_for: [:unix],
        include_erts: false,
        rel_templates_path: "rel/server",
        steps: [:assemble, &remove_cookie/1]
      ],
      mac_app: [
        include_executables_for: [:unix],
        include_erts: false,
        rel_templates_path: "rel/app",
        steps: [:assemble, &remove_cookie/1, &standalone_erlang_elixir/1, &build_mac_app/1]
      ],
      mac_app_dmg: [
        include_executables_for: [:unix],
        include_erts: false,
        rel_templates_path: "rel/app",
        steps: [:assemble, &remove_cookie/1, &standalone_erlang_elixir/1, &build_mac_app_dmg/1]
      ],
      windows_installer: [
        include_executables_for: [:windows],
        include_erts: false,
        rel_templates_path: "rel/app",
        steps: [
          :assemble,
          &remove_cookie/1,
          &standalone_erlang_elixir/1,
          &build_windows_installer/1
        ]
      ]
    ]
  end

  # Release step: drop the generated node COOKIE so one is not baked into the
  # shipped artifact; returns the release unchanged for the next step.
  defp remove_cookie(release) do
    File.rm!(Path.join(release.path, "releases/COOKIE"))
    release
  end

  # Release step: vendor full OTP and Elixir into the release so the desktop
  # app does not depend on a system installation (helpers in standalone.exs).
  defp standalone_erlang_elixir(release) do
    Code.require_file("rel/app/standalone.exs")

    release
    |> Standalone.copy_otp()
    |> Standalone.copy_elixir(@app_elixir_version)
  end

  # Shared AppBuilder options for the desktop targets (name, icon, URL scheme,
  # and the .livemd document-type registration).
  @app_options [
    name: "Livebook",
    version: @version,
    logo_path: "rel/app/mac-icon.png",
    additional_paths: [
      "/rel/erts-#{:erlang.system_info(:version)}/bin",
      "/rel/vendor/elixir/bin"
    ],
    url_schemes: ["livebook"],
    document_types: [
      %{
        name: "LiveMarkdown",
        extensions: ["livemd"],
        # macos specific
        role: "Editor"
      }
    ]
  ]

  # Release step: package the assembled release as an unsigned macOS .app.
  defp build_mac_app(release) do
    AppBuilder.build_mac_app(release, @app_options)
  end

  # Release step: signed + notarized .dmg; credentials come from the
  # environment and are required (fetch_env! raises when missing).
  defp build_mac_app_dmg(release) do
    options =
      [
        codesign: [
          identity: System.fetch_env!("CODESIGN_IDENTITY")
        ],
        notarize: [
          team_id: System.fetch_env!("NOTARIZE_TEAM_ID"),
          apple_id: System.fetch_env!("NOTARIZE_APPLE_ID"),
          password: System.fetch_env!("NOTARIZE_PASSWORD")
        ]
      ] ++ @app_options

    AppBuilder.build_mac_app_dmg(release, options)
  end

  # Release step: Windows installer; reuses a subset of @app_options and
  # swaps in the Windows logo and app module.
  defp build_windows_installer(release) do
    options =
      Keyword.take(@app_options, [:name, :version, :url_schemes, :document_types]) ++
        [module: LivebookApp, logo_path: "static/images/logo.png"]

    AppBuilder.build_windows_installer(release, options)
  end
end
| 27.022523 | 103 | 0.592099 |
73435f40044c7caa8b9cdd22c313e94268973507 | 3,352 | ex | Elixir | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1beta2__timestamped_object.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1beta2__timestamped_object.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1beta2__timestamped_object.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_TimestampedObject do
  @moduledoc """
  For tracking related features. An object at time_offset with attributes, and located with normalized_bounding_box.

  ## Attributes

  *   `attributes` (*type:* `list(GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_DetectedAttribute.t)`, *default:* `nil`) - Optional. The attributes of the object in the bounding box.
  *   `landmarks` (*type:* `list(GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_DetectedLandmark.t)`, *default:* `nil`) - Optional. The detected landmarks.
  *   `normalizedBoundingBox` (*type:* `GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_NormalizedBoundingBox.t`, *default:* `nil`) - Normalized Bounding box in a frame, where the object is located.
  *   `timeOffset` (*type:* `String.t`, *default:* `nil`) - Time-offset, relative to the beginning of the video, corresponding to the video frame for this object.
  """

  # Gax.ModelBase generates the struct, JSON decode/2, and the field macro.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :attributes =>
            list(
              GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_DetectedAttribute.t()
            )
            | nil,
          :landmarks =>
            list(
              GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_DetectedLandmark.t()
            )
            | nil,
          :normalizedBoundingBox =>
            GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_NormalizedBoundingBox.t()
            | nil,
          :timeOffset => String.t() | nil
        }

  # Struct keys + JSON decoding rules for each attribute.
  field(:attributes,
    as:
      GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_DetectedAttribute,
    type: :list
  )

  field(:landmarks,
    as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_DetectedLandmark,
    type: :list
  )

  field(:normalizedBoundingBox,
    as:
      GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_NormalizedBoundingBox
  )

  field(:timeOffset)
end
defimpl Poison.Decoder,
  for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_TimestampedObject do
  # `@for` is the protocol target module; decoding is handled by the model's
  # generated decode/2 (from Gax.ModelBase).
  def decode(value, options), do: @for.decode(value, options)
end
defimpl Poison.Encoder,
  for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_TimestampedObject do
  # All generated models encode through the shared Gax ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.904762 | 223 | 0.74284 |
73436b86db4ef3fd38d8ab7286cbd9f25b8ab7ed | 1,909 | exs | Elixir | mix.exs | nerves-project/nerves_motd | 45573c8d13e8d526bc4ada19ada2f86753dd4461 | [
"Apache-2.0"
] | 5 | 2021-09-09T12:57:27.000Z | 2021-12-21T22:46:58.000Z | mix.exs | nerves-project/nerves_motd | 45573c8d13e8d526bc4ada19ada2f86753dd4461 | [
"Apache-2.0"
] | 19 | 2021-09-07T14:07:57.000Z | 2022-03-08T02:34:11.000Z | mix.exs | nerves-project/nerves_motd | 45573c8d13e8d526bc4ada19ada2f86753dd4461 | [
"Apache-2.0"
] | 2 | 2021-11-01T20:27:43.000Z | 2021-11-23T21:56:33.000Z | defmodule NervesMOTD.MixProject do
use Mix.Project
@version "0.1.6"
@source_url "https://github.com/nerves-project/nerves_motd"
  # Mix project definition; doc-related tasks run in the :docs environment.
  def project do
    [
      app: :nerves_motd,
      version: @version,
      elixir: "~> 1.9",
      elixirc_paths: elixirc_paths(Mix.env()),
      docs: docs(),
      description: description(),
      package: package(),
      deps: deps(),
      dialyzer: dialyzer(),
      preferred_cli_env: %{
        docs: :docs,
        "hex.publish": :docs,
        "hex.build": :docs
      }
    ]
  end

  # Library application: no supervision tree, no extra applications.
  def application do
    []
  end

  # Compile test support helpers only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  defp deps do
    [
      {:nerves_runtime, "~> 0.8"},
      {:credo, "~> 1.5", only: [:dev, :test], runtime: false},
      {:dialyxir, "~> 1.1", only: [:dev, :test], runtime: false},
      {:ex_doc, "~> 0.25", only: :docs, runtime: false},
      {:mix_test_watch, "~> 1.1", only: :dev, runtime: false},
      {:mox, "~> 1.0", only: :test}
    ] ++ maybe_nerves_time_zones()
  end

  # nerves_time_zones requires Elixir >= 1.11, so the optional dependency is
  # only declared when compiling on a new-enough Elixir (compile-time branch).
  if Version.match?(System.version(), ">= 1.11.0") do
    defp maybe_nerves_time_zones() do
      [{:nerves_time_zones, "~> 0.1", optional: true}]
    end
  else
    defp maybe_nerves_time_zones(), do: []
  end

  # ExDoc configuration.
  defp docs do
    [
      extras: ["README.md", "CHANGELOG.md"],
      main: "readme",
      source_ref: "v#{@version}",
      source_url: @source_url,
      skip_undefined_reference_warnings_on: ["CHANGELOG.md"]
    ]
  end

  defp description do
    "Message of the day for Nerves devices"
  end

  # Hex package metadata.
  defp package do
    [
      files: ["CHANGELOG.md", "lib", "LICENSE", "mix.exs", "README.md"],
      licenses: ["Apache-2.0"],
      links: %{"Github" => @source_url}
    ]
  end

  # Dialyzer setup; nerves_time_zones is added to the PLT because it is an
  # optional dependency (see maybe_nerves_time_zones/0).
  defp dialyzer() do
    [
      flags: [:race_conditions, :unmatched_returns, :error_handling, :underspecs],
      plt_add_apps: [:nerves_time_zones]
    ]
  end
end
| 23.567901 | 82 | 0.573075 |
73437a147cd1c284159354dc21c5cb63d510b225 | 756 | ex | Elixir | lib/cachex/actions/del.ex | botwerk/cachex | d37996d3be35b0d8281e347d44c024ecf2735131 | [
"MIT"
] | 946 | 2017-06-26T00:36:58.000Z | 2022-03-29T19:52:31.000Z | lib/cachex/actions/del.ex | botwerk/cachex | d37996d3be35b0d8281e347d44c024ecf2735131 | [
"MIT"
] | 152 | 2017-06-28T10:01:24.000Z | 2022-03-24T18:46:13.000Z | lib/cachex/actions/del.ex | botwerk/cachex | d37996d3be35b0d8281e347d44c024ecf2735131 | [
"MIT"
] | 84 | 2017-06-30T05:30:31.000Z | 2022-03-01T20:23:16.000Z | defmodule Cachex.Actions.Del do
@moduledoc false
# Command module to allow removal of a cache entry.
alias Cachex.Services.Locksmith
# import required macros
import Cachex.Spec
##############
# Public API #
##############
  @doc """
  Removes an entry from a cache by key.
  This command will always return a true value, signalling that the key no longer
  exists in the cache (regardless of whether it previously existed).
  Removal runs in a lock aware context, to ensure that we're not removing a key
  being used inside a transaction in other places in the codebase.
  """
  # `cache(name: name)` is a record pattern from Cachex.Spec: it extracts the
  # backing ETS table name while keeping the whole record bound as `cache`.
  def execute(cache(name: name) = cache, key, _options) do
    # Locksmith.write/3 serialises this delete against any transaction
    # currently holding a lock on `key`.
    Locksmith.write(cache, [ key ], fn ->
      { :ok, :ets.delete(name, key) }
    end)
  end
end
| 27 | 81 | 0.675926 |
734390cb44d5ebf76f2bb5bc1c63447b64548fce | 66,598 | ex | Elixir | clients/big_query_reservation/lib/google_api/big_query_reservation/v1/api/projects.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/big_query_reservation/lib/google_api/big_query_reservation/v1/api/projects.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/big_query_reservation/lib/google_api/big_query_reservation/v1/api/projects.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQueryReservation.V1.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.BigQueryReservation.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Retrieves a BI reservation.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Name of the requested reservation, for example:
`projects/{project_id}/locations/{location_id}/bireservation`
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.BiReservation{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_get_bi_reservation(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.BiReservation.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_get_bi_reservation(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.BiReservation{}])
end
@doc """
Looks up assignments for a specified resource for a particular region.
If the request is about a project:
1) Assignments created on the project will be returned if they exist.
2) Otherwise assignments created on the closest ancestor will be
returned. 3) Assignments for different JobTypes will all be returned.
Same logic applies if the request is about a folder.
If the request is about an organization, then assignments created on the
organization will be returned (organization doesn't have ancestors).
Comparing to ListAssignments, there are some behavior
differences:
1) permission on the assignee will be verified in this API.
2) Hierarchy lookup (project->folder->organization) happens in this API.
3) Parent here is projects/*/locations/*, instead of
projects/*/locations/*reservations/*.
Note "-" cannot be used for projects
nor locations.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The resource name of the admin project(containing project and location),
e.g.:
"projects/myproject/locations/US".
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - The maximum number of items to return.
* `:pageToken` (*type:* `String.t`) - The next_page_token value returned from a previous List request, if any.
* `:query` (*type:* `String.t`) - Please specify resource name as assignee in the query.
e.g., "assignee=projects/myproject"
"assignee=folders/123"
"assignee=organizations/456"
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.SearchAssignmentsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_search_assignments(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.SearchAssignmentsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_search_assignments(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query,
:query => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+parent}:searchAssignments", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.SearchAssignmentsResponse{}]
)
end
@doc """
Updates a BI reservation.
Only fields specified in the field_mask are updated.
Singleton BI reservation always exists with default size 0.
In order to reserve BI capacity it needs to be updated to an amount
greater than 0. In order to release BI capacity reservation size
must be set to 0.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The resource name of the singleton BI reservation.
Reservation names have the form
`projects/{project_id}/locations/{location_id}/bireservation`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - A list of fields to be updated in this request.
* `:body` (*type:* `GoogleApi.BigQueryReservation.V1.Model.BiReservation.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.BiReservation{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_update_bi_reservation(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.BiReservation.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_update_bi_reservation(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.BiReservation{}])
end
@doc """
Creates a new capacity commitment resource.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. Resource name of the parent reservation. E.g.,
projects/myproject/locations/US
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:enforceSingleAdminProjectPerOrg` (*type:* `boolean()`) - If true, fail the request if another project in the organization has a
capacity commitment.
* `:body` (*type:* `GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_capacity_commitments_create(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_capacity_commitments_create(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:enforceSingleAdminProjectPerOrg => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+parent}/capacityCommitments", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment{}]
)
end
@doc """
Deletes a capacity commitment. Attempting to delete capacity commitment
before its commitment_end_time will fail with the error code
`google.rpc.Code.FAILED_PRECONDITION`.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Resource name of the capacity commitment to delete. E.g.,
projects/myproject/locations/US/capacityCommitments/123
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_capacity_commitments_delete(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_capacity_commitments_delete(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.Empty{}])
end
@doc """
Returns information about the capacity commitment.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Resource name of the capacity commitment to retrieve. E.g.,
projects/myproject/locations/US/capacityCommitments/123
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_capacity_commitments_get(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_capacity_commitments_get(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment{}]
)
end
@doc """
Lists all the capacity commitments for the admin project.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. Resource name of the parent reservation. E.g.,
projects/myproject/locations/US
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - The maximum number of items to return.
* `:pageToken` (*type:* `String.t`) - The next_page_token value returned from a previous List request, if any.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.ListCapacityCommitmentsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_capacity_commitments_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.ListCapacityCommitmentsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_capacity_commitments_list(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+parent}/capacityCommitments", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.ListCapacityCommitmentsResponse{}]
)
end
@doc """
Merges capacity commitments of the same plan into one. Resulting capacity
commitment has the longer commitment_end_time out of the two. Attempting to
merge capacity commitments of different plan will fail with the error code
`google.rpc.Code.FAILED_PRECONDITION`.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Parent resource that identifies admin project and location e.g.,
projects/myproject/locations/us
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.BigQueryReservation.V1.Model.MergeCapacityCommitmentsRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_capacity_commitments_merge(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_capacity_commitments_merge(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+parent}/capacityCommitments:merge", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment{}]
)
end
@doc """
Updates an existing capacity commitment.
Only plan and renewal_plan fields can be updated.
Plan can only be changed to a plan of a longer commitment period.
Attempting to change to a plan with shorter commitment period will fail
with the error code `google.rpc.Code.FAILED_PRECONDITION`.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Output only. The resource name of the capacity commitment, e.g.,
projects/myproject/locations/US/capacityCommitments/123
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Standard field mask for the set of fields to be updated.
* `:body` (*type:* `GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_capacity_commitments_patch(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_capacity_commitments_patch(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.CapacityCommitment{}]
)
end
@doc """
Splits capacity commitment to two commitments of the same plan and
commitment_end_time. A common use case to do that is to perform a downgrade
e.g., in order to downgrade from 10000 slots to 8000, one might split 10000
capacity commitment to 2000 and 8000, change the plan of the first one to
flex and then delete it.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The resource name e.g.,:
projects/myproject/locations/US/capacityCommitments/123
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.BigQueryReservation.V1.Model.SplitCapacityCommitmentRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.SplitCapacityCommitmentResponse{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_capacity_commitments_split(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.SplitCapacityCommitmentResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_capacity_commitments_split(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+name}:split", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.SplitCapacityCommitmentResponse{}]
)
end
@doc """
Creates a new reservation resource.
## Parameters
* `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. Project, location. E.g.,
projects/myproject/locations/US
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:reservationId` (*type:* `String.t`) - The reservation ID. This field must only contain lower case alphanumeric
characters or dash. Max length is 64 characters.
* `:body` (*type:* `GoogleApi.BigQueryReservation.V1.Model.Reservation.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.BigQueryReservation.V1.Model.Reservation{}}` on success
* `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_reservations_create(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.BigQueryReservation.V1.Model.Reservation.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_reservations_create(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:reservationId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+parent}/reservations", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.Reservation{}])
end
@doc """
Deletes a reservation.

Returns `google.rpc.Code.FAILED_PRECONDITION` when reservation has
assignments.

## Parameters

  * `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - Required. Resource name of the reservation to retrieve. E.g.,
    projects/myproject/locations/US/reservations/team1-prod
  * `optional_params` (*type:* `keyword()`) - Optional parameters. Supports the standard
    Google API query parameters (`:"$.xgafv"`, `:access_token`, `:alt`, `:callback`,
    `:fields`, `:key`, `:oauth_token`, `:prettyPrint`, `:quotaUser`, `:uploadType`,
    `:upload_protocol`).
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.BigQueryReservation.V1.Model.Empty{}}` on success
  * `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_reservations_delete(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryReservation.V1.Model.Empty.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_reservations_delete(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps every supported optional parameter onto its location in the request.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  # `name` is interpolated into the URL template, so it must be URI-encoded.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:delete)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(param_mapping, optional_params)
    |> Request.library_version(@library_version)

  decode_options = opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.Empty{}]

  connection
  |> Connection.execute(request)
  |> Response.decode(decode_options)
end
@doc """
Returns information about the reservation.

## Parameters

  * `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - Required. Resource name of the reservation to retrieve. E.g.,
    projects/myproject/locations/US/reservations/team1-prod
  * `optional_params` (*type:* `keyword()`) - Optional parameters. Supports the standard
    Google API query parameters (`:"$.xgafv"`, `:access_token`, `:alt`, `:callback`,
    `:fields`, `:key`, `:oauth_token`, `:prettyPrint`, `:quotaUser`, `:uploadType`,
    `:upload_protocol`).
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.BigQueryReservation.V1.Model.Reservation{}}` on success
  * `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_reservations_get(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryReservation.V1.Model.Reservation.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_reservations_get(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps every supported optional parameter onto its location in the request.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  # `name` is interpolated into the URL template, so it must be URI-encoded.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(param_mapping, optional_params)
    |> Request.library_version(@library_version)

  decode_options = opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.Reservation{}]

  connection
  |> Connection.execute(request)
  |> Response.decode(decode_options)
end
@doc """
Lists all the reservations for the project in the specified location.

## Parameters

  * `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
  * `parent` (*type:* `String.t`) - Required. The parent resource name containing project and location, e.g.:
    "projects/myproject/locations/US"
  * `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the standard
    Google API query parameters, supports:
    * `:pageSize` (*type:* `integer()`) - The maximum number of items to return.
    * `:pageToken` (*type:* `String.t`) - The next_page_token value returned from a previous List request, if any.
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.BigQueryReservation.V1.Model.ListReservationsResponse{}}` on success
  * `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_reservations_list(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryReservation.V1.Model.ListReservationsResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_reservations_list(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps every supported optional parameter onto its location in the request.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :pageSize => :query,
    :pageToken => :query
  }

  # `parent` is interpolated into the URL template, so it must be URI-encoded.
  path_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+parent}/reservations", path_params)
    |> Request.add_optional_params(param_mapping, optional_params)
    |> Request.library_version(@library_version)

  decode_options =
    opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.ListReservationsResponse{}]

  connection
  |> Connection.execute(request)
  |> Response.decode(decode_options)
end
@doc """
Updates an existing reservation resource.

## Parameters

  * `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - The resource name of the reservation, e.g.,
    "projects/*/locations/*/reservations/team1-prod".
  * `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the standard
    Google API query parameters, supports:
    * `:updateMask` (*type:* `String.t`) - Standard field mask for the set of fields to be updated.
    * `:body` (*type:* `GoogleApi.BigQueryReservation.V1.Model.Reservation.t`) -
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.BigQueryReservation.V1.Model.Reservation{}}` on success
  * `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_reservations_patch(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryReservation.V1.Model.Reservation.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_reservations_patch(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps every supported optional parameter onto its location in the request.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :updateMask => :query,
    :body => :body
  }

  # `name` is interpolated into the URL template, so it must be URI-encoded.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:patch)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(param_mapping, optional_params)
    |> Request.library_version(@library_version)

  decode_options = opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.Reservation{}]

  connection
  |> Connection.execute(request)
  |> Response.decode(decode_options)
end
@doc """
Creates an object which allows the given project to submit jobs
of a certain type using slots from the specified reservation. Currently a
resource (project, folder, organization) can only have one assignment per
{job_type, location}, and that reservation will be used for all jobs of the
matching type. Within the organization, different assignments can be
created on projects, folders or organization level. During query execution,
the assignment is looked up at the project, folder and organization levels
in that order. The first assignment found is applied to the query. When
creating assignments, it does not matter if other assignments exist at
higher levels. E.g: organizationA contains project1, project2. Assignments
for organizationA, project1 and project2 could all be created, mapping to
the same or different reservations.

Returns `google.rpc.Code.PERMISSION_DENIED` if user does not have
'bigquery.admin' permissions on the project using the reservation
and the project that owns this reservation.

Returns `google.rpc.Code.INVALID_ARGUMENT` when location of the assignment
does not match location of the reservation.

## Parameters

  * `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
  * `parent` (*type:* `String.t`) - Required. The parent resource name of the assignment
    E.g.: projects/myproject/locations/US/reservations/team1-prod
  * `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the standard
    Google API query parameters, supports:
    * `:body` (*type:* `GoogleApi.BigQueryReservation.V1.Model.Assignment.t`) -
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.BigQueryReservation.V1.Model.Assignment{}}` on success
  * `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_reservations_assignments_create(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryReservation.V1.Model.Assignment.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_reservations_assignments_create(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps every supported optional parameter onto its location in the request.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  # `parent` is interpolated into the URL template, so it must be URI-encoded.
  path_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+parent}/assignments", path_params)
    |> Request.add_optional_params(param_mapping, optional_params)
    |> Request.library_version(@library_version)

  decode_options = opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.Assignment{}]

  connection
  |> Connection.execute(request)
  |> Response.decode(decode_options)
end
@doc """
Deletes a assignment. No expansion will happen.

E.g:
organizationA contains project1 and project2. Reservation res1 exists.
CreateAssignment was invoked previously and following assignments were
created explicitly:
  <organizationA, res1>
  <project1, res1>
Then deletion of <organizationA, res1> won't affect <project1, res1>. After
deletion of <organizationA, res1>, queries from project1 will still use
res1, while queries from project2 will use on-demand mode.

## Parameters

  * `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - Required. Name of the resource, e.g.:
    projects/myproject/locations/US/reservations/team1-prod/assignments/123
  * `optional_params` (*type:* `keyword()`) - Optional parameters. Supports the standard
    Google API query parameters (`:"$.xgafv"`, `:access_token`, `:alt`, `:callback`,
    `:fields`, `:key`, `:oauth_token`, `:prettyPrint`, `:quotaUser`, `:uploadType`,
    `:upload_protocol`).
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.BigQueryReservation.V1.Model.Empty{}}` on success
  * `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_reservations_assignments_delete(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryReservation.V1.Model.Empty.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_reservations_assignments_delete(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps every supported optional parameter onto its location in the request.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  # `name` is interpolated into the URL template, so it must be URI-encoded.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:delete)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(param_mapping, optional_params)
    |> Request.library_version(@library_version)

  decode_options = opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.Empty{}]

  connection
  |> Connection.execute(request)
  |> Response.decode(decode_options)
end
@doc """
Lists assignments.

Only explicitly created assignments will be returned. E.g:
organizationA contains project1 and project2. Reservation res1 exists.
CreateAssignment was invoked previously and following assignments were
created explicitly:
  <organizationA, res1>
  <project1, res1>
Then this API will just return the above two assignments for reservation
res1, and no expansion/merge will happen. Wildcard "-" can be used for
reservations in the request. In that case all assignments belongs to the
specified project and location will be listed. Note
"-" cannot be used for projects nor locations.

## Parameters

  * `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
  * `parent` (*type:* `String.t`) - Required. The parent resource name e.g.:
    projects/myproject/locations/US/reservations/team1-prod
    Or:
    projects/myproject/locations/US/reservations/-
  * `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the standard
    Google API query parameters, supports:
    * `:pageSize` (*type:* `integer()`) - The maximum number of items to return.
    * `:pageToken` (*type:* `String.t`) - The next_page_token value returned from a previous List request, if any.
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.BigQueryReservation.V1.Model.ListAssignmentsResponse{}}` on success
  * `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_reservations_assignments_list(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryReservation.V1.Model.ListAssignmentsResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_reservations_assignments_list(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps every supported optional parameter onto its location in the request.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :pageSize => :query,
    :pageToken => :query
  }

  # `parent` is interpolated into the URL template, so it must be URI-encoded.
  path_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+parent}/assignments", path_params)
    |> Request.add_optional_params(param_mapping, optional_params)
    |> Request.library_version(@library_version)

  decode_options =
    opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.ListAssignmentsResponse{}]

  connection
  |> Connection.execute(request)
  |> Response.decode(decode_options)
end
@doc """
Moves a assignment under a new reservation. Customers can do this by
deleting the existing assignment followed by creating another assignment
under the new reservation, but this method provides a transactional way to
do so, to make sure the assignee always has an associated reservation.
Without the method customers might see some queries run on-demand which
might be unexpected.

## Parameters

  * `connection` (*type:* `GoogleApi.BigQueryReservation.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - Required. The resource name of the assignment,
    e.g.:
    projects/myproject/locations/US/reservations/team1-prod/assignments/123
  * `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the standard
    Google API query parameters, supports:
    * `:body` (*type:* `GoogleApi.BigQueryReservation.V1.Model.MoveAssignmentRequest.t`) -
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.BigQueryReservation.V1.Model.Assignment{}}` on success
  * `{:error, info}` on failure
"""
@spec bigqueryreservation_projects_locations_reservations_assignments_move(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryReservation.V1.Model.Assignment.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def bigqueryreservation_projects_locations_reservations_assignments_move(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps every supported optional parameter onto its location in the request.
  param_mapping = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  # `name` is interpolated into the URL template; `:move` is a custom verb suffix.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+name}:move", path_params)
    |> Request.add_optional_params(param_mapping, optional_params)
    |> Request.library_version(@library_version)

  decode_options = opts ++ [struct: %GoogleApi.BigQueryReservation.V1.Model.Assignment{}]

  connection
  |> Connection.execute(request)
  |> Response.decode(decode_options)
end
end
| 44.133863 | 196 | 0.625709 |
7343b54c476232d51af8af6c72f8181e3f9641f2 | 1,371 | ex | Elixir | lib/codebeam_2021_web/router.ex | JacquiManzi/codebeam_metaprogramming_2021 | 297e944b6436b3247c9a9075061f3454bd084ac4 | [
"MIT"
] | null | null | null | lib/codebeam_2021_web/router.ex | JacquiManzi/codebeam_metaprogramming_2021 | 297e944b6436b3247c9a9075061f3454bd084ac4 | [
"MIT"
] | null | null | null | lib/codebeam_2021_web/router.ex | JacquiManzi/codebeam_metaprogramming_2021 | 297e944b6436b3247c9a9075061f3454bd084ac4 | [
"MIT"
] | null | null | null | defmodule Codebeam2021Web.Router do
use Codebeam2021Web, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :access_restriction do
plug :browser
plug(Codebeam2021.AccessRestrictionPlug)
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", Codebeam2021Web do
pipe_through :browser
get "/", PageController, :index
end
scope "/", Codebeam2021Web do
pipe_through :access_restriction
get "/user", PageController, :user
get "/admin_user", PageController, :admin_user
end
# Other scopes may use custom stacks.
# scope "/api", Codebeam2021Web do
# pipe_through :api
# end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through :browser
live_dashboard "/dashboard", metrics: Codebeam2021Web.Telemetry
end
end
end
| 24.927273 | 70 | 0.708972 |
7343bb7f72a604685ceeb1bb787df0a65962c65f | 23,781 | ex | Elixir | lib/fake_server/response.ex | anjaligr05/fake_server | cdca14b7f56dd8afc0acc2fad508928c669017da | [
"Apache-2.0"
] | null | null | null | lib/fake_server/response.ex | anjaligr05/fake_server | cdca14b7f56dd8afc0acc2fad508928c669017da | [
"Apache-2.0"
] | null | null | null | lib/fake_server/response.ex | anjaligr05/fake_server | cdca14b7f56dd8afc0acc2fad508928c669017da | [
"Apache-2.0"
] | null | null | null | defmodule FakeServer.Response do
@moduledoc """
Response structure and helpers.
FakeServer makes use of the `%FakeServer.Response{}` structure to define the responses that will be given by the server.
## Structure Fields
- `:status`: The status code of the response. It must be an integer.
- `:body`: Optional. The response body. Can be a string or a map. If the body is a map, it will be encoded so the map must be equivalent to a valid JSON.
- `:headers`: Optional. The response headers. Must be a map with the string keys.
You can use the `new/3` function to create a response. Since this function performs several validations, you should avoid to create the structure directly.
"""
@enforce_keys [:status]
defstruct [status: nil, body: "", headers: %{}]
@doc """
Creates a new Response structure. Returns `{:ok, response}` on success or `{:error, reason}` when validation fails
## Example
```elixir
iex> FakeServer.Response.new(200, %{name: "Test User", email: "[email protected]"}, %{"Content-Type" => "application/json"})
iex> FakeServer.Response.new(200, ~s<{"name":"Test User","email":"[email protected]"}>, %{"Content-Type" => "application/json"})
iex> FakeServer.Response.new(201, ~s<{"name":"Test User","email":"[email protected]"}>)
iex> FakeServer.Response.new(404)
```
"""
def new(status_code, body \\ "", headers \\ %{}) do
with response <- %__MODULE__{status: status_code, body: body, headers: headers},
:ok <- validate(response),
{:ok, response} <- ensure_body_format(response),
{:ok, response} <- ensure_headers_keys(response)
do
{:ok, response}
end
end
@doc """
Similar to `new/3`, but raises `FakeServer.Error` when validation fails.
"""
def new!(status_code, body \\ "", headers \\ %{}) do
case new(status_code, body, headers) do
{:ok, response} -> response
{:error, reason} -> raise FakeServer.Error, reason
end
end
  @doc false
  # Validates a response. Accepts either a bare struct or an `{:ok, struct}`
  # tuple (unwrapped below). Returns `:ok` or `{:error, {value, message}}`.
  def validate({:ok, %__MODULE__{} = response}), do: validate(response)
  # NOTE: the cond clause order is observable — for a response with several
  # invalid fields, the first failing check determines the reported error.
  def validate(%__MODULE__{body: body, status: status, headers: headers}) do
    cond do
      not is_map(headers) -> {:error, {headers, "response headers must be a map"}}
      not (is_bitstring(body) or is_map(body)) -> {:error, {body, "body must be a map or a string"}}
      not Enum.member?(allowed_status_codes(), status) -> {:error, {status, "invalid status code"}}
      true -> :ok
    end
  end
  # Anything that is not a %FakeServer.Response{} struct is rejected outright.
  def validate(response), do: {:error, {response, "invalid response type"}}
@doc """
Creates a new response with status 200
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def ok(body \\ "", headers \\ %{}), do: new(200, body, headers)
@doc """
Creates a new response with status 200 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def ok!(body \\ "", headers \\ %{}), do: new!(200, body, headers)
@doc """
Creates a new response with status 201
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def created(body \\ "", headers \\ %{}), do: new(201, body, headers)
@doc """
Creates a new response with status 201 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def created!(body \\ "", headers \\ %{}), do: new!(201, body, headers)
@doc """
Creates a new response with status 202
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def accepted(body \\ "", headers \\ %{}), do: new(202, body, headers)
@doc """
Creates a new response with status 202 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def accepted!(body \\ "", headers \\ %{}), do: new!(202, body, headers)
@doc """
Creates a new response with status 203.
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def non_authoritative_information(body \\ "", headers \\ %{}), do: new(203, body, headers)
@doc """
Creates a new response with status 203 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def non_authoritative_information!(body \\ "", headers \\ %{}), do: new!(203, body, headers)
@doc """
Creates a new response with status 204
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def no_content(body \\ "", headers \\ %{}), do: new(204, body, headers)
@doc """
Creates a new response with status 204 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def no_content!(body \\ "", headers \\ %{}), do: new!(204, body, headers)
@doc """
Creates a new response with status 205
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def reset_content(body \\ "", headers \\ %{}), do: new(205, body, headers)
@doc """
Creates a new response with status 205 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def reset_content!(body \\ "", headers \\ %{}), do: new!(205, body, headers)
@doc """
Creates a new response with status 206
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def partial_content(body \\ "", headers \\ %{}), do: new(206, body, headers)
@doc """
Creates a new response with status 206 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def partial_content!(body \\ "", headers \\ %{}), do: new!(206, body, headers)
@doc """
Returns a list with all 4xx HTTP methods available
"""
def all_4xx do
[
bad_request!(),
unauthorized!(),
forbidden!(),
not_found!(),
method_not_allowed!(),
not_acceptable!(),
proxy_authentication_required!(),
request_timeout!(),
conflict!(),
gone!(),
length_required!(),
precondition_failed!(),
payload_too_large!(),
uri_too_long!(),
unsupported_media_type!(),
expectation_failed!(),
im_a_teapot!(),
unprocessable_entity!(),
locked!(),
failed_dependency!(),
upgrade_required!(),
precondition_required!(),
too_many_requests!(),
request_header_fields_too_large!()
]
end
@doc """
Similar to `all_4xx/0`, but excludes the status codes in parameter.
"""
def all_4xx(except: except) do
all_4xx() |> Enum.reject(&(Enum.member?(except, &1.status)))
end
@doc """
Creates a new response with status 400
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def bad_request(body \\ "", headers \\ %{}), do: new(400, body, headers)
@doc """
Creates a new response with status 400 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def bad_request!(body \\ "", headers \\ %{}), do: new!(400, body, headers)
@doc """
Creates a new response with status 401
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def unauthorized(body \\ "", headers \\ %{}), do: new(401, body, headers)
@doc """
Creates a new response with status 401 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def unauthorized!(body \\ "", headers \\ %{}), do: new!(401, body, headers)
@doc """
Creates a new response with status 403
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def forbidden(body \\ "", headers \\ %{}), do: new(403, body, headers)
@doc """
Creates a new response with status 403 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def forbidden!(body \\ "", headers \\ %{}), do: new!(403, body, headers)
@doc """
Creates a new response with status 404
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def not_found(body \\ "", headers \\ %{}), do: new(404, body, headers)
@doc """
Creates a new response with status 404 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def not_found!(body \\ "", headers \\ %{}), do: new!(404, body, headers)
@doc """
Creates a new response with status 405
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def method_not_allowed(body \\ "", headers \\ %{}), do: new(405, body, headers)
@doc """
Creates a new response with status 405 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def method_not_allowed!(body \\ "", headers \\ %{}), do: new!(405, body, headers)
@doc """
Creates a new response with status 406
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def not_acceptable(body \\ "", headers \\ %{}), do: new(406, body, headers)
@doc """
Creates a new response with status 406 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def not_acceptable!(body \\ "", headers \\ %{}), do: new!(406, body, headers)
@doc """
Creates a new response with status 407
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def proxy_authentication_required(body \\ "", headers \\ %{}), do: new(407, body, headers)
@doc """
Creates a new response with status 407 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def proxy_authentication_required!(body \\ "", headers \\ %{}), do: new!(407, body, headers)
@doc """
Creates a new response with status 408
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def request_timeout(body \\ "", headers \\ %{}), do: new(408, body, headers)
@doc """
Creates a new response with status 408 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def request_timeout!(body \\ "", headers \\ %{}), do: new!(408, body, headers)
@doc """
Creates a new response with status 409
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def conflict(body \\ "", headers \\ %{}), do: new(409, body, headers)
@doc """
Creates a new response with status 409 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def conflict!(body \\ "", headers \\ %{}), do: new!(409, body, headers)
@doc """
Creates a new response with status 410
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def gone(body \\ "", headers \\ %{}), do: new(410, body, headers)
@doc """
Creates a new response with status 410 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def gone!(body \\ "", headers \\ %{}), do: new!(410, body, headers)
@doc """
Creates a new response with status 411
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def length_required(body \\ "", headers \\ %{}), do: new(411, body, headers)
@doc """
Creates a new response with status 411 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def length_required!(body \\ "", headers \\ %{}), do: new!(411, body, headers)
@doc """
Creates a new response with status 412
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def precondition_failed(body \\ "", headers \\ %{}), do: new(412, body, headers)
@doc """
Creates a new response with status 412 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def precondition_failed!(body \\ "", headers \\ %{}), do: new!(412, body, headers)
@doc """
Creates a new response with status 413
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def payload_too_large(body \\ "", headers \\ %{}), do: new(413, body, headers)
@doc """
Creates a new response with status 413 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def payload_too_large!(body \\ "", headers \\ %{}), do: new!(413, body, headers)
@doc """
Creates a new response with status 414
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def uri_too_long(body \\ "", headers \\ %{}), do: new(414, body, headers)
@doc """
Creates a new response with status 414 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def uri_too_long!(body \\ "", headers \\ %{}), do: new!(414, body, headers)
@doc """
Creates a new response with status 415
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def unsupported_media_type(body \\ "", headers \\ %{}), do: new(415, body, headers)
@doc """
Creates a new response with status 415 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def unsupported_media_type!(body \\ "", headers \\ %{}), do: new!(415, body, headers)
@doc """
Creates a new response with status 417
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def expectation_failed(body \\ "", headers \\ %{}), do: new(417, body, headers)
@doc """
Creates a new response with status 417 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def expectation_failed!(body \\ "", headers \\ %{}), do: new!(417, body, headers)
@doc """
Creates a new response with status 418
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def im_a_teapot(body \\ "", headers \\ %{}), do: new(418, body, headers)
@doc """
Creates a new response with status 418 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def im_a_teapot!(body \\ "", headers \\ %{}), do: new!(418, body, headers)
@doc """
Creates a new response with status 422
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def unprocessable_entity(body \\ "", headers \\ %{}), do: new(422, body, headers)
@doc """
Creates a new response with status 422 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def unprocessable_entity!(body \\ "", headers \\ %{}), do: new!(422, body, headers)
@doc """
Creates a new response with status 423
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def locked(body \\ "", headers \\ %{}), do: new(423, body, headers)
@doc """
Creates a new response with status 423 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def locked!(body \\ "", headers \\ %{}), do: new!(423, body, headers)
@doc """
Creates a new response with status 424
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def failed_dependency(body \\ "", headers \\ %{}), do: new(424, body, headers)
@doc """
Creates a new response with status 424 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def failed_dependency!(body \\ "", headers \\ %{}), do: new!(424, body, headers)
@doc """
Creates a new response with status 426
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def upgrade_required(body \\ "", headers \\ %{}), do: new(426, body, headers)
@doc """
Creates a new response with status 426 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def upgrade_required!(body \\ "", headers \\ %{}), do: new!(426, body, headers)
@doc """
Creates a new response with status 428
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def precondition_required(body \\ "", headers \\ %{}), do: new(428, body, headers)
@doc """
Creates a new response with status 428 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def precondition_required!(body \\ "", headers \\ %{}), do: new!(428, body, headers)
@doc """
Creates a new response with status 429
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def too_many_requests(body \\ "", headers \\ %{}), do: new(429, body, headers)
@doc """
Creates a new response with status 429 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def too_many_requests!(body \\ "", headers \\ %{}), do: new!(429, body, headers)
@doc """
Creates a new response with status 431
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def request_header_fields_too_large(body \\ "", headers \\ %{}), do: new(431, body, headers)
@doc """
Creates a new response with status 431 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def request_header_fields_too_large!(body \\ "", headers \\ %{}), do: new!(431, body, headers)
@doc """
Returns a list with all 5xx HTTP methods available.
"""
def all_5xx do
[
internal_server_error!(),
not_implemented!(),
bad_gateway!(),
service_unavailable!(),
gateway_timeout!(),
http_version_not_supported!(),
variant_also_negotiates!(),
insufficient_storage!(),
not_extended!(),
network_authentication_required!()
]
end
@doc """
Similar to `all_5xx/0`, but excludes the status codes in parameter.
"""
def all_5xx(except: except) do
all_5xx() |> Enum.reject(&(Enum.member?(except, &1.status)))
end
@doc """
Creates a new response with status 500
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def internal_server_error(body \\ "", headers \\ %{}), do: new(500, body, headers)
@doc """
Creates a new response with status 500 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def internal_server_error!(body \\ "", headers \\ %{}), do: new!(500, body, headers)
@doc """
Creates a new response with status 501
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def not_implemented(body \\ "", headers \\ %{}), do: new(501, body, headers)
@doc """
Creates a new response with status 501 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def not_implemented!(body \\ "", headers \\ %{}), do: new!(501, body, headers)
@doc """
Creates a new response with status 502
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def bad_gateway(body \\ "", headers \\ %{}), do: new(502, body, headers)
@doc """
Creates a new response with status 502 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def bad_gateway!(body \\ "", headers \\ %{}), do: new!(502, body, headers)
@doc """
Creates a new response with status 503
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def service_unavailable(body \\ "", headers \\ %{}), do: new(503, body, headers)
@doc """
Creates a new response with status 503 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def service_unavailable!(body \\ "", headers \\ %{}), do: new!(503, body, headers)
@doc """
Creates a new response with status 504
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def gateway_timeout(body \\ "", headers \\ %{}), do: new(504, body, headers)
@doc """
Creates a new response with status 504 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def gateway_timeout!(body \\ "", headers \\ %{}), do: new!(504, body, headers)
@doc """
Creates a new response with status 505
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def http_version_not_supported(body \\ "", headers \\ %{}), do: new(505, body, headers)
@doc """
Creates a new response with status 505 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def http_version_not_supported!(body \\ "", headers \\ %{}), do: new!(505, body, headers)
@doc """
Creates a new response with status 506
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def variant_also_negotiates(body \\ "", headers \\ %{}), do: new(506, body, headers)
@doc """
Creates a new response with status 506 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def variant_also_negotiates!(body \\ "", headers \\ %{}), do: new!(506, body, headers)
@doc """
Creates a new response with status 507
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def insufficient_storage(body \\ "", headers \\ %{}), do: new(507, body, headers)
@doc """
Creates a new response with status 507 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def insufficient_storage!(body \\ "", headers \\ %{}), do: new!(507, body, headers)
@doc """
Creates a new response with status 510
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def not_extended(body \\ "", headers \\ %{}), do: new(510, body, headers)
@doc """
Creates a new response with status 510 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def not_extended!(body \\ "", headers \\ %{}), do: new!(510, body, headers)
@doc """
Creates a new response with status 511
Returns `{:ok, response}` tuple on succes and `{:error, reason}` when validation fails.
"""
def network_authentication_required(body \\ "", headers \\ %{}), do: new(511, body, headers)
@doc """
Creates a new response with status 511 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def network_authentication_required!(body \\ "", headers \\ %{}), do: new!(511, body, headers)
@doc """
FakeServer default response. Used when there are no responses left to reply.
```
iex> FakeServer.Response.default()
{:ok,
%FakeServer.Response{
body: "{\"message\": \"This is a default response from FakeServer\"}",
headers: %{},
status: 200
}
}
```
"""
def default, do: new(200, ~s<{"message": "This is a default response from FakeServer"}>)
@doc """
Similar to `default/0`.
"""
def default!, do: new!(200, ~s<{"message": "This is a default response from FakeServer"}>)
# Whitelist of HTTP status codes a response may be built with. Note that
# some codes (e.g. 402 and 416) are not in this list, so building a
# response with them fails validation.
defp allowed_status_codes() do
[
100, 101, 102, 103, 200, 201, 202,
203, 204, 205, 206, 300, 301, 302,
303, 304, 305, 306, 307, 308, 400,
401, 403, 404, 405, 406, 407, 408,
409, 410, 411, 412, 413, 414, 415,
417, 418, 422, 423, 424, 426, 428,
429, 431, 500, 501, 502, 503, 504,
505, 506, 507, 510, 511
]
end
# Validates and normalizes the response body: binaries pass through
# untouched and maps are JSON-encoded. Any other body type is rejected with
# a descriptive `{:error, reason}` tuple — previously such bodies raised a
# FunctionClauseError instead of following the module's validation contract.
defp ensure_body_format(%__MODULE__{body: body} = response) when is_bitstring(body), do: {:ok, response}
defp ensure_body_format(%__MODULE__{body: body} = response) when is_map(body) do
  case Poison.encode(body) do
    {:ok, body} -> {:ok, %__MODULE__{response | body: body}}
    {:error, _} -> {:error, {body, "could not turn body map into json"}}
  end
end
defp ensure_body_format(%__MODULE__{body: body}), do: {:error, {body, "body must be a string or a map"}}
# Validates that every header name is a string (bitstring). Returns the
# response unchanged on success, or `{:error, reason}` naming the offending
# headers map otherwise.
defp ensure_headers_keys(%__MODULE__{headers: headers} = response) do
valid? = headers
|> Map.keys()
|> Enum.all?(&(is_bitstring(&1)))
if valid?, do: {:ok, response}, else: {:error, {headers, "all header keys must be strings"}}
end
end
| 31.665779 | 157 | 0.637568 |
7343c18eb83fa095a57ec544238e59f8f922d1e1 | 1,705 | exs | Elixir | app/config/config.exs | e8t-arena/phx_taste | 3c005f5fce1550bcba4489cdc4f5efde21ae5d84 | [
"MIT"
] | null | null | null | app/config/config.exs | e8t-arena/phx_taste | 3c005f5fce1550bcba4489cdc4f5efde21ae5d84 | [
"MIT"
] | null | null | null | app/config/config.exs | e8t-arena/phx_taste | 3c005f5fce1550bcba4489cdc4f5efde21ae5d84 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
import Config
config :app,
ecto_repos: [App.Repo]
# Configures the endpoint
config :app, AppWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "sQ1C1xT+2UYqkmLEe72JrMkuKUfRIRIyLEm4JDaYPX9vz+ClruHO0mXqXyIOI9jx",
render_errors: [view: AppWeb.ErrorView, accepts: ~w(html json), layout: false],
pubsub_server: App.PubSub,
live_view: [signing_salt: "pjxJW50W"]
# Configures the mailer.
#
# By default it uses the "Local" adapter which stores the emails
# locally. You can see the emails in your browser, at "/dev/mailbox".
#
# For production it's recommended to configure a different adapter
# at the `config/runtime.exs`.
config :app, App.Mailer, adapter: Swoosh.Adapters.Local
# Swoosh API client is needed for adapters other than SMTP.
config :swoosh, :api_client, false
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Configure esbuild (the version is required)
config :esbuild,
version: "0.12.15",
default: [
args: ~w(js/app.js --bundle --target=es2016 --outdir=../priv/static/js/),
cd: Path.expand("../assets", __DIR__),
env: %{"NODE_PATH" => Path.expand("../deps", __DIR__)}
]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{config_env()}.exs"
| 32.169811 | 86 | 0.738416 |
7343c60ea1e82a2c4ab976cc471b381b7caa42f6 | 16,164 | ex | Elixir | lib/ecto/adapters/sql.ex | alanpeabody/ecto | a8f655225b0f35e7f4d223b2d12be613121bb4f8 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/sql.ex | alanpeabody/ecto | a8f655225b0f35e7f4d223b2d12be613121bb4f8 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/sql.ex | alanpeabody/ecto | a8f655225b0f35e7f4d223b2d12be613121bb4f8 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Adapters.SQL do
@moduledoc """
Behaviour and implementation for SQL adapters.
The implementation for SQL adapter relies on `DBConnection`
to provide pooling, prepare, execute and more.
Developers that use `Ecto.Adapters.SQL` should implement
the callbacks required both by this module and the ones
from `Ecto.Adapters.SQL.Query` about building queries.
"""
@doc false
defmacro __using__(adapter) do
quote do
@behaviour Ecto.Adapter
@behaviour Ecto.Adapter.Migration
@behaviour Ecto.Adapter.Transaction
@conn __MODULE__.Connection
@adapter unquote(adapter)
@doc false
defmacro __before_compile__(env) do
Ecto.Adapters.SQL.__before_compile__(@conn, env)
end
@doc false
def application do
@adapter
end
@doc false
def child_spec(repo, opts) do
Ecto.Adapters.SQL.child_spec(@conn, @adapter, repo, opts)
end
## Types
@doc false
def autogenerate(:id), do: nil
def autogenerate(:embed_id), do: Ecto.UUID.generate()
def autogenerate(:binary_id), do: Ecto.UUID.bingenerate()
@doc false
def loaders({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.load_embed(type, &1)]
def loaders(:binary_id, type), do: [Ecto.UUID, type]
def loaders(_, type), do: [type]
@doc false
def dumpers({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)]
def dumpers(:binary_id, type), do: [type, Ecto.UUID]
def dumpers(_, type), do: [type]
## Query
@doc false
def prepare(:all, query),
do: {:cache, {System.unique_integer([:positive]), @conn.all(query)}}
def prepare(:update_all, query),
do: {:cache, {System.unique_integer([:positive]), @conn.update_all(query)}}
def prepare(:delete_all, query),
do: {:cache, {System.unique_integer([:positive]), @conn.delete_all(query)}}
@doc false
def execute(repo, meta, query, params, process, opts) do
Ecto.Adapters.SQL.execute(repo, meta, query, params, process, opts)
end
@doc false
def insert_all(repo, %{source: {prefix, source}}, header, rows, returning, opts) do
Ecto.Adapters.SQL.insert_all(repo, @conn, prefix, source, header, rows, returning, opts)
end
@doc false
def insert(repo, %{source: {prefix, source}}, params, returning, opts) do
{fields, values} = :lists.unzip(params)
sql = @conn.insert(prefix, source, fields, [fields], returning)
Ecto.Adapters.SQL.struct(repo, @conn, sql, values, returning, opts)
end
@doc false
def update(repo, %{source: {prefix, source}}, fields, filter, returning, opts) do
{fields, values1} = :lists.unzip(fields)
{filter, values2} = :lists.unzip(filter)
sql = @conn.update(prefix, source, fields, filter, returning)
Ecto.Adapters.SQL.struct(repo, @conn, sql, values1 ++ values2, returning, opts)
end
@doc false
def delete(repo, %{source: {prefix, source}}, filter, opts) do
{filter, values} = :lists.unzip(filter)
sql = @conn.delete(prefix, source, filter, [])
Ecto.Adapters.SQL.struct(repo, @conn, sql, values, [], opts)
end
## Transaction
@doc false
def transaction(repo, opts, fun) do
Ecto.Adapters.SQL.transaction(repo, opts, fun)
end
@doc false
def in_transaction?(repo) do
Ecto.Adapters.SQL.in_transaction?(repo)
end
@doc false
def rollback(repo, value) do
Ecto.Adapters.SQL.rollback(repo, value)
end
## Migration
@doc false
def execute_ddl(repo, definition, opts) do
sql = @conn.execute_ddl(definition)
Ecto.Adapters.SQL.query!(repo, sql, [], opts)
:ok
end
defoverridable [prepare: 2, execute: 6, insert: 5, update: 6, delete: 4, insert_all: 6,
execute_ddl: 3, loaders: 2, dumpers: 2, autogenerate: 1]
end
end
@doc """
Converts the given query to SQL according to its kind and the
adapter in the given repository.
## Examples
The examples below are meant for reference. Each adapter will
return a different result:
Ecto.Adapters.SQL.to_sql(:all, repo, Post)
{"SELECT p.id, p.title, p.inserted_at, p.created_at FROM posts as p", []}
Ecto.Adapters.SQL.to_sql(:update_all, repo,
from(p in Post, update: [set: [title: ^"hello"]]))
{"UPDATE posts AS p SET title = $1", ["hello"]}
"""
@spec to_sql(:all | :update_all | :delete_all, Ecto.Repo.t, Ecto.Queryable.t) ::
{String.t, [term]}
def to_sql(kind, repo, queryable) do
adapter = repo.__adapter__
queryable
|> Ecto.Queryable.to_query()
|> Ecto.Query.Planner.returning(kind == :all)
|> Ecto.Query.Planner.query(kind, repo, adapter)
|> case do
{_meta, {:cached, {_id, cached}}, params} ->
{String.Chars.to_string(cached), params}
{_meta, {:cache, _update, {_id, prepared}}, params} ->
{prepared, params}
{_meta, {:nocache, {_id, prepared}}, params} ->
{prepared, params}
end
end
@doc """
Same as `query/4` but raises on invalid queries.
"""
@spec query!(Ecto.Repo.t, String.t, [term], Keyword.t) ::
%{rows: nil | [tuple], num_rows: non_neg_integer} | no_return
def query!(repo, sql, params, opts \\ []) do
query!(repo, sql, map_params(params), fn x -> x end, opts)
end
defp query!(repo, sql, params, mapper, opts) do
case query(repo, sql, params, mapper, opts) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
@doc """
Runs custom SQL query on given repo.
In case of success, it must return an `:ok` tuple containing
a map with at least two keys:
* `:num_rows` - the number of rows affected
* `:rows` - the result set as a list. `nil` may be returned
instead of the list if the command does not yield any row
as result (but still yields the number of affected rows,
like a `delete` command without returning would)
## Options
* `:timeout` - The time in milliseconds to wait for a query to finish,
`:infinity` will wait indefinitely. (default: 15_000)
* `:pool_timeout` - The time in milliseconds to wait for a call to the pool
to finish, `:infinity` will wait indefinitely. (default: 5_000)
* `:log` - When false, does not log the query
## Examples
iex> Ecto.Adapters.SQL.query(MyRepo, "SELECT $1::integer + $2", [40, 2])
{:ok, %{rows: [{42}], num_rows: 1}}
"""
@spec query(Ecto.Repo.t, String.t, [term], Keyword.t) ::
{:ok, %{rows: nil | [tuple], num_rows: non_neg_integer}} | {:error, Exception.t}
def query(repo, sql, params, opts \\ []) do
query(repo, sql, map_params(params), fn x -> x end, opts)
end
defp query(repo, sql, params, mapper, opts) do
sql_call(repo, :execute, [sql], params, mapper, opts)
end
defp sql_call(repo, callback, args, params, mapper, opts) do
{pool, default_opts} = repo.__pool__
conn = get_conn(pool) || pool
opts = [decode_mapper: mapper] ++ with_log(repo, params, opts ++ default_opts)
args = args ++ [params, opts]
apply(repo.__sql__, callback, [conn | args])
end
# Dumps structs and non-empty lists through `Ecto.DataType` so they can be
# handed to the driver; every other value passes through unchanged.
defp map_params(params), do: Enum.map(params, &dump_param/1)

defp dump_param(%{__struct__: _} = value), do: dumped!(value)
defp dump_param([_ | _] = value), do: dumped!(value)
defp dump_param(value), do: value

# Asserts the dump succeeded (a failed dump is a bug, so we crash).
defp dumped!(value) do
  {:ok, dumped} = Ecto.DataType.dump(value)
  dumped
end
## Worker
@pool_timeout 5_000
@timeout 15_000
@doc false
def __before_compile__(conn, env) do
config = Module.get_attribute(env.module, :config)
pool = Keyword.get(config, :pool, DBConnection.Poolboy)
if pool == Ecto.Adapters.SQL.Sandbox and config[:pool_size] == 1 do
IO.puts :stderr, "warning: setting the :pool_size to 1 for #{inspect env.module} " <>
"when using the Ecto.Adapters.SQL.Sandbox pool is deprecated and " <>
"won't work as expected. Please remove the :pool_size configuration " <>
"or set it to a reasonable number like 10"
end
pool_name = pool_name(env.module, config)
norm_config = normalize_config(config)
quote do
@doc false
def __sql__, do: unquote(conn)
@doc false
def __pool__, do: {unquote(pool_name), unquote(Macro.escape(norm_config))}
defoverridable [__pool__: 0]
end
end
defp normalize_config(config) do
config
|> Keyword.delete(:name)
|> Keyword.update(:pool, DBConnection.Poolboy, &normalize_pool/1)
|> Keyword.put_new(:timeout, @timeout)
|> Keyword.put_new(:pool_timeout, @pool_timeout)
end
defp normalize_pool(Ecto.Adapters.SQL.Sandbox),
do: DBConnection.Ownership
defp normalize_pool(pool),
do: pool
defp pool_name(module, config) do
Keyword.get(config, :pool_name, default_pool_name(module, config))
end
defp default_pool_name(repo, config) do
Module.concat(Keyword.get(config, :name, repo), Pool)
end
@doc false
def child_spec(connection, adapter, repo, opts) do
unless Code.ensure_loaded?(connection) do
raise """
could not find #{inspect connection}.
Please verify you have added #{inspect adapter} as a dependency:
{#{inspect adapter}, ">= 0.0.0"}
And remember to recompile Ecto afterwards by cleaning the current build:
mix deps.clean --build ecto
"""
end
# Check if the pool options should overriden
{pool_name, pool_opts} = case Keyword.fetch(opts, :pool) do
{:ok, pool} when pool != Ecto.Adapters.SQL.Sandbox ->
{pool_name(repo, opts), opts}
_ ->
repo.__pool__
end
opts = [name: pool_name] ++ Keyword.delete(opts, :pool) ++ pool_opts
opts =
if function_exported?(repo, :after_connect, 1) and not Keyword.has_key?(opts, :after_connect) do
IO.puts :stderr, "warning: #{inspect repo}.after_connect/1 is deprecated. If you want to " <>
"perform some action after connecting, please set after_connect: {module, fun, args}" <>
"in your repository configuration"
Keyword.put(opts, :after_connect, {repo, :after_connect, []})
else
opts
end
connection.child_spec(opts)
end
## Types
@doc false
def load_embed(type, value) do
Ecto.Type.load(type, value, fn
{:embed, _} = type, value -> load_embed(type, value)
type, value -> Ecto.Type.cast(type, value)
end)
end
@doc false
def dump_embed(type, value) do
Ecto.Type.dump(type, value, fn
{:embed, _} = type, value -> dump_embed(type, value)
_type, value -> {:ok, value}
end)
end
## Query
@doc false
def insert_all(repo, conn, prefix, source, header, rows, returning, opts) do
{rows, params} = unzip_inserts(header, rows)
sql = conn.insert(prefix, source, header, rows, returning)
%{rows: rows, num_rows: num} = query!(repo, sql, Enum.reverse(params), nil, opts)
{num, rows}
end
# For each row, walks the `header` columns and emits the column key when the
# row provides a value (pushing the value onto the params accumulator, so
# params end up in reverse order) or `nil` when it does not. Returns
# `{rows_with_nil_placeholders, reversed_params}`; the caller reverses the
# params before executing (see `insert_all/8`).
defp unzip_inserts(header, rows) do
Enum.map_reduce rows, [], fn fields, params ->
Enum.map_reduce header, params, fn key, acc ->
case :lists.keyfind(key, 1, fields) do
{^key, value} -> {key, [value|acc]}
false -> {nil, acc}
end
end
end
end
@doc false
def execute(repo, _meta, {:cache, update, {id, prepared}}, params, nil, opts) do
execute_and_cache(repo, id, update, prepared, params, nil, opts)
end
def execute(repo, %{fields: fields}, {:cache, update, {id, prepared}}, params, process, opts) do
mapper = &process_row(&1, process, fields)
execute_and_cache(repo, id, update, prepared, params, mapper, opts)
end
def execute(repo, _meta, {_, {_id, prepared_or_cached}}, params, nil, opts) do
%{rows: rows, num_rows: num} =
sql_call!(repo, :execute, [prepared_or_cached], params, nil, opts)
{num, rows}
end
def execute(repo, %{fields: fields}, {_, {_id, prepared_or_cached}}, params, process, opts) do
mapper = &process_row(&1, process, fields)
%{rows: rows, num_rows: num} =
sql_call!(repo, :execute, [prepared_or_cached], params, mapper, opts)
{num, rows}
end
defp execute_and_cache(repo, id, update, prepared, params, mapper, opts) do
name = "ecto_" <> Integer.to_string(id)
case sql_call(repo, :prepare_execute, [name, prepared], params, mapper, opts) do
{:ok, query, %{num_rows: num, rows: rows}} ->
update.({0, query})
{num, rows}
{:error, err} ->
raise err
end
end
defp sql_call!(repo, callback, args, params, mapper, opts) do
case sql_call(repo, callback, args, params, mapper, opts) do
{:ok, res} -> res
{:error, err} -> raise err
end
end
@doc false
# Runs `sql` for a single-struct operation (insert/update/delete) and
# normalizes the driver result for the repo layer.
def struct(repo, conn, sql, values, returning, opts) do
case query(repo, sql, values, fn x -> x end, opts) do
# Exactly one row affected with no result set (no RETURNING values).
{:ok, %{rows: nil, num_rows: 1}} ->
{:ok, []}
# One row affected and returned: pair returned values with their fields.
{:ok, %{rows: [values], num_rows: 1}} ->
{:ok, Enum.zip(returning, values)}
# No row matched the filters: the struct is stale.
{:ok, %{num_rows: 0}} ->
{:error, :stale}
{:error, err} ->
# Let the connection module map the error to constraint violations;
# anything it cannot map is re-raised.
case conn.to_constraints(err) do
[] -> raise err
constraints -> {:invalid, constraints}
end
end
end
# Converts a raw result row into processed values following the query's
# `fields`. A `{:&, _, [_, _, counter]}` field selects a whole source
# spanning `counter` consecutive columns; when all of those columns are nil
# the whole selection becomes nil instead of being processed.
defp process_row(row, process, fields) do
Enum.map_reduce(fields, row, fn
{:&, _, [_, _, counter]} = field, acc ->
case split_and_not_nil(acc, counter, true, []) do
{nil, rest} -> {nil, rest}
{val, rest} -> {process.(field, val, nil), rest}
end
field, [h|t] ->
{process.(field, h, nil), t}
end) |> elem(0)
end
# Takes `count` leading elements off the list. Returns `{nil, rest}` when
# every consumed element was nil (tracked by `all_nil?`), otherwise
# `{elements_in_original_order, rest}` (the accumulator is built reversed
# and flipped once at the end).
defp split_and_not_nil(rest, 0, true, _acc), do: {nil, rest}
defp split_and_not_nil(rest, 0, false, acc), do: {:lists.reverse(acc), rest}
defp split_and_not_nil([nil|t], count, all_nil?, acc) do
split_and_not_nil(t, count - 1, all_nil?, [nil|acc])
end
defp split_and_not_nil([h|t], count, _all_nil?, acc) do
split_and_not_nil(t, count - 1, false, [h|acc])
end
## Transactions

@doc false
# Runs `fun` inside a database transaction. If the calling process already
# holds a transaction connection for this pool it is reused (nested
# transaction); otherwise a fresh one is checked out via do_transaction/3.
def transaction(repo, opts, fun) do
  {pool, default_opts} = repo.__pool__
  full_opts = with_log(repo, [], opts ++ default_opts)

  case get_conn(pool) do
    nil ->
      do_transaction(pool, full_opts, fun)

    conn ->
      DBConnection.transaction(conn, fn _ -> fun.() end, full_opts)
  end
end
# Starts a fresh transaction on the pool. While `fun` runs, the checked-out
# connection is stored in the process dictionary (see put_conn/2) so that
# nested `transaction/3` calls and `rollback/2` can find it; the entry is
# removed again in `after` even when `fun` raises, throws or exits.
defp do_transaction(pool, opts, fun) do
  run = fn(conn) ->
    try do
      put_conn(pool, conn)
      fun.()
    after
      delete_conn(pool)
    end
  end
  DBConnection.transaction(pool, run, opts)
end
@doc false
# True when the calling process currently holds a transaction connection for
# `repo`'s pool (i.e. it is inside `transaction/3`).
def in_transaction?(repo) do
  {pool, _default_opts} = repo.__pool__
  if get_conn(pool), do: true, else: false
end
@doc false
# Aborts the transaction owned by the calling process, making it return
# `{:error, value}`. Raises when no transaction connection is registered.
def rollback(repo, value) do
  {pool, _default_opts} = repo.__pool__

  case get_conn(pool) do
    conn when conn != nil ->
      DBConnection.rollback(conn, value)

    nil ->
      raise "cannot call rollback outside of transaction"
  end
end
## Log

# Installs the repo logger as the `:log` option unless logging was explicitly
# disabled with `log: false`. Any other `:log` value is a caller error and
# crashes with CaseClauseError, exactly as before.
defp with_log(repo, params, opts) do
  {log?, rest} = Keyword.pop(opts, :log, true)

  case log? do
    true -> [log: &log(repo, params, &1)] ++ rest
    false -> rest
  end
end
# Translates a DBConnection log entry into an `Ecto.LogEntry` and hands it to
# the repo's `__log__/1` callback. Connection/decode/pool timings map onto
# query/decode/queue time; prepare results are collapsed via log_result/1.
defp log(repo, params, entry) do
  %{connection_time: query_time, decode_time: decode_time,
    pool_time: queue_time, result: result, query: query} = entry
  repo.__log__(%Ecto.LogEntry{query_time: query_time, decode_time: decode_time,
                              queue_time: queue_time, result: log_result(result),
                              params: params, query: String.Chars.to_string(query)})
end
# Collapses a successful prepare+execute result `{:ok, query, res}` down to
# `{:ok, res}` for logging; anything else is passed through untouched.
defp log_result({:ok, _query, result}) do
  {:ok, result}
end

defp log_result(other) do
  other
end
## Connection helpers

# Registers the transaction connection for `pool` in the process dictionary.
defp put_conn(pool, conn) do
  _ = Process.put(key(pool), conn)
  :ok
end
# Fetches the transaction connection for `pool`, or nil when the calling
# process is not inside a transaction.
defp get_conn(pool) do
  Process.get(key(pool))
end
# Unregisters the transaction connection for `pool`.
defp delete_conn(pool) do
  _ = Process.delete(key(pool))
  :ok
end
# Process-dictionary key under which the pool's connection is stored.
defp key(pool), do: {__MODULE__, pool}
end
| 30.90631 | 113 | 0.614266 |
7343c7c448baa84e11061208a2067b5e8c3b23d0 | 2,499 | exs | Elixir | test/controllers/user_controller_test.exs | jgunnink/phoenix_blog | 15017cdbca1da3ef5338c819b265cca3997aebe5 | [
"MIT"
] | 1 | 2016-10-07T14:36:21.000Z | 2016-10-07T14:36:21.000Z | test/controllers/user_controller_test.exs | jgunnink/phoenix_blog | 15017cdbca1da3ef5338c819b265cca3997aebe5 | [
"MIT"
] | null | null | null | test/controllers/user_controller_test.exs | jgunnink/phoenix_blog | 15017cdbca1da3ef5338c819b265cca3997aebe5 | [
"MIT"
] | null | null | null | defmodule Pxblog.UserControllerTest do
use Pxblog.ConnCase
alias Pxblog.User
@valid_create_attrs %{
email: "[email protected]",
username: "jgunnink",
password: "abc123abc",
password_confirmation: "abc123abc"
}
@valid_attrs %{email: "[email protected]", username: "jgunnink"}
@invalid_attrs %{}
test "lists all entries on index", %{conn: conn} do
conn = get conn, user_path(conn, :index)
assert html_response(conn, 200) =~ "Listing users"
end
test "renders form for new resources", %{conn: conn} do
conn = get conn, user_path(conn, :new)
assert html_response(conn, 200) =~ "New user"
end
test "creates resource and redirects when data is valid", %{conn: conn} do
conn = post conn, user_path(conn, :create), user: @valid_create_attrs
assert redirected_to(conn) == user_path(conn, :index)
assert Repo.get_by(User, @valid_attrs)
end
test "does not create resource and renders errors when data is invalid", %{conn: conn} do
conn = post conn, user_path(conn, :create), user: @invalid_attrs
assert html_response(conn, 200) =~ "New user"
end
test "shows chosen resource", %{conn: conn} do
user = Repo.insert! %User{}
conn = get conn, user_path(conn, :show, user)
assert html_response(conn, 200) =~ "Show user"
end
test "renders page not found when id is nonexistent", %{conn: conn} do
assert_error_sent 404, fn ->
get conn, user_path(conn, :show, -1)
end
end
test "renders form for editing chosen resource", %{conn: conn} do
user = Repo.insert! %User{}
conn = get conn, user_path(conn, :edit, user)
assert html_response(conn, 200) =~ "Edit user"
end
test "updates chosen resource and redirects when data is valid", %{conn: conn} do
user = Repo.insert! %User{}
conn = put conn, user_path(conn, :update, user), user: @valid_create_attrs
assert redirected_to(conn) == user_path(conn, :show, user)
assert Repo.get_by(User, @valid_attrs)
end
test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
user = Repo.insert! %User{}
conn = put conn, user_path(conn, :update, user), user: @invalid_attrs
assert html_response(conn, 200) =~ "Edit user"
end
test "deletes chosen resource", %{conn: conn} do
user = Repo.insert! %User{}
conn = delete conn, user_path(conn, :delete, user)
assert redirected_to(conn) == user_path(conn, :index)
refute Repo.get(User, user.id)
end
end
| 34.232877 | 98 | 0.677871 |
7343ca067f60c7c4d93717e6b6b9e20577a1c99a | 1,449 | ex | Elixir | lib/covid_19_web/endpoint.ex | clarkware/live_view_covid_19 | dfe6ec0cb0ecf91ef30a58b549acc82d4614916a | [
"MIT"
] | 4 | 2020-03-20T13:37:51.000Z | 2020-03-27T23:53:55.000Z | lib/covid_19_web/endpoint.ex | clarkware/live_view_covid_19 | dfe6ec0cb0ecf91ef30a58b549acc82d4614916a | [
"MIT"
] | 2 | 2021-03-10T11:22:03.000Z | 2021-05-11T07:08:43.000Z | lib/covid_19_web/endpoint.ex | clarkware/live_view_covid_19 | dfe6ec0cb0ecf91ef30a58b549acc82d4614916a | [
"MIT"
] | null | null | null | defmodule Covid19Web.Endpoint do
  # HTTP entry point of the application; plugs run top-to-bottom, so the
  # order below is significant.
  use Phoenix.Endpoint, otp_app: :covid_19

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_covid_19_key",
    signing_salt: "w+AVm9Ps"
  ]

  socket "/socket", Covid19Web.UserSocket,
    websocket: true,
    longpoll: false

  # LiveView socket; shares the signed session options above so LiveViews
  # can read session data on connect.
  socket "/live", Phoenix.LiveView.Socket,
    websocket: [connect_info: [session: @session_options]]

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :covid_19,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end

  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  plug Covid19Web.Router
end
| 28.411765 | 69 | 0.711525 |
7343ef96711ed68b487f447c0dd155853a0e627f | 1,728 | exs | Elixir | test/type_check/builtin/implements_protocol_test.exs | 0urobor0s/elixir-type_check | 6bac178f4a55798034b12c1dcfb219eabcfb35c0 | [
"MIT"
] | null | null | null | test/type_check/builtin/implements_protocol_test.exs | 0urobor0s/elixir-type_check | 6bac178f4a55798034b12c1dcfb219eabcfb35c0 | [
"MIT"
] | null | null | null | test/type_check/builtin/implements_protocol_test.exs | 0urobor0s/elixir-type_check | 6bac178f4a55798034b12c1dcfb219eabcfb35c0 | [
"MIT"
] | null | null | null | defmodule TypeCheck.Builtin.ImplementsProtocolTest do
  # Property tests for the `impl/1` builtin type: values generated for
  # `impl(Protocol)` must actually implement that protocol.
  use ExUnit.Case, async: true
  use ExUnitProperties
  import StreamData, only: []
  require TypeCheck
  import TypeCheck.Builtin

  describe "ToStreamData implementation" do
    property "impl(Enumerable) is able to generate enumerables" do
      check all value <- TypeCheck.Protocols.ToStreamData.to_gen(impl(Enumerable)) do
        # Enum.count/1 only works on Enumerable implementations.
        assert is_integer(Enum.count(value))
      end
    end

    property "impl(Collectable) is able to generate collectables" do
      check all value <- TypeCheck.Protocols.ToStreamData.to_gen(impl(Collectable)) do
        # Collectable.into/1 returns {initial_acc, collector_fun/2}.
        {_initial, collection_fun} = Collectable.into(value)
        assert is_function(collection_fun, 2)
      end
    end

    property "impl(String.Chars) is able to generate anything that indeed can be turned into a string" do
      check all value <- TypeCheck.Protocols.ToStreamData.to_gen(impl(String.Chars)) do
        res = to_string(value)
        assert is_binary(res)
      end
    end

    property "impl(Inspect) is able to generate any inspectable type (essentially anything?)" do
      check all value <- TypeCheck.Protocols.ToStreamData.to_gen(impl(Inspect)) do
        res = inspect(value)
        assert is_binary(res)
      end
    end

    test "raises for non-consolidated protocols" do
      # A protocol defined at test runtime is never consolidated, so
      # generation must be rejected with a descriptive error.
      defprotocol ThisProtocolIsNotConsolidated do
        def foo(_impl)
      end

      assert_raise(RuntimeError, "values of the type #TypeCheck.Type< impl(TypeCheck.Builtin.ImplementsProtocolTest.ThisProtocolIsNotConsolidated) > can only be generated when the protocol is consolidated.", fn ->
        TypeCheck.Protocols.ToStreamData.to_gen(impl(ThisProtocolIsNotConsolidated))
      end)
    end
  end
end
| 35.265306 | 213 | 0.721065 |
73442e9920c4289e3e85ab096874c1e3b03abdeb | 1,883 | ex | Elixir | lib/mix/lib/releases/runtime/pidfile.ex | happysalada/distillery | 2098604c9f12a26227d6bd794c62d5e62e609ad4 | [
"MIT"
] | null | null | null | lib/mix/lib/releases/runtime/pidfile.ex | happysalada/distillery | 2098604c9f12a26227d6bd794c62d5e62e609ad4 | [
"MIT"
] | null | null | null | lib/mix/lib/releases/runtime/pidfile.ex | happysalada/distillery | 2098604c9f12a26227d6bd794c62d5e62e609ad4 | [
"MIT"
] | null | null | null | defmodule Mix.Releases.Runtime.Pidfile do
  @moduledoc """
  This is a kernel process which will maintain a pidfile for the running node
  """

  @doc false
  # Will be called by `:init`
  # Spawns the worker and waits for its handshake message
  # ({:ok, pid} | {:ignore, pid} | {:error, pid, reason}) before reporting
  # the outcome back to `:init`.
  def start() do
    # We don't need to link to `:init`, it will take care
    # of linking to us, since we're being started as a kernel process
    pid = spawn(__MODULE__, :init, [self(), Process.whereis(:init)])

    receive do
      {:ok, ^pid} = ok ->
        ok
      {:ignore, ^pid} ->
        :ignore
      {:error, ^pid, reason} ->
        {:error, reason}
    end
  end

  @doc false
  # Worker body: resolves the pidfile path from the :kernel app env (falling
  # back to the PIDFILE environment variable), writes the OS pid to it, then
  # enters the monitoring loop. Reports its status to `starter` (see start/0).
  def init(starter, parent) do
    me = self()

    case Application.get_env(:kernel, :pidfile, System.get_env("PIDFILE")) do
      nil ->
        # No config, so no need for this process
        send(starter, {:ignore, me})

      path when is_binary(path) ->
        case File.write(path, :os.getpid()) do
          :ok ->
            # We've written the pid, so proceed
            # trap_exit lets loop/2 observe :init going down as a message.
            Process.flag(:trap_exit, true)
            # Register
            Process.register(me, __MODULE__)
            # We're started!
            send(starter, {:ok, me})
            # Enter receive loop
            loop(%{pidfile: path}, parent)
          {:error, reason} ->
            send(starter, {:error, me, {:invalid_pidfile, path, reason}})
        end

      path ->
        send(starter, {:error, me, {:invalid_pidfile_config, path}})
    end
  end

  # Polls every 5s: if the pidfile disappears the whole node is asked to shut
  # down; an exit signal from `parent` (:init) terminates this process and
  # removes the pidfile. All other messages are ignored.
  defp loop(%{pidfile: path} = state, parent) do
    receive do
      {:EXIT, ^parent, reason} ->
        terminate(reason, parent, state)
      _ ->
        loop(state, parent)
    after
      5_000 ->
        if File.exists?(path) do
          loop(state, parent)
        else
          :init.stop()
        end
    end
  end

  defp terminate(reason, _parent, %{pidfile: path}) do
    # Cleanup pidfile
    _ = File.rm(path)
    exit(reason)
  end
end
| 23.246914 | 77 | 0.549124 |
73445f821523a7509ea4dc66fd246fe7007adb43 | 23,769 | ex | Elixir | lib/elixir/lib/exception.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/exception.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/exception.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | defmodule Exception do
  @moduledoc """
  Functions to format throw/catch/exit and exceptions.

  Note that stacktraces in Elixir are updated on throw,
  errors and exits. For example, at any given moment,
  `System.stacktrace/0` will return the stacktrace for the
  last throw/error/exit that occurred in the current process.

  Do not rely on the particular format returned by the `format`
  functions in this module. They may be changed in future releases
  in order to better suit Elixir's tool chain. In other words,
  by using the functions in this module it is guaranteed you will
  format exceptions as in the current Elixir version being used.
  """

  @typedoc "The exception type"
  @type t :: %{__struct__: module, __exception__: true}

  @typedoc "The kind handled by formatting functions"
  @type kind :: :error | :exit | :throw | {:EXIT, pid}

  @type stacktrace :: [stacktrace_entry]
  @type stacktrace_entry ::
        {module, atom, arity_or_args, location} |
        {(... -> any), arity_or_args, location}

  @typep arity_or_args :: non_neg_integer | list
  @typep location :: Keyword.t

  # Behaviour callbacks implemented by every module defined via `defexception`.
  @callback exception(term) :: t
  @callback message(t) :: String.t

  @doc """
  Returns `true` if the given `term` is an exception.
  """
  def exception?(term)
  def exception?(%{__struct__: struct, __exception__: true}) when is_atom(struct),
    do: true
  def exception?(_), do: false

  @doc """
  Gets the message for an `exception`.
  """
  def message(%{__struct__: module, __exception__: true} = exception) when is_atom(module) do
    try do
      module.message(exception)
    rescue
      e ->
        # The message/1 callback itself raised; describe that failure rather
        # than letting the formatter crash.
        "got #{inspect e.__struct__} with message #{inspect message(e)} " <>
        "while retrieving Exception.message/1 for #{inspect(exception)}"
    else
      x when is_binary(x) -> x
      x ->
        # The callback returned a non-binary, violating the contract.
        "got #{inspect(x)} " <>
        "while retrieving Exception.message/1 for #{inspect(exception)} " <>
        "(expected a string)"
    end
  end

  @doc """
  Normalizes an exception, converting Erlang exceptions
  to Elixir exceptions.

  It takes the `kind` spilled by `catch` as an argument and
  normalizes only `:error`, returning the untouched payload
  for others.

  The third argument, a stacktrace, is optional. If it is
  not supplied `System.stacktrace/0` will sometimes be used
  to get additional information for the `kind` `:error`. If
  the stacktrace is unknown and `System.stacktrace/0` would
  not return the stacktrace corresponding to the exception
  an empty stacktrace, `[]`, must be used.
  """
  @spec normalize(:error, any, stacktrace) :: t
  @spec normalize(kind, payload, stacktrace) :: payload when payload: var

  # Generating a stacktrace is expensive, default to nil
  # to only fetch it when needed.
  def normalize(kind, payload, stacktrace \\ nil)

  def normalize(:error, exception, stacktrace) do
    if exception?(exception) do
      exception
    else
      ErlangError.normalize(exception, stacktrace)
    end
  end

  def normalize(_kind, payload, _stacktrace) do
    payload
  end

  @doc """
  Normalizes and formats any throw/error/exit.

  The message is formatted and displayed in the same
  format as used by Elixir's CLI.

  The third argument, a stacktrace, is optional. If it is
  not supplied `System.stacktrace/0` will sometimes be used
  to get additional information for the `kind` `:error`. If
  the stacktrace is unknown and `System.stacktrace/0` would
  not return the stacktrace corresponding to the exception
  an empty stacktrace, `[]`, must be used.
  """
  @spec format_banner(kind, any, stacktrace | nil) :: String.t
  def format_banner(kind, exception, stacktrace \\ nil)

  def format_banner(:error, exception, stacktrace) do
    exception = normalize(:error, exception, stacktrace)
    "** (" <> inspect(exception.__struct__) <> ") " <> message(exception)
  end

  def format_banner(:throw, reason, _stacktrace) do
    "** (throw) " <> inspect(reason)
  end

  def format_banner(:exit, reason, _stacktrace) do
    "** (exit) " <> format_exit(reason, <<"\n ">>)
  end

  def format_banner({:EXIT, pid}, reason, _stacktrace) do
    "** (EXIT from #{inspect pid}) " <> format_exit(reason, <<"\n ">>)
  end

  @doc """
  Normalizes and formats throw/errors/exits and stacktraces.

  It relies on `format_banner/3` and `format_stacktrace/1`
  to generate the final format.

  Note that `{:EXIT, pid}` do not generate a stacktrace though
  (as they are retrieved as messages without stacktraces).
  """
  @spec format(kind, any, stacktrace | nil) :: String.t
  def format(kind, payload, stacktrace \\ nil)

  def format({:EXIT, _} = kind, any, _) do
    format_banner(kind, any)
  end

  def format(kind, payload, stacktrace) do
    stacktrace = stacktrace || System.stacktrace
    message = format_banner(kind, payload, stacktrace)
    case stacktrace do
      [] -> message
      _ -> message <> "\n" <> format_stacktrace(stacktrace)
    end
  end

  @doc """
  Formats an exit. It returns a string.

  Often there are errors/exceptions inside exits. Exits are often
  wrapped by the caller and provide stacktraces too. This function
  formats exits in a way to nicely show the exit reason, caller
  and stacktrace.
  """
  @spec format_exit(any) :: String.t
  def format_exit(reason) do
    format_exit(reason, <<"\n ">>)
  end

  # `joiner` is the (indented) line separator accumulated so far; it grows
  # as nested exit reasons are unwrapped.

  # 2-Tuple could be caused by an error if the second element is a stacktrace.
  defp format_exit({exception, maybe_stacktrace} = reason, joiner)
      when is_list(maybe_stacktrace) and maybe_stacktrace !== [] do
    try do
      Enum.map(maybe_stacktrace, &format_stacktrace_entry/1)
    else
      formatted_stacktrace ->
        # Assume a non-empty list formattable as stacktrace is a
        # stacktrace, so exit was caused by an error.
        message = "an exception was raised:" <> joiner <>
          format_banner(:error, exception, maybe_stacktrace)
        Enum.join([message | formatted_stacktrace], joiner <> <<" ">>)
    catch
      :error, _ ->
        # Not a stacktrace, was an exit.
        format_exit_reason(reason)
    end
  end

  # :supervisor.start_link returns this error reason when it fails to init
  # because a child's start_link raises.
  defp format_exit({:shutdown,
      {:failed_to_start_child, child, {:EXIT, reason}}}, joiner) do
    format_start_child(child, reason, joiner)
  end

  # :supervisor.start_link returns this error reason when it fails to init
  # because a child's start_link returns {:error, reason}.
  defp format_exit({:shutdown, {:failed_to_start_child, child, reason}},
      joiner) do
    format_start_child(child, reason, joiner)
  end

  # 2-Tuple could be an exit caused by mfa if second element is mfa, args
  # must be a list of arguments - max length 255 due to max arity.
  defp format_exit({reason2, {mod, fun, args}} = reason, joiner)
      when length(args) < 256 do
    try do
      format_mfa(mod, fun, args)
    else
      mfa ->
        # Assume tuple formattable as an mfa is an mfa, so exit was caused by
        # failed mfa.
        "exited in: " <> mfa <> joiner <>
          "** (EXIT) " <> format_exit(reason2, joiner <> <<" ">>)
    catch
      :error, _ ->
        # Not an mfa, was an exit.
        format_exit_reason(reason)
    end
  end

  defp format_exit(reason, _joiner) do
    format_exit_reason(reason)
  end

  # Well-known exit reasons get human-readable text; everything else is
  # inspected verbatim by the final catch-all clause.
  defp format_exit_reason(:normal), do: "normal"
  defp format_exit_reason(:shutdown), do: "shutdown"

  defp format_exit_reason({:shutdown, reason}) do
    "shutdown: #{inspect(reason)}"
  end

  defp format_exit_reason(:timeout), do: "time out"
  defp format_exit_reason(:killed), do: "killed"
  defp format_exit_reason(:noconnection), do: "no connection"

  defp format_exit_reason(:noproc) do
    "no process"
  end

  defp format_exit_reason({:nodedown, node_name}) when is_atom(node_name) do
    "no connection to #{node_name}"
  end

  # :gen_server exit reasons
  defp format_exit_reason({:already_started, pid}) do
    "already started: " <> inspect(pid)
  end

  defp format_exit_reason({:bad_return_value, value}) do
    "bad return value: " <> inspect(value)
  end

  defp format_exit_reason({:bad_call, request}) do
    "bad call: " <> inspect(request)
  end

  defp format_exit_reason({:bad_cast, request}) do
    "bad cast: " <> inspect(request)
  end

  # :supervisor.start_link error reasons
  # If value is a list will be formatted by mfa exit in format_exit/1
  defp format_exit_reason({:bad_return, {mod, :init, value}})
      when is_atom(mod) do
    format_mfa(mod, :init, 1) <> " returned a bad value: " <> inspect(value)
  end

  defp format_exit_reason({:bad_start_spec, start_spec}) do
    "bad start spec: invalid children: " <> inspect(start_spec)
  end

  defp format_exit_reason({:start_spec, start_spec}) do
    "bad start spec: " <> format_sup_spec(start_spec)
  end

  defp format_exit_reason({:supervisor_data, data}) do
    "bad supervisor data: " <> format_sup_data(data)
  end

  defp format_exit_reason(reason), do: inspect(reason)

  # Shared rendering for the two failed_to_start_child clauses above.
  defp format_start_child(child, reason, joiner) do
    "shutdown: failed to start child: " <> inspect(child) <> joiner <>
      "** (EXIT) " <> format_exit(reason, joiner <> <<" ">>)
  end

  # Renders :supervisor init-flag validation failures.
  defp format_sup_data({:invalid_type, type}) do
    "invalid type: " <> inspect(type)
  end

  defp format_sup_data({:invalid_strategy, strategy}) do
    "invalid strategy: " <> inspect(strategy)
  end

  defp format_sup_data({:invalid_intensity, intensity}) do
    "invalid intensity: " <> inspect(intensity)
  end

  defp format_sup_data({:invalid_period, period}) do
    "invalid period: " <> inspect(period)
  end

  defp format_sup_data(other), do: inspect(other)

  # Renders :supervisor child-spec validation failures.
  defp format_sup_spec({:invalid_child_spec, child_spec}) do
    "invalid child spec: " <> inspect(child_spec)
  end

  defp format_sup_spec({:invalid_child_type, type}) do
    "invalid child type: " <> inspect(type)
  end

  defp format_sup_spec({:invalid_mfa, mfa}) do
    "invalid mfa: " <> inspect(mfa)
  end

  defp format_sup_spec({:invalid_restart_type, restart}) do
    "invalid restart type: " <> inspect(restart)
  end

  defp format_sup_spec({:invalid_shutdown, shutdown}) do
    "invalid shutdown: " <> inspect(shutdown)
  end

  defp format_sup_spec({:invalid_module, mod}) do
    "invalid module: " <> inspect(mod)
  end

  defp format_sup_spec({:invalid_modules, modules}) do
    "invalid modules: " <> inspect(modules)
  end

  defp format_sup_spec(other), do: inspect(other)

  @doc """
  Receives a stacktrace entry and formats it into a string.
  """
  @spec format_stacktrace_entry(stacktrace_entry) :: String.t
  def format_stacktrace_entry(entry)

  # From Macro.Env.stacktrace
  def format_stacktrace_entry({module, :__MODULE__, 0, location}) do
    format_location(location) <> inspect(module) <> " (module)"
  end

  # From :elixir_compiler_*
  def format_stacktrace_entry({_module, :__MODULE__, 1, location}) do
    format_location(location) <> "(module)"
  end

  # From :elixir_compiler_*
  def format_stacktrace_entry({_module, :__FILE__, 1, location}) do
    format_location(location) <> "(file)"
  end

  def format_stacktrace_entry({module, fun, arity, location}) do
    format_application(module) <> format_location(location) <> format_mfa(module, fun, arity)
  end

  def format_stacktrace_entry({fun, arity, location}) do
    format_location(location) <> format_fa(fun, arity)
  end

  # Prefixes the entry with "(app) " when the module belongs to a loaded
  # OTP application; empty string otherwise.
  defp format_application(module) do
    if app = Application.get_application(module) do
      "(" <> Atom.to_string(app) <> ") "
    else
      ""
    end
  end

  @doc """
  Formats the stacktrace.

  A stacktrace must be given as an argument. If not, the stacktrace
  is retrieved from `Process.info/2`.
  """
  def format_stacktrace(trace \\ nil) do
    # Drop the first three entries so this formatter's own frames are hidden.
    trace = trace || case Process.info(self, :current_stacktrace) do
      {:current_stacktrace, t} -> Enum.drop(t, 3)
    end

    case trace do
      [] -> "\n"
      s -> " " <> Enum.map_join(s, "\n ", &format_stacktrace_entry(&1)) <> "\n"
    end
  end

  @doc """
  Receives an anonymous function and arity and formats it as
  shown in stacktraces. The arity may also be a list of arguments.

  ## Examples

      Exception.format_fa(fn -> nil end, 1)
      #=> "#Function<...>/1"

  """
  def format_fa(fun, arity) when is_function(fun) do
    "#{inspect fun}#{format_arity(arity)}"
  end

  @doc """
  Receives a module, fun and arity and formats it
  as shown in stacktraces. The arity may also be a list
  of arguments.

  ## Examples

      iex> Exception.format_mfa Foo, :bar, 1
      "Foo.bar/1"

      iex> Exception.format_mfa Foo, :bar, []
      "Foo.bar()"

      iex> Exception.format_mfa nil, :bar, []
      "nil.bar()"

  Anonymous functions are reported as -func/arity-anonfn-count-,
  where func is the name of the enclosing function. Convert to
  "anonymous fn in func/arity"
  """
  def format_mfa(module, fun, arity) when is_atom(module) and is_atom(fun) do
    # Strip the leading colon from inspected Erlang-style atoms.
    fun =
      case inspect(fun) do
        ":" <> fun -> fun
        fun -> fun
      end

    # Compiler-generated anonymous fns inspect as "\"-outer/arity-fun-N-\"";
    # `match?/2` guards the split so ordinary names fall through to `_`.
    case match?("\"-" <> _, fun) and String.split(fun, "-") do
      [ "\"", outer_fun, "fun", _count, "\"" ] ->
        "anonymous fn#{format_arity(arity)} in #{inspect module}.#{outer_fun}"
      _ ->
        "#{inspect module}.#{fun}#{format_arity(arity)}"
    end
  end

  # A list arity means the actual arguments are known: render them inline.
  defp format_arity(arity) when is_list(arity) do
    inspected = for x <- arity, do: inspect(x)
    "(#{Enum.join(inspected, ", ")})"
  end

  defp format_arity(arity) when is_integer(arity) do
    "/" <> Integer.to_string(arity)
  end

  @doc """
  Formats the given `file` and `line` as shown in stacktraces.
  If any of the values are `nil`, they are omitted.

  ## Examples

      iex> Exception.format_file_line("foo", 1)
      "foo:1:"

      iex> Exception.format_file_line("foo", nil)
      "foo:"

      iex> Exception.format_file_line(nil, nil)
      ""

  """
  def format_file_line(file, line) do
    format_file_line(file, line, "")
  end

  defp format_file_line(file, line, suffix) do
    if file do
      # Line 0 means "unknown line" and is treated like nil.
      if line && line != 0 do
        "#{file}:#{line}:#{suffix}"
      else
        "#{file}:#{suffix}"
      end
    else
      ""
    end
  end

  # Formats the :file/:line of a stacktrace location keyword list.
  defp format_location(opts) when is_list(opts) do
    format_file_line Keyword.get(opts, :file), Keyword.get(opts, :line), " "
  end
end
# Some exceptions implement "message/1" instead of "exception/1" mostly
# for bootstrap reasons. It is recommended for applications to implement
# "exception/1" instead of "message/1" as described in "defexception/1"
# docs.
defmodule RuntimeError do
  # Default exception for `raise "some message"`; `defexception` derives
  # `exception/1` and a `message/1` returning the `:message` field.
  defexception message: "runtime error"
end
defmodule ArgumentError do
  # Raised when an argument has the right type but an invalid value;
  # also produced by normalizing Erlang's :badarg.
  defexception message: "argument error"
end
defmodule ArithmeticError do
  # Raised by the VM on invalid arithmetic (normalized from :badarith).
  defexception []

  def message(_exception), do: "bad argument in arithmetic expression"
end
defmodule SystemLimitError do
  # Raised when a VM limit (atoms, processes, etc.) is exhausted
  # (normalized from :system_limit).
  defexception []

  def message(_exception), do: "a system limit has been reached"
end
defmodule SyntaxError do
  # Compile-time error for malformed source, rendered as "path:line: description".
  defexception [file: nil, line: nil, description: "syntax error"]

  def message(%{file: file, line: line, description: description}) do
    location = Exception.format_file_line(Path.relative_to_cwd(file), line)
    location <> " " <> description
  end
end
defmodule TokenMissingError do
  # Raised when parsing hits end of input with an unterminated construct,
  # rendered as "path:line: description".
  defexception [file: nil, line: nil, description: "expression is incomplete"]

  def message(%{file: file, line: line, description: description}) do
    location = Exception.format_file_line(Path.relative_to_cwd(file), line)
    location <> " " <> description
  end
end
defmodule CompileError do
  # Generic compilation failure, rendered as "path:line: description".
  defexception [file: nil, line: nil, description: "compile error"]

  def message(%{file: file, line: line, description: description}) do
    location = Exception.format_file_line(Path.relative_to_cwd(file), line)
    location <> " " <> description
  end
end
defmodule BadFunctionError do
  # Raised when a non-function is invoked as one (normalized from :badfun).
  defexception [term: nil]

  def message(%{term: term}), do: "expected a function, got: #{inspect(term)}"
end
defmodule BadStructError do
  # Raised on struct update/match with a value of the wrong struct
  # (normalized from :badstruct).
  defexception [struct: nil, term: nil]

  def message(%{struct: struct, term: term}) do
    "expected a struct named #{inspect(struct)}, got: #{inspect(term)}"
  end
end
defmodule BadMapError do
  # Raised when a map operation receives a non-map (normalized from :badmap).
  defexception [term: nil]

  def message(%{term: term}), do: "expected a map, got: #{inspect(term)}"
end
defmodule MatchError do
  # Raised when a `=` match fails at runtime (normalized from :badmatch).
  defexception [term: nil]

  def message(%{term: term}), do: "no match of right hand side value: #{inspect(term)}"
end
defmodule CaseClauseError do
  # Raised when no `case` clause matches (normalized from :case_clause).
  defexception [term: nil]

  def message(%{term: term}), do: "no case clause matching: #{inspect(term)}"
end
defmodule CondClauseError do
  # Raised when no `cond` clause evaluates to a truthy value
  # (normalized from :cond_clause).
  defexception []

  def message(_exception), do: "no cond clause evaluated to a true value"
end
defmodule TryClauseError do
  # Raised when the body result of `try` matches no `else` clause
  # (normalized from :try_clause).
  defexception [term: nil]

  def message(%{term: term}), do: "no try clause matching: #{inspect(term)}"
end
defmodule BadArityError do
  # Raised when an anonymous function is invoked with the wrong number of
  # arguments (normalized from :badarity).
  defexception [function: nil, args: nil]

  def message(%{function: fun, args: args}) do
    {:arity, arity} = :erlang.fun_info(fun, :arity)
    inspected_args = Enum.map_join(args, ", ", &inspect/1)
    "#{inspect(fun)} with arity #{arity} called with #{describe_args(length(args), inspected_args)}"
  end

  defp describe_args(0, _inspected), do: "no arguments"
  defp describe_args(1, inspected), do: "1 argument (#{inspected})"
  defp describe_args(n, inspected), do: "#{n} arguments (#{inspected})"
end
defmodule UndefinedFunctionError do
  defexception [module: nil, function: nil, arity: nil, reason: nil]

  # With no explicit reason, work one out: either the module could not be
  # loaded at all, or it is loaded but does not export the function.
  def message(%{reason: nil, module: module, function: function, arity: arity} = e) do
    cond do
      is_nil(function) or is_nil(arity) ->
        "undefined function"
      not is_nil(module) and :code.is_loaded(module) === false ->
        message(%{e | reason: :"module could not be loaded"})
      true ->
        message(%{e | reason: :"function not exported"})
    end
  end

  def message(%{reason: :"module could not be loaded", module: module, function: function, arity: arity}) do
    "undefined function " <> Exception.format_mfa(module, function, arity) <>
    " (module #{inspect module} is not available)"
  end

  def message(%{reason: :"function not exported", module: module, function: function, arity: arity}) do
    "undefined function " <> Exception.format_mfa(module, function, arity)
  end

  # `nil.fun(...)` calls: strip the "nil." prefix so the hint shows the
  # bare fun/arity.
  def message(%{reason: :"function not available", module: module, function: function, arity: arity}) do
    "nil." <> fa = Exception.format_mfa(nil, function, arity)
    "undefined function " <> Exception.format_mfa(module, function, arity) <>
    " (function #{fa} is not available)"
  end

  # Fallback: any other reason is appended verbatim in parentheses.
  def message(%{reason: reason, module: module, function: function, arity: arity}) do
    "undefined function " <> Exception.format_mfa(module, function, arity) <> " (#{reason})"
  end
end
defmodule FunctionClauseError do
  # Raised when no clause of a function matches the given arguments
  # (normalized from :function_clause). The MFA may be unknown when the
  # stacktrace could not be inspected.
  defexception [module: nil, function: nil, arity: nil]

  def message(%{module: module, function: function, arity: arity}) do
    if function do
      "no function clause matching in #{Exception.format_mfa(module, function, arity)}"
    else
      "no function clause matches"
    end
  end
end
defmodule Code.LoadError do
  # Raised when a source file cannot be loaded; `:file` is required.
  defexception [:file, :message]

  def exception(opts) do
    file = Keyword.fetch!(opts, :file)
    %__MODULE__{file: file, message: "could not load #{file}"}
  end
end
defmodule Protocol.UndefinedError do
  # Raised when a protocol is dispatched on a value that does not
  # implement it; `description` optionally adds a hint.
  defexception [protocol: nil, value: nil, description: nil]

  def message(%{protocol: protocol, value: value, description: description}) do
    base = "protocol #{inspect protocol} not implemented for #{inspect value}"

    if description do
      base <> ", " <> description
    else
      base
    end
  end
end
defmodule KeyError do
  # Raised on missing map/struct/keyword keys; the container is shown when
  # it could be recovered (see ErlangError.normalize/2 for :badkey).
  defexception key: nil, term: nil

  def message(%{key: key, term: term}) do
    base = "key #{inspect key} not found"

    if term != nil do
      base <> " in: #{inspect term}"
    else
      base
    end
  end
end
defmodule UnicodeConversionError do
  defexception [:encoded, :message]

  # Builds the exception from the conversion `:kind` (e.g. :invalid,
  # :incomplete) and the part of the input (`:rest`) that failed to convert;
  # `:encoded` keeps whatever was successfully converted so far.
  def exception(opts) do
    %UnicodeConversionError{
      encoded: Keyword.fetch!(opts, :encoded),
      message: "#{Keyword.fetch!(opts, :kind)} #{detail Keyword.fetch!(opts, :rest)}"
    }
  end

  # Binaries: point at the byte sequence where conversion stopped.
  defp detail(rest) when is_binary(rest) do
    "encoding starting at #{inspect rest}"
  end

  # Charlists: report the offending code point.
  defp detail([h|_]) when is_integer(h) do
    "code point #{h}"
  end

  # Improper/nested lists: recurse into the head until a reportable
  # element is found.
  defp detail([h|_]) do
    detail(h)
  end
end
defmodule Enum.OutOfBoundsError do
  # Raised by Enum bang functions when an index is outside the enumerable.
  defexception []

  def message(_exception), do: "out of bounds error"
end
defmodule Enum.EmptyError do
  # Raised by Enum functions that require a non-empty enumerable.
  defexception []

  def message(_exception), do: "empty error"
end
defmodule File.Error do
  # Raised by `File` bang functions; wraps a POSIX error reason which is
  # rendered through :file.format_error/1.
  defexception [reason: nil, action: "", path: nil]

  def message(%{reason: reason, action: action, path: path}) do
    posix_message = IO.iodata_to_binary(:file.format_error(reason))
    "could not #{action} #{inspect(path)}: #{posix_message}"
  end
end
defmodule File.CopyError do
  # Raised by `File.cp!/3` and friends; `on` optionally names the entry
  # inside a recursive copy that failed.
  defexception [reason: nil, action: "", source: nil, destination: nil, on: nil]

  def message(%{reason: reason, action: action, source: source, destination: destination, on: on}) do
    posix_message = IO.iodata_to_binary(:file.format_error(reason))
    location = if on, do: ". #{on}", else: ""

    "could not #{action} from #{inspect(source)} to " <>
      "#{inspect(destination)}#{location}: #{posix_message}"
  end
end
defmodule ErlangError do
  defexception [original: nil]

  # Fallback rendering for Erlang errors with no dedicated Elixir exception.
  def message(exception) do
    "erlang error: #{inspect(exception.original)}"
  end

  @doc false
  # Converts raw Erlang error reasons into the corresponding Elixir
  # exception structs; used by `Exception.normalize/3`.
  def normalize(:badarg, _stacktrace) do
    %ArgumentError{}
  end

  def normalize(:badarith, _stacktrace) do
    %ArithmeticError{}
  end

  def normalize(:system_limit, _stacktrace) do
    %SystemLimitError{}
  end

  def normalize(:cond_clause, _stacktrace) do
    %CondClauseError{}
  end

  def normalize({:badarity, {fun, args}}, _stacktrace) do
    %BadArityError{function: fun, args: args}
  end

  def normalize({:badfun, term}, _stacktrace) do
    %BadFunctionError{term: term}
  end

  def normalize({:badstruct, struct, term}, _stacktrace) do
    %BadStructError{struct: struct, term: term}
  end

  def normalize({:badmatch, term}, _stacktrace) do
    %MatchError{term: term}
  end

  def normalize({:badmap, term}, _stacktrace) do
    %BadMapError{term: term}
  end

  def normalize({:badkey, key}, stacktrace) do
    # Sniff the top stacktrace entry for the known Map/:maps calls so the
    # error can also report which map was missing the key.
    term =
      case stacktrace || :erlang.get_stacktrace do
        [{Map, :get_and_update!, [map, _, _], _}|_] -> map
        [{Map, :update!, [map, _, _], _}|_] -> map
        [{:maps, :update, [_, _, map], _}|_] -> map
        [{:maps, :get, [_, map], _}|_] -> map
        _ -> nil
      end
    %KeyError{key: key, term: term}
  end

  def normalize({:badkey, key, map}, _stacktrace) do
    %KeyError{key: key, term: map}
  end

  def normalize({:case_clause, term}, _stacktrace) do
    %CaseClauseError{term: term}
  end

  def normalize({:try_clause, term}, _stacktrace) do
    %TryClauseError{term: term}
  end

  # For :undef and :function_clause the failing MFA lives in the stacktrace.
  def normalize(:undef, stacktrace) do
    stacktrace = stacktrace || :erlang.get_stacktrace
    {mod, fun, arity} = from_stacktrace(stacktrace)
    %UndefinedFunctionError{module: mod, function: fun, arity: arity}
  end

  def normalize(:function_clause, stacktrace) do
    {mod, fun, arity} = from_stacktrace(stacktrace || :erlang.get_stacktrace)
    %FunctionClauseError{module: mod, function: fun, arity: arity}
  end

  def normalize({:badarg, payload}, _stacktrace) do
    %ArgumentError{message: "argument error: #{inspect(payload)}"}
  end

  # Anything else stays wrapped as a generic ErlangError.
  def normalize(other, _stacktrace) do
    %ErlangError{original: other}
  end

  # Extracts module/function/arity from the top stacktrace entry; an args
  # list is converted to its length.
  defp from_stacktrace([{module, function, args, _}|_]) when is_list(args) do
    {module, function, length(args)}
  end

  defp from_stacktrace([{module, function, arity, _}|_]) do
    {module, function, arity}
  end

  defp from_stacktrace(_) do
    {nil, nil, nil}
  end
end
| 27.8 | 108 | 0.675964 |
734467956f6fdd38fe143a2178426a2bf40e19e5 | 919 | exs | Elixir | test/cog/chat/hipchat/templates/embedded/alias_delete_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | test/cog/chat/hipchat/templates/embedded/alias_delete_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | test/cog/chat/hipchat/templates/embedded/alias_delete_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | defmodule Cog.Chat.HipChat.Templates.Embedded.AliasDeleteTest do
use Cog.TemplateCase
  # Single alias result: rendered as one plain "Deleted alias ..." line.
  test "alias-delete template" do
    data = %{"results" => [%{"visibility" => "user", "name" => "awesome_alias"}]}
    expected = "Deleted alias 'user:awesome_alias'"

    assert_rendered_template(:hipchat, :embedded, "alias-delete", data, expected)
  end

  # Multiple results: one line per alias joined with HipChat's <br/> tag;
  # the String.replace strips the heredoc's newlines so the expectation is a
  # single flat string.
  test "alias-delete with multiple inputs" do
    data = %{"results" => [%{"visibility" => "user", "name" => "awesome_alias"},
                           %{"visibility" => "user", "name" => "another_awesome_alias"},
                           %{"visibility" => "site", "name" => "wow_neat"}]}
    expected = """
    Deleted alias 'user:awesome_alias'<br/>
    Deleted alias 'user:another_awesome_alias'<br/>
    Deleted alias 'site:wow_neat'
    """ |> String.replace("\n", "")

    assert_rendered_template(:hipchat, :embedded, "alias-delete", data, expected)
  end
end
| 36.76 | 88 | 0.620239 |
7344753f2af756de471029ae6597ec61db4f07de | 1,601 | exs | Elixir | elixir/flatten-array/flatten_array_comments.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/flatten-array/flatten_array_comments.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/flatten-array/flatten_array_comments.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | defmodule Flattener do
@doc """
Accept a list and return the list flattened without nil values.
## Examples
iex> Flattener.flatten([1, [2], 3, nil])
[1,2,3]
iex> Flattener.flatten([nil, nil])
[]
"""
@spec flatten(list) :: list
def flatten(list) do
do_flatten(list)
end
@doc """
Function header
- list - list to flatten
- acc - accumulator of flattened list values
- tails - accumulator of tails dropped during nesting
"""
defp do_flatten(list, acc \\ [], tails \\ [])
# stop clause
defp do_flatten([], acc, []), do: Enum.reverse(acc)
# call do_flatten with first available tail in tails
defp do_flatten([], acc, [h | t]), do: do_flatten(h, acc, t)
@doc """
Function needs to examine if list head (h) is list (nested list) or not.
- if head is nested list, then call do_flatten with this head and append list tail (t) to tails
(when head will be flattened this tail will be next value to flatten).
- if head is not a nested list, function needs to check if it's not a nil value.
- if head is nil, skip this head and call do_flatten with list tail (t) as first argument.
- if head is not nil, append head to accumulator and call do_flatten with list tail (t) as first argument.
"""
defp do_flatten([h | t], acc, tails) do
case is_list(h) do
true -> do_flatten(h, acc, [t | tails])
false ->
case h do
nil -> do_flatten(t, acc, tails) # skip nil head
_ -> do_flatten(t, [h | acc], tails) # append head and check list tail
end
end
end
end
| 30.788462 | 110 | 0.630856 |
734494e7dbdfe9c1794565d8132d182fa746c31b | 238 | ex | Elixir | test/support/normalize_and_push_strategy.ex | njwest/bamboo | 024286443a4e1aae57cbaa87dbcafe62c9a5755a | [
"MIT"
] | null | null | null | test/support/normalize_and_push_strategy.ex | njwest/bamboo | 024286443a4e1aae57cbaa87dbcafe62c9a5755a | [
"MIT"
] | null | null | null | test/support/normalize_and_push_strategy.ex | njwest/bamboo | 024286443a4e1aae57cbaa87dbcafe62c9a5755a | [
"MIT"
] | 1 | 2018-09-05T09:17:27.000Z | 2018-09-05T09:17:27.000Z | defmodule Bamboo.NormalizeAndPushStrategy do
use ExMachina.Strategy, function_name: :normalize_and_push
def handle_normalize_and_push(email, _opts) do
email |> Bamboo.Mailer.normalize_addresses |> Bamboo.SentEmail.push
end
end
| 29.75 | 71 | 0.810924 |
7344a959f623f226626d1b5f746265700a54ab42 | 1,836 | ex | Elixir | clients/analytics/lib/google_api/analytics/v3/model/ad_words_account.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/ad_words_account.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/ad_words_account.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Analytics.V3.Model.AdWordsAccount do
  @moduledoc """
  JSON template for an AdWords account.

  ## Attributes

  - autoTaggingEnabled (boolean()): True if auto-tagging is enabled on the AdWords account. Read-only after the insert operation. Defaults to: `null`.
  - customerId (String.t): Customer ID. This field is required when creating an AdWords link. Defaults to: `null`.
  - kind (String.t): Resource type for AdWords account. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :autoTaggingEnabled => any(),
          :customerId => any(),
          :kind => any()
        }

  # `field/1` is a GoogleApi.Gax.ModelBase macro registering each JSON
  # attribute for (de)serialization; keys intentionally keep the camelCase
  # names used on the wire.
  field(:autoTaggingEnabled)
  field(:customerId)
  field(:kind)
end
defimpl Poison.Decoder, for: GoogleApi.Analytics.V3.Model.AdWordsAccount do
  alias GoogleApi.Analytics.V3.Model.AdWordsAccount

  # Decoding is delegated to the generated model module itself.
  def decode(value, options), do: AdWordsAccount.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Analytics.V3.Model.AdWordsAccount do
  # All generated models share the encoding implemented in Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34 | 150 | 0.733115 |
7344b7ed7f2e6bf41f4b422d86897da2aadbf5fe | 76 | exs | Elixir | lesson_08/demo/i18n/test/i18n_web/views/layout_view_test.exs | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 1 | 2021-09-22T09:56:35.000Z | 2021-09-22T09:56:35.000Z | lesson_08/demo/i18n/test/i18n_web/views/layout_view_test.exs | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 7 | 2020-03-14T19:30:29.000Z | 2022-02-27T01:20:40.000Z | lesson_08/demo/i18n/test/i18n_web/views/layout_view_test.exs | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 11 | 2020-02-13T14:52:45.000Z | 2020-08-03T12:18:56.000Z | defmodule I18nWeb.LayoutViewTest do
use I18nWeb.ConnCase, async: true
end
| 19 | 35 | 0.815789 |
7344e6d5ba86ff64104a0fbdefb9b19a02db6698 | 9,346 | ex | Elixir | lib/extreme/persistent_subscription.ex | the-mikedavis/extreme | c467bba50f96df4a37cce561252efbac9b0d351a | [
"MIT"
] | null | null | null | lib/extreme/persistent_subscription.ex | the-mikedavis/extreme | c467bba50f96df4a37cce561252efbac9b0d351a | [
"MIT"
] | null | null | null | lib/extreme/persistent_subscription.ex | the-mikedavis/extreme | c467bba50f96df4a37cce561252efbac9b0d351a | [
"MIT"
] | null | null | null | defmodule Extreme.PersistentSubscription do
@moduledoc """
An asynchronous subscription strategy.
Other subscription methods require stream positions to be persisted by the
client (e.g. in `:dets` or PostgreSQL). Persistent Subscription is a
subscription strategy in which details about backpressure, buffer sizes, and
stream positions are all held by the EventStore (server).
In a persistent subscription, all communication is asynchronous. When an
event is received and processed, it must be acknowledged as processed by
`ack/3` in order to be considered by the server as processed. The server
stores knowledge of which events have been processed by means of checkpoints,
so listeners which use persistent subscriptions do not store stream positions
themselves.
The system of `ack/3`s and `nack/5`s allows listeners to handle events in
unconventional ways:
* concurrent processing: events may be handled by multiple processors at the
same time.
* out of order processing: events may be handled in any order.
* retry: events which are `nack/5`-ed with the `:retry` action, and events
which do not receive acknowledgement via `ack/3` are retried.
* message parking: if an event is not acknowledged and reaches its maximum
retry count, the message is parked in a parked messages queue. This
prevents head-of-line blocking typical of other subscription patterns.
* competing consumers: multiple consumers may process events without
collaboration or gossip between the consumers themselves.
Persistent subscriptions are started with
`c:Extreme.connect_to_persistent_subscription/4` expect a `cast` of each event
in the form of `{:on_event, event, correlation_id}`
A Persistent Subscription must exist before it can be connected to.
Persistent Subscriptions can be created by sending the
`Extreme.Messages.CreatePersistentSubscription` message via
`c:Extreme.execute/3`, by the HTTP API, or in the EventStore dashboard.
## Example
```elixir
defmodule MyPersistentListener do
use GenServer
alias Extreme.PersistentSubscription
def start_link(opts) do
GenServer.start_link(__MODULE__, opts)
end
def init(opts) do
{:ok, opts}
end
def subscribe(listener_proc), do: GenServer.cast(listener_proc, :subscribe)
def handle_cast(:subscribe, state) do
{:ok, subscription_pid} =
MyExtremeClientModule.connect_to_persistent_subscription(
self(),
          state.stream,
          state.group,
          state.allowed_in_flight_messages
)
{:noreply, Map.put(state, :subscription_pid, subscription_pid)}
end
def handle_cast({:on_event, event, correlation_id}, state) do
# .. do the real processing here ..
:ok = PersistentSubscription.ack(state.subscription_pid, event, correlation_id)
{:noreply, state}
end
def handle_call(:unsubscribe, _from, state) do
:unsubscribed = MyExtremeClientModule.unsubscribe(state.subscription_pid)
{:reply, :ok, state}
end
def handle_info(_, state), do: {:noreply, state}
end
```
"""
use GenServer
require Logger
alias Extreme.SharedSubscription, as: Shared
alias Extreme.RequestManager
alias Extreme.Messages, as: Msg
  defmodule State do
    @moduledoc false

    # base_name                  - name of the owning Extreme client
    # correlation_id             - id pairing requests with server replies
    # subscriber                 - pid that receives {:on_event, ...} casts
    # stream / group             - coordinates of the persistent subscription
    # allowed_in_flight_messages - max unacknowledged events the server sends
    # status                     - lifecycle marker (starts as :initialized)
    # subscription_id            - server-assigned id; presumably set when the
    #                              confirmation push is processed (see
    #                              Shared.process_push) — TODO confirm
    defstruct ~w(
      base_name
      correlation_id
      subscriber
      stream
      group
      allowed_in_flight_messages
      status
      subscription_id
    )a
  end
  @typedoc """
  An event received from a persistent subscription.
  """
  @type event() :: %Extreme.Messages.ResolvedIndexedEvent{}

  @typedoc """
  An event ID.

  Either from the `:link` or `:event`, depending on if the event is from a
  projection stream or a normal stream (respectively).
  """
  @type event_id() :: binary()

  @doc false
  # Started on behalf of `Extreme.connect_to_persistent_subscription/4`;
  # not meant to be called directly by library users.
  def start_link(
        base_name,
        correlation_id,
        subscriber,
        stream,
        group,
        allowed_in_flight_messages
      ) do
    GenServer.start_link(
      __MODULE__,
      {base_name, correlation_id, subscriber, stream, group, allowed_in_flight_messages}
    )
  end
  @doc """
  Acknowledges that an event or set of events have been successfully processed.

  `ack/3` takes any of the following for event ID:

  * a full event, as given in the `{:on_event, event, correlation_id}` cast
  * the `event_id` of an event (either from its `:link` or `:event`, depending
    on if the event comes from a projection or a normal stream, respectively)
  * a list of either sort

  `correlation_id` comes from the `:on_event` cast.

  ## Example

      def handle_cast({:on_event, event, correlation_id}, state) do
        # .. do some processing ..

        # when the processing completes successfully:
        :ok = Extreme.PersistentSubscription.ack(state.subscription_pid, event, correlation_id)

        {:noreply, state}
      end
  """
  @spec ack(pid(), event() | event_id() | [event() | event_id()], binary()) :: :ok
  # Single event or bare id — normalize to a list and delegate.
  def ack(subscription, event, correlation_id) when is_map(event) or is_binary(event) do
    ack(subscription, [event], correlation_id)
  end

  # Fire-and-forget cast; the wire-level ACK message is built in handle_cast/2.
  def ack(subscription, events, correlation_id) when is_list(events) do
    GenServer.cast(subscription, {:ack, events, correlation_id})
  end

  @doc """
  Acknowledges that an event or set of events could not be handled.

  See `ack/3` for information on `event` and `correlation_id`.

  `action` can be any of the following

  * `:unknown`
  * `:park`
  * `:retry`
  * `:skip`
  * `:stop`

  The `:park` action sets aside the event in the Parked Messages queue, which
  may be replayed via [the HTTP
  API](https://eventstore.com/docs/http-api/competing-consumers/index.html#replaying-parked-messages)
  or by button click in the EventStore Persistent Subscriptions dashboard.
  When an event reaches the max retry count configured by the
  `:max_retry_count` field in `Extreme.Messages.CreatePersistentSubscription`,
  the event is parked.

  ## Example

      def handle_cast({:on_event, event, correlation_id}, state) do
        # .. do some processing ..

        # in the case that the processing fails and should be retried:
        :ok = Extreme.PersistentSubscription.nack(state.subscription_pid, event, correlation_id, :retry)

        {:noreply, state}
      end
  """
  @spec nack(
          pid(),
          event() | event_id() | [event() | event_id()],
          binary(),
          :unknown | :park | :retry | :skip | :stop,
          String.t()
        ) :: :ok
  # Bodiless head declares the default for `message`.
  def nack(subscription, event, correlation_id, action, message \\ "")

  # Single event or bare id — normalize to a list and delegate.
  def nack(subscription, event, correlation_id, action, message)
      when is_map(event) or is_binary(event) do
    nack(subscription, [event], correlation_id, action, message)
  end

  # Fire-and-forget cast; the wire-level NAK message is built in handle_cast/2.
  def nack(subscription, events, correlation_id, action, message) when is_list(events) do
    GenServer.cast(subscription, {:nack, events, correlation_id, action, message})
  end
  @doc false
  @impl true
  def init({base_name, correlation_id, subscriber, stream, group, allowed_in_flight_messages}) do
    state = %State{
      base_name: base_name,
      correlation_id: correlation_id,
      subscriber: subscriber,
      stream: stream,
      group: group,
      allowed_in_flight_messages: allowed_in_flight_messages,
      status: :initialized
    }

    # Kick off the CONNECT handshake asynchronously so init/1 returns quickly
    # and the caller is not blocked on the round-trip to the server.
    GenServer.cast(self(), :subscribe)
    {:ok, state}
  end

  @impl true
  def handle_call(:unsubscribe, from, state) do
    # No reply here: presumably Shared.unsubscribe/2 keeps `from` and replies
    # once the server confirms the drop — confirm in Extreme.SharedSubscription.
    :ok = Shared.unsubscribe(from, state)
    {:noreply, state}
  end
  @impl true
  # Sends the ConnectToPersistentSubscription message. Note the protobuf's
  # `subscription_id` field carries the *group* name at connect time.
  def handle_cast(:subscribe, state) do
    Msg.ConnectToPersistentSubscription.new(
      subscription_id: state.group,
      event_stream_id: state.stream,
      allowed_in_flight_messages: state.allowed_in_flight_messages
    )
    |> cast_request_manager(state.base_name, state.correlation_id)

    {:noreply, state}
  end

  # Messages pushed by the server are handled by the shared subscription
  # logic, which also returns the GenServer reply tuple.
  def handle_cast({:process_push, fun}, state) do
    Shared.process_push(fun, state)
  end

  # Translate ack/3 into the wire-level ACK message for this subscription.
  def handle_cast({:ack, events, correlation_id}, state) do
    Msg.PersistentSubscriptionAckEvents.new(
      subscription_id: state.subscription_id,
      processed_event_ids: event_ids(events)
    )
    |> cast_request_manager(state.base_name, correlation_id)

    {:noreply, state}
  end

  # Translate nack/5 into the wire-level NAK message, carrying the requested
  # action (:park, :retry, ...) and an optional human-readable message.
  def handle_cast({:nack, events, correlation_id, action, message}, state) do
    Msg.PersistentSubscriptionNakEvents.new(
      subscription_id: state.subscription_id,
      processed_event_ids: event_ids(events),
      action: action,
      message: message
    )
    |> cast_request_manager(state.base_name, correlation_id)

    {:noreply, state}
  end

  # Ask the server to drop the subscription; the caller's reply is handled
  # separately (see handle_call(:unsubscribe, ...)).
  def handle_cast(:unsubscribe, state) do
    Msg.UnsubscribeFromStream.new()
    |> cast_request_manager(state.base_name, state.correlation_id)

    {:noreply, state}
  end
defp cast_request_manager(message, base_name, correlation_id) do
base_name
|> RequestManager._name()
|> GenServer.cast({:execute, correlation_id, message})
end
defp event_ids(events) when is_list(events) do
Enum.map(events, &event_id/1)
end
defp event_id(event_id) when is_binary(event_id), do: event_id
defp event_id(%Msg.ResolvedIndexedEvent{link: %Msg.EventRecord{event_id: event_id}}),
do: event_id
defp event_id(%Msg.ResolvedIndexedEvent{event: %Msg.EventRecord{event_id: event_id}}),
do: event_id
end
| 30.442997 | 104 | 0.702654 |
73456bf2db134ac2c53eb815de615d27a4b8836b | 920 | exs | Elixir | mix.exs | dmitryzuev/httpotion_cache | 3d5408cadd920cb8ca6bbc4b08368d295b8c7bdf | [
"MIT"
] | 2 | 2016-10-20T11:47:14.000Z | 2017-02-10T04:34:14.000Z | mix.exs | dmitryzuev/httpotion_cache | 3d5408cadd920cb8ca6bbc4b08368d295b8c7bdf | [
"MIT"
] | 1 | 2017-02-10T04:35:50.000Z | 2017-02-10T04:35:50.000Z | mix.exs | dmitryzuev/httpotion_cache | 3d5408cadd920cb8ca6bbc4b08368d295b8c7bdf | [
"MIT"
] | null | null | null | defmodule HTTPotion.Cache.Mixfile do
use Mix.Project
  # Project definition: description/package feed Hex publishing; docs pull in
  # the README as an extra ExDoc page.
  def project do
    [ app: :httpotion_cache,
      name: "HTTPotion.Cache",
      source_url: "https://github.com/dmitryzuev/httpotion_cache",
      version: "0.1.0",
      elixir: "~> 1.1",
      docs: [ extras: ["README.md"] ],
      description: description(),
      deps: deps(),
      package: package() ]
  end

  # Applications started before this one (old-style `applications` list,
  # which predates `extra_applications`).
  def application do
    [ applications: [ :httpotion, :cachex ] ]
  end

  # One-line summary shown on the Hex package page.
  defp description do
    """
    Extension to HTTPotion for caching http requests.
    """
  end

  defp deps do
    [ {:httpotion, "~> 3.0"},
      {:cachex, "~> 2.0"},
      {:ex_doc, "~> 0.12", only: [:dev, :test, :docs]} ]
  end

  # Hex package metadata: shipped files, maintainers, license and links.
  defp package do
    [ files: [ "lib", "mix.exs", "README.md", "LICENSE" ],
      maintainers: [ "Dmitry Zuev" ],
      licenses: [ "LICENSE" ],
      links: %{ "GitHub" => "https://github.com/dmitryzuev/httpotion_cache" } ]
  end
end
| 23.589744 | 79 | 0.568478 |
7345718c7da15e0f2d4b2127290c347af483ceb7 | 947 | exs | Elixir | mix.exs | GuyBransgrove/commander | 44acac3f07c72a75d4e9f5fe61ba8d6f0323ab49 | [
"MIT"
] | null | null | null | mix.exs | GuyBransgrove/commander | 44acac3f07c72a75d4e9f5fe61ba8d6f0323ab49 | [
"MIT"
] | null | null | null | mix.exs | GuyBransgrove/commander | 44acac3f07c72a75d4e9f5fe61ba8d6f0323ab49 | [
"MIT"
] | null | null | null | defmodule Commander.MixProject do
use Mix.Project
  # Standard Mix project definition; `start_permanent` makes crashes fatal in
  # prod, and description/package feed Hex packaging below.
  def project do
    [
      app: :commander,
      version: "0.1.0",
      elixir: "~> 1.7",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description: description(),
      package: package()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # {:dep_from_hexpm, "~> 0.3.0"},
      # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
      {:ex_doc, "~> 0.18", only: :dev}
    ]
  end

  # NOTE(review): "Desc" looks like a placeholder — consider a real summary
  # before publishing to Hex.
  defp description() do
    "Desc"
  end

  # Hex package metadata: shipped files, maintainers, license and links.
  defp package do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Guy Bransgrove"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/GuyBransgrove/commander"}
    ]
  end
end
| 21.044444 | 88 | 0.563886 |
7345a6829e64d7b8e6a15f191b69537c3d054921 | 2,695 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_safe_search_annotation.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_safe_search_annotation.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_safe_search_annotation.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1SafeSearchAnnotation do
  @moduledoc """
  Set of features pertaining to the image, computed by computer vision
  methods over safe-search verticals (for example, adult, spoof, medical,
  violence).

  ## Attributes

  * `adult` (*type:* `String.t`, *default:* `nil`) - Represents the adult content likelihood for the image. Adult content may
    contain elements such as nudity, pornographic images or cartoons, or
    sexual activities.
  * `medical` (*type:* `String.t`, *default:* `nil`) - Likelihood that this is a medical image.
  * `racy` (*type:* `String.t`, *default:* `nil`) - Likelihood that the request image contains racy content. Racy content may
    include (but is not limited to) skimpy or sheer clothing, strategically
    covered nudity, lewd or provocative poses, or close-ups of sensitive
    body areas.
  * `spoof` (*type:* `String.t`, *default:* `nil`) - Spoof likelihood. The likelihood that an modification
    was made to the image's canonical version to make it appear
    funny or offensive.
  * `violence` (*type:* `String.t`, *default:* `nil`) - Likelihood that this image contains violent content.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :adult => String.t(),
          :medical => String.t(),
          :racy => String.t(),
          :spoof => String.t(),
          :violence => String.t()
        }

  # `field/1` (from GoogleApi.Gax.ModelBase) registers each JSON attribute
  # for (de)serialization.
  field(:adult)
  field(:medical)
  field(:racy)
  field(:spoof)
  field(:violence)
end
defimpl Poison.Decoder,
  for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1SafeSearchAnnotation do
  alias GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1SafeSearchAnnotation, as: Model

  # Decoding is delegated to the generated model module itself.
  def decode(value, options), do: Model.decode(value, options)
end
defimpl Poison.Encoder,
  for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1SafeSearchAnnotation do
  # All generated models share the encoding implemented in Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 36.917808 | 127 | 0.709091 |
7345b5069343f757c2dfb53beaaee8fc51c353e0 | 10,652 | ex | Elixir | lib/mix/lib/mix/utils.ex | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/utils.ex | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/utils.ex | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Utils do
@moduledoc """
Utilities used throughout Mix and tasks.
## Conversions
This module handles two types of conversions:
* From command names to module names, i.e. how the command
`deps.get` translates to `Deps.Get` and vice-versa;
* From underscore to CamelCase, i.e. how the file path
`my_project` translates to `MyProject`;
"""
@doc """
Gets the mix home. It defaults to `~/.mix` unless the
`MIX_HOME` environment variable is set.
"""
def mix_home do
System.get_env("MIX_HOME") || Path.expand("~/.mix")
end
  @doc """
  Gets all extra paths defined in the environment variable
  `MIX_PATH`. `MIX_PATH` may contain multiple paths. If on Windows,
  those paths should be separated by `;`, if on unix systems,
  use `:`.
  """
  def mix_paths do
    if path = System.get_env("MIX_PATH") do
      String.split(path, path_separator)
    else
      []
    end
  end

  # OS-specific `MIX_PATH` entry separator, mirroring the shells' PATH rules.
  defp path_separator do
    case :os.type do
      { :win32, _ } -> ";"
      { :unix, _ } -> ":"
    end
  end

  @doc """
  Takes a `command` name and attempts to load a module
  with the command name converted to a module name
  in the given `at` scope.

  Returns `{ :module, module }` in case a module
  exists and is loaded, `{ :error, reason }` otherwise.

  ## Examples

      Mix.Utils.command_to_module("compile", Mix.Tasks)
      #=> { :module, Mix.Tasks.Compile }

  """
  # (`//` was the default-argument operator in this era of Elixir, now `\\`.)
  def command_to_module(command, at // Elixir) do
    module = Module.concat(at, command_to_module_name(command))
    Code.ensure_loaded(module)
  end
  @doc """
  Returns `true` if any of the `sources` are stale
  compared to the given `targets`.
  """
  def stale?(sources, targets) do
    Enum.any? stale_stream(sources, targets)
  end

  @doc """
  Extract all stale `sources` compared to the given `targets`.
  """
  # With no targets there is nothing to compare against.
  def extract_stale(_sources, []), do: []

  def extract_stale(sources, targets) do
    stale_stream(sources, targets) |> Enum.to_list
  end

  # Lazily yields every source whose mtime is newer than the *oldest* target.
  # Erlang datetime tuples {{y,m,d},{h,mi,s}} compare element-wise, so plain
  # `>` gives chronological order.
  defp stale_stream(sources, targets) do
    modified_target = targets |> Enum.map(&last_modified(&1)) |> Enum.min

    Stream.filter(sources, fn(source) ->
      source_mtime(source) > modified_target
    end)
  end

  # A source may be a {path, datetime} pair with its mtime precomputed...
  defp source_mtime({ _, { { _, _, _ }, { _, _, _ } } = source }) do
    source
  end

  # ...or a plain path that still needs a stat call.
  defp source_mtime(source) do
    last_modified(source)
  end

  # Missing files get the Unix epoch, i.e. they compare as oldest possible.
  # (`File.Stat[mtime: mtime]` is the old record-access syntax.)
  defp last_modified(path) do
    case File.stat(path) do
      { :ok, File.Stat[mtime: mtime] } -> mtime
      { :error, _ } -> { { 1970, 1, 1 }, { 0, 0, 0 } }
    end
  end

  @doc %S"""
  Reads the given file as a manifest and returns each entry
  as a list.

  A manifest is a tabular file where each line is a row
  and each entry in a row is separated by "\t". The first
  entry must always be a path to a compiled artifact.

  In case there is no manifest file, returns an empty list.
  """
  # (%S is the old spelling of the ~S sigil: no escapes in the doc above.)
  def read_manifest(file) do
    case File.read(file) do
      { :ok, contents } -> String.split(contents, "\n")
      { :error, _ } -> []
    end
  end

  @doc """
  Writes a manifest file with the given `entries` list.
  """
  # Creates the parent directory first so writes to fresh build dirs succeed.
  def write_manifest(file, entries) do
    Path.dirname(file) |> File.mkdir_p!
    File.write!(file, Enum.join(entries, "\n"))
  end
  @doc """
  Extract files from a list of paths.

  If any of the paths is a directory, the directory is looped
  recursively searching for the given extensions or the given pattern.
  When looking up directories, files starting with "." are ignored.
  """
  def extract_files(paths, exts_or_pattern)

  # A list of extensions is turned into a single wildcard pattern first.
  def extract_files(paths, exts) when is_list(exts) do
    extract_files(paths, "*.{#{Enum.join(exts, ",")}}")
  end

  # (`lc ... inlist` is the pre-1.0 spelling of a `for` list comprehension.)
  def extract_files(paths, pattern) do
    files = Enum.concat(lc path inlist paths do
      if File.regular?(path), do: [path], else: Path.wildcard("#{path}/**/#{pattern}")
    end)
    files |> exclude_files |> Enum.uniq
  end

  # Drop hidden files (basename starting with a dot).
  defp exclude_files(files) do
    filter = fn(x) -> not match?("." <> _, Path.basename(x)) end
    Enum.filter files, filter
  end

  @doc """
  Merges two configs recursively, merging keyword lists
  and concatenating normal lists.
  """
  # Values from `new` win, except lists which are combined:
  #   * keyword list vs keyword list -> recursive merge
  #   * other list vs list           -> concatenation
  #   * anything else                -> `new` replaces `old`
  def config_merge(old, new) do
    Keyword.merge(old, new, fn(_, x, y) ->
      if is_list(x) and is_list(y) do
        if Keyword.keyword?(x) and Keyword.keyword?(y) do
          config_merge(x, y)
        else
          x ++ y
        end
      else
        y
      end
    end)
  end
  @doc """
  Converts the given atom or binary to underscore format.

  If an atom is given, it is assumed to be an Elixir module,
  so it is converted to a binary and then processed.

  ## Examples

      Mix.Utils.underscore "FooBar"  #=> "foo_bar"
      Mix.Utils.underscore "Foo.Bar" #=> "foo/bar"
      Mix.Utils.underscore Foo.Bar   #=> "foo/bar"

  In general, `underscore` can be thought of as the reverse of
  `camelize`, however, in some cases formatting may be lost:

      Mix.Utils.underscore "SAPExample"  #=> "sap_example"
      Mix.Utils.camelize "sap_example"   #=> "SapExample"

  """
  # Module atoms always stringify as "Elixir.<Name>"; strip that prefix first.
  def underscore(atom) when is_atom(atom) do
    "Elixir." <> rest = atom_to_binary(atom)
    underscore(rest)
  end

  def underscore(<<h, t :: binary>>) do
    <<to_lower_char(h)>> <> do_underscore(t, h)
  end

  # do_underscore/2 walks the binary byte by byte, carrying the previous
  # character so word boundaries can be detected.

  # Acronym boundary: an uppercase letter followed by a non-uppercase one
  # ends an all-caps run ("SAPExample" -> "sap_example").
  defp do_underscore(<<h, t, rest :: binary>>, _) when h in ?A..?Z and not t in ?A..?Z do
    <<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
  end

  # Regular boundary: a non-uppercase character followed by an uppercase one.
  defp do_underscore(<<h, t :: binary>>, prev) when h in ?A..?Z and not prev in ?A..?Z do
    <<?_, to_lower_char(h)>> <> do_underscore(t, h)
  end

  # Dashes become underscores.
  defp do_underscore(<<?-, t :: binary>>, _) do
    <<?_>> <> do_underscore(t, ?-)
  end

  # Dots separate module segments, which map to directory separators.
  defp do_underscore(<<?., t :: binary>>, _) do
    <<?/>> <> underscore(t)
  end

  defp do_underscore(<<h, t :: binary>>, _) do
    <<to_lower_char(h)>> <> do_underscore(t, h)
  end

  defp do_underscore(<<>>, _) do
    <<>>
  end
  @doc """
  Converts the given string to CamelCase format.

  ## Examples

      Mix.Utils.camelize "foo_bar" #=> "FooBar"

  """
  # Leading underscores are dropped.
  def camelize(<<?_, t :: binary>>) do
    camelize(t)
  end

  def camelize(<<h, t :: binary>>) do
    <<to_upper_char(h)>> <> do_camelize(t)
  end

  # Collapse runs of consecutive underscores to a single boundary.
  defp do_camelize(<<?_, ?_, t :: binary>>) do
    do_camelize(<< ?_, t :: binary >>)
  end

  # An underscore followed by a lowercase letter starts a new capitalized word.
  defp do_camelize(<<?_, h, t :: binary>>) when h in ?a..?z do
    <<to_upper_char(h)>> <> do_camelize(t)
  end

  # A trailing underscore is dropped.
  defp do_camelize(<<?_>>) do
    <<>>
  end

  # A slash maps back to a module-segment dot, capitalizing what follows.
  defp do_camelize(<<?/, t :: binary>>) do
    <<?.>> <> camelize(t)
  end

  defp do_camelize(<<h, t :: binary>>) do
    <<h>> <> do_camelize(t)
  end

  defp do_camelize(<<>>) do
    <<>>
  end
  @doc """
  Takes a module and converts it to a command. The nesting
  argument can be given in order to remove the nesting of a
  module.

  ## Examples

      module_name_to_command(Mix.Tasks.Compile, 2)
      #=> "compile"

      module_name_to_command("Mix.Tasks.Compile.Elixir", 2)
      #=> "compile.elixir"

  """
  # (`//` was the default-argument operator in this era of Elixir, now `\\`;
  #  `%r/.../` is the old regex sigil, now `~r/.../`.)
  def module_name_to_command(module, nesting // 0)

  def module_name_to_command(module, nesting) when is_atom(module) do
    module_name_to_command(inspect(module), nesting)
  end

  # Drop the first `nesting` segments (eg. "Mix.Tasks") and lowercase the rest.
  def module_name_to_command(module, nesting) do
    t = Regex.split(%r/\./, to_string(module))
    t |> Enum.drop(nesting) |> Enum.map(&first_to_lower(&1)) |> Enum.join(".")
  end

  @doc """
  Takes a command and converts it to the module name format.

  ## Examples

      command_to_module_name("compile.elixir")
      #=> "Compile.Elixir"

  """
  def command_to_module_name(s) do
    Regex.split(%r/\./, to_string(s)) |>
      Enum.map(&first_to_upper(&1)) |>
      Enum.join(".")
  end

  # Upcase / downcase only the first byte of a segment.
  defp first_to_upper(<<s, t :: binary>>), do: <<to_upper_char(s)>> <> t
  defp first_to_upper(<<>>), do: <<>>

  defp first_to_lower(<<s, t :: binary>>), do: <<to_lower_char(s)>> <> t
  defp first_to_lower(<<>>), do: <<>>

  # ASCII-only case conversion; non-letter bytes pass through untouched.
  defp to_upper_char(char) when char in ?a..?z, do: char - 32
  defp to_upper_char(char), do: char

  defp to_lower_char(char) when char in ?A..?Z, do: char + 32
  defp to_lower_char(char), do: char
  @doc """
  Symlink directory `source` to `target` or copy it recursively
  in case symlink fails. Expect source and target to be absolute
  paths as it generates a relative symlink.
  """
  def symlink_or_copy(source, target) do
    if File.exists?(source) do
      source_list = String.to_char_list!(source)
      case :file.read_link(target) do
        { :ok, ^source_list } ->
          # Target already links to source — nothing to do.
          :ok
        { :ok, _ } ->
          # Target is a link, but to somewhere else: replace it.
          File.rm!(target)
          do_symlink_or_copy(source, target)
        { :error, :enoent } ->
          # Target does not exist yet.
          do_symlink_or_copy(source, target)
        { :error, _ } ->
          # Target exists but is not a symlink (eg. a real directory): wipe it.
          File.rm_rf!(target)
          do_symlink_or_copy(source, target)
      end
    else
      { :error, :enoent }
    end
  end

  # NOTE(review): the link is created *relative* (see make_relative_path/2)
  # but compared above against the absolute source, so an up-to-date link may
  # be torn down and recreated on every call — confirm whether that is intended.
  defp do_symlink_or_copy(source, target) do
    symlink_source = make_relative_path(source, target)
    case :file.make_symlink(symlink_source, target) do
      :ok -> :ok
      # Symlinks may be unsupported on the platform: fall back to copying.
      { :error, _ } -> File.cp_r!(source, Path.join(target, "."))
    end
  end

  # Make a relative path in between two paths.
  # Expects both paths to be fully expanded.
  defp make_relative_path(source, target) do
    do_make_relative_path(Path.split(source), Path.split(target))
  end

  # Drop the common prefix of both paths...
  defp do_make_relative_path([h|t1], [h|t2]) do
    do_make_relative_path(t1, t2)
  end

  # ...then climb out of the target's remaining directories and descend into
  # the source's ("-1" because the last target segment is the link itself).
  defp do_make_relative_path(source, target) do
    base = List.duplicate("..", max(length(target) - 1, 0))
    Path.join(base ++ source)
  end
@doc """
Opens and reads content from either a URL or a local filesystem path.
Used by tasks like `local.install` and `local.rebar` that support
installation either from a URL or a local file.
Raises if the given path is not a url, nor a file or if the
file or url are invalid.
"""
def read_path!(path) do
cond do
url?(path) -> read_url(path)
file?(path) -> read_file(path)
:else -> raise Mix.Error, message: "Expected #{path} to be a url or a local file path"
end
end
defp read_file(path) do
File.read!(path)
end
  # Fetches the body of `path` over HTTP via Erlang/OTP's built-in :httpc
  # client. Returns the response body as a binary; raises Mix.Error on any
  # non-2xx status or transport failure.
  defp read_url(path) do
    # :httpc delegates TLS to the :ssl application, which must be started
    # before issuing an https request.
    if URI.parse(path).scheme == "https" do
      :ssl.start
    end
    :inets.start
    # :httpc takes the URL as a charlist and returns the body as one.
    case :httpc.request(:binary.bin_to_list(path)) do
      { :ok, { { _, status, _ }, _, body } } when status in 200..299 ->
        :binary.list_to_bin(body)
      { :ok, { { _, status, _ }, _, _ } } ->
        raise Mix.Error, message: "Could not access url #{path}, got status: #{status}"
      { :error, reason } ->
        raise Mix.Error, message: "Could not access url #{path}, error: #{inspect reason}"
    end
  end
defp file?(path) do
File.regular?(path)
end
defp url?(path) do
URI.parse(path).scheme in ["http", "https"]
end
end
| 25.917275 | 98 | 0.62392 |
7345dcebbe732e0b517a90e0507b4025d3b2b326 | 535 | ex | Elixir | packages/api/lib/api_web/views/changeset_view.ex | ErikSkare/Meower | ddc5c75004111aa64587994f27085bba1c5bd377 | [
"MIT"
] | 1 | 2022-02-10T20:08:10.000Z | 2022-02-10T20:08:10.000Z | packages/api/lib/api_web/views/changeset_view.ex | ErikSkare/Meower | ddc5c75004111aa64587994f27085bba1c5bd377 | [
"MIT"
] | null | null | null | packages/api/lib/api_web/views/changeset_view.ex | ErikSkare/Meower | ddc5c75004111aa64587994f27085bba1c5bd377 | [
"MIT"
] | null | null | null | defmodule ApiWeb.ChangesetView do
use ApiWeb, :view
@doc """
Traverses and translates changeset errors.
See `Ecto.Changeset.traverse_errors/2` and
`ApiWeb.ErrorHelpers.translate_error/1` for more details.
"""
def translate_errors(changeset) do
Ecto.Changeset.traverse_errors(changeset, &translate_error/1)
end
def render("error.json", %{changeset: changeset}) do
# When encoded, the changeset returns its errors
# as a JSON object. So we just pass it forward.
translate_errors(changeset)
end
end
| 26.75 | 65 | 0.736449 |
7345e9eb0d7f9de9bd4ec80a843d7727edafce08 | 295 | exs | Elixir | priv/repo/migrations/20180223040733_create_states.exs | roger120981/planet | a662006551d11427c08cf6cdbacd37d377bcd9c5 | [
"MIT"
] | 1 | 2019-04-01T19:14:16.000Z | 2019-04-01T19:14:16.000Z | priv/repo/migrations/20180223040733_create_states.exs | roger120981/planet | a662006551d11427c08cf6cdbacd37d377bcd9c5 | [
"MIT"
] | null | null | null | priv/repo/migrations/20180223040733_create_states.exs | roger120981/planet | a662006551d11427c08cf6cdbacd37d377bcd9c5 | [
"MIT"
] | 1 | 2019-03-24T01:50:48.000Z | 2019-03-24T01:50:48.000Z | defmodule Excommerce.Repo.Migrations.CreateStates do
use Ecto.Migration
  # Creates the `states` table — abbreviation, name and a foreign key to
  # `countries` — plus an index to speed up lookups by country.
  def change do
    create table(:states) do
      add :abbr, :string
      add :name, :string
      add :country_id, references(:countries)
      timestamps()
    end
    create index(:states, [:country_id])
  end
end
| 19.666667 | 52 | 0.667797 |
73460d7be2dcd04c93944275325cc0370087819f | 130 | exs | Elixir | test/mazedrivers_bot_test.exs | videumcodeup/mazedrivers-bot-elixir | e4fe41e4d59fd57a2b9e1221c3bfdc45033bb040 | [
"MIT"
] | null | null | null | test/mazedrivers_bot_test.exs | videumcodeup/mazedrivers-bot-elixir | e4fe41e4d59fd57a2b9e1221c3bfdc45033bb040 | [
"MIT"
] | null | null | null | test/mazedrivers_bot_test.exs | videumcodeup/mazedrivers-bot-elixir | e4fe41e4d59fd57a2b9e1221c3bfdc45033bb040 | [
"MIT"
] | null | null | null | defmodule MazedriversBotTest do
use ExUnit.Case
doctest MazedriversBot
test "the truth" do
assert 1 + 1 == 2
end
end
| 14.444444 | 31 | 0.707692 |
73461ec5d0bec2deca77a7dd20c85f1c70bc5273 | 11,021 | ex | Elixir | debian/manpage.xml.ex | sethalves/snow2-client | b70c3ca5522a666a71a4c8992f771d5faaceccd6 | [
"BSD-3-Clause"
] | 15 | 2015-01-12T09:08:07.000Z | 2021-10-03T10:03:28.000Z | debian/manpage.xml.ex | sethalves/snow2-client | b70c3ca5522a666a71a4c8992f771d5faaceccd6 | [
"BSD-3-Clause"
] | 1 | 2018-08-13T23:12:46.000Z | 2018-08-14T17:46:42.000Z | debian/manpage.xml.ex | sethalves/snow2-client | b70c3ca5522a666a71a4c8992f771d5faaceccd6 | [
"BSD-3-Clause"
] | 3 | 2015-01-18T05:34:04.000Z | 2018-08-13T21:56:59.000Z | <?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE refentry PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd" [
<!--
`xsltproc -''-nonet \
-''-param man.charmap.use.subset "0" \
-''-param make.year.ranges "1" \
-''-param make.single.year.ranges "1" \
/usr/share/xml/docbook/stylesheet/docbook-xsl/manpages/docbook.xsl \
manpage.xml'
A manual page <package>.<section> will be generated. You may view the
manual page with: nroff -man <package>.<section> | less'. A typical entry
in a Makefile or Makefile.am is:
DB2MAN = /usr/share/sgml/docbook/stylesheet/xsl/docbook-xsl/manpages/docbook.xsl
XP = xsltproc -''-nonet -''-param man.charmap.use.subset "0"
manpage.1: manpage.xml
$(XP) $(DB2MAN) $<
The xsltproc binary is found in the xsltproc package. The XSL files are in
docbook-xsl. A description of the parameters you can use can be found in the
docbook-xsl-doc-* packages. Please remember that if you create the nroff
version in one of the debian/rules file targets (such as build), you will need
to include xsltproc and docbook-xsl in your Build-Depends control field.
Alternatively use the xmlto command/package. That will also automatically
pull in xsltproc and docbook-xsl.
Notes for using docbook2x: docbook2x-man does not automatically create the
AUTHOR(S) and COPYRIGHT sections. In this case, please add them manually as
<refsect1> ... </refsect1>.
To disable the automatic creation of the AUTHOR(S) and COPYRIGHT sections
read /usr/share/doc/docbook-xsl/doc/manpages/authors.html. This file can be
found in the docbook-xsl-doc-html package.
Validation can be done using: `xmllint -''-noout -''-valid manpage.xml`
General documentation about man-pages and man-page-formatting:
man(1), man(7), http://www.tldp.org/HOWTO/Man-Page/
-->
<!-- Fill in your name for FIRSTNAME and SURNAME. -->
<!ENTITY dhfirstname "FIRSTNAME">
<!ENTITY dhsurname "SURNAME">
<!-- dhusername could also be set to "&dhfirstname; &dhsurname;". -->
<!ENTITY dhusername "Seth Alves">
<!ENTITY dhemail "[email protected]">
<!-- SECTION should be 1-8, maybe w/ subsection other parameters are
allowed: see man(7), man(1) and
http://www.tldp.org/HOWTO/Man-Page/q2.html. -->
<!ENTITY dhsection "SECTION">
<!-- TITLE should be something like "User commands" or similar (see
http://www.tldp.org/HOWTO/Man-Page/q2.html). -->
<!ENTITY dhtitle "snow2-client User Manual">
<!ENTITY dhucpackage "SNOW2-CLIENT">
<!ENTITY dhpackage "snow2-client">
]>
<refentry>
<refentryinfo>
<title>&dhtitle;</title>
<productname>&dhpackage;</productname>
<authorgroup>
<author>
<firstname>&dhfirstname;</firstname>
<surname>&dhsurname;</surname>
<contrib>Wrote this manpage for the Debian system.</contrib>
<address>
<email>&dhemail;</email>
</address>
</author>
</authorgroup>
<copyright>
<year>2007</year>
<holder>&dhusername;</holder>
</copyright>
<legalnotice>
<para>This manual page was written for the Debian system
(and may be used by others).</para>
<para>Permission is granted to copy, distribute and/or modify this
document under the terms of the GNU General Public License,
Version 2 or (at your option) any later version published by
the Free Software Foundation.</para>
<para>On Debian systems, the complete text of the GNU General Public
License can be found in
<filename>/usr/share/common-licenses/GPL</filename>.</para>
</legalnotice>
</refentryinfo>
<refmeta>
<refentrytitle>&dhucpackage;</refentrytitle>
<manvolnum>&dhsection;</manvolnum>
</refmeta>
<refnamediv>
<refname>&dhpackage;</refname>
<refpurpose>program to do something</refpurpose>
</refnamediv>
<refsynopsisdiv>
<cmdsynopsis>
<command>&dhpackage;</command>
<!-- These are several examples, how syntaxes could look -->
<arg choice="plain"><option>-e <replaceable>this</replaceable></option></arg>
<arg choice="opt"><option>--example=<parameter>that</parameter></option></arg>
<arg choice="opt">
<group choice="req">
<arg choice="plain"><option>-e</option></arg>
<arg choice="plain"><option>--example</option></arg>
</group>
<replaceable class="option">this</replaceable>
</arg>
<arg choice="opt">
<group choice="req">
<arg choice="plain"><option>-e</option></arg>
<arg choice="plain"><option>--example</option></arg>
</group>
<group choice="req">
<arg choice="plain"><replaceable>this</replaceable></arg>
<arg choice="plain"><replaceable>that</replaceable></arg>
</group>
</arg>
</cmdsynopsis>
<cmdsynopsis>
<command>&dhpackage;</command>
<!-- Normally the help and version options make the programs stop
right after outputting the requested information. -->
<group choice="opt">
<arg choice="plain">
<group choice="req">
<arg choice="plain"><option>-h</option></arg>
<arg choice="plain"><option>--help</option></arg>
</group>
</arg>
<arg choice="plain">
<group choice="req">
<arg choice="plain"><option>-v</option></arg>
<arg choice="plain"><option>--version</option></arg>
</group>
</arg>
</group>
</cmdsynopsis>
</refsynopsisdiv>
<refsect1 id="description">
<title>DESCRIPTION</title>
<para>This manual page documents briefly the
<command>&dhpackage;</command> and <command>bar</command>
commands.</para>
<para>This manual page was written for the Debian distribution
because the original program does not have a manual page.
Instead, it has documentation in the GNU <citerefentry>
<refentrytitle>info</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry> format; see below.</para>
<para><command>&dhpackage;</command> is a program that...</para>
</refsect1>
<refsect1 id="options">
<title>OPTIONS</title>
<para>The program follows the usual GNU command line syntax,
with long options starting with two dashes (`-'). A summary of
options is included below. For a complete description, see the
<citerefentry>
<refentrytitle>info</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry> files.</para>
<variablelist>
<!-- Use the variablelist.term.separator and the
variablelist.term.break.after parameters to
control the term elements. -->
<varlistentry>
<term><option>-e <replaceable>this</replaceable></option></term>
<term><option>--example=<replaceable>that</replaceable></option></term>
<listitem>
<para>Does this and that.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-h</option></term>
<term><option>--help</option></term>
<listitem>
<para>Show summary of options.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-v</option></term>
<term><option>--version</option></term>
<listitem>
<para>Show version of program.</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1 id="files">
<title>FILES</title>
<variablelist>
<varlistentry>
<term><filename>/etc/foo.conf</filename></term>
<listitem>
<para>The system-wide configuration file to control the
behaviour of <application>&dhpackage;</application>. See
<citerefentry>
<refentrytitle>foo.conf</refentrytitle>
<manvolnum>5</manvolnum>
</citerefentry> for further details.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><filename>${HOME}/.foo.conf</filename></term>
<listitem>
<para>The per-user configuration file to control the
behaviour of <application>&dhpackage;</application>. See
<citerefentry>
<refentrytitle>foo.conf</refentrytitle>
<manvolnum>5</manvolnum>
</citerefentry> for further details.</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1 id="environment">
<title>ENVIRONMENT</title>
<variablelist>
<varlistentry>
<term><envar>FOO_CONF</envar></term>
<listitem>
<para>If used, the defined file is used as configuration
file (see also <xref linkend="files"/>).</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1 id="diagnostics">
<title>DIAGNOSTICS</title>
<para>The following diagnostics may be issued
on <filename class="devicefile">stderr</filename>:</para>
<variablelist>
<varlistentry>
<term><errortext>Bad configuration file. Exiting.</errortext></term>
<listitem>
<para>The configuration file seems to contain a broken configuration
line. Use the <option>--verbose</option> option, to get more info.
</para>
</listitem>
</varlistentry>
</variablelist>
<para><command>&dhpackage;</command> provides some return codes, that can
be used in scripts:</para>
<segmentedlist>
<segtitle>Code</segtitle>
<segtitle>Diagnostic</segtitle>
<seglistitem>
<seg><errorcode>0</errorcode></seg>
<seg>Program exited successfully.</seg>
</seglistitem>
<seglistitem>
<seg><errorcode>1</errorcode></seg>
<seg>The configuration file seems to be broken.</seg>
</seglistitem>
</segmentedlist>
</refsect1>
<refsect1 id="bugs">
<!-- Or use this section to tell about upstream BTS. -->
<title>BUGS</title>
<para>The program is currently limited to only work
with the <package>foobar</package> library.</para>
<para>The upstreams <acronym>BTS</acronym> can be found
at <ulink url="http://bugzilla.foo.tld"/>.</para>
</refsect1>
<refsect1 id="see_also">
<title>SEE ALSO</title>
<!-- In alpabetical order. -->
<para><citerefentry>
<refentrytitle>bar</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry>, <citerefentry>
<refentrytitle>baz</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry>, <citerefentry>
<refentrytitle>foo.conf</refentrytitle>
<manvolnum>5</manvolnum>
</citerefentry></para>
<para>The programs are documented fully by <citetitle>The Rise and
Fall of a Fooish Bar</citetitle> available via the <citerefentry>
<refentrytitle>info</refentrytitle>
<manvolnum>1</manvolnum>
</citerefentry> system.</para>
</refsect1>
</refentry>
| 37.743151 | 84 | 0.632338 |
734683ccefa18cf8d0dd39fe271ec9f84784a510 | 2,363 | ex | Elixir | apps/re_web/lib/graphql/resolvers/developments.ex | ruby2elixir/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | 4 | 2019-11-01T16:29:31.000Z | 2020-10-10T21:20:12.000Z | apps/re_web/lib/graphql/resolvers/developments.ex | eduardomartines/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | null | null | null | apps/re_web/lib/graphql/resolvers/developments.ex | eduardomartines/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | 5 | 2019-11-04T21:25:45.000Z | 2020-02-13T23:49:36.000Z | defmodule ReWeb.Resolvers.Developments do
@moduledoc false
alias Re.{
Addresses,
Development,
Developments,
Developments.Typologies
}
import Absinthe.Resolution.Helpers, only: [on_load: 2]
def index(_params, _context) do
developments = Developments.all()
{:ok, developments}
end
def show(%{uuid: uuid}, _context) do
Developments.get(uuid)
end
  # Creates a development: authorizes the current user, resolves the
  # address referenced by the input, then inserts. A four-element error
  # tuple (presumably from an Ecto.Multi — confirm) is unwrapped into a
  # plain {:error, reason}; any other failure is passed through as-is.
  def insert(%{input: development_params}, %{context: %{current_user: current_user}}) do
    with :ok <-
           Bodyguard.permit(Developments, :insert_development, current_user, development_params),
         {:ok, address} <- get_address(development_params),
         {:ok, development} <- Developments.insert(development_params, address) do
      {:ok, development}
    else
      {:error, _, error, _} -> {:error, error}
      error -> error
    end
  end
  # Resolves the development associated with a listing through Dataloader,
  # so lookups are batched across the whole GraphQL request.
  def per_listing(listing, _params, %{context: %{loader: loader}}) do
    loader
    |> Dataloader.load(Developments, :development, listing)
    |> on_load(fn loader ->
      {:ok, Dataloader.get(loader, Developments, :development, listing)}
    end)
  end
  # Resolves the typologies of a development via Dataloader, keyed by the
  # development's uuid. The loaded record is expected to carry a
  # `typologies` field, which is unwrapped and returned.
  def typologies(development, _params, %{context: %{loader: loader}}) do
    loader
    |> Dataloader.load(Typologies, Development, development.uuid)
    |> on_load(fn loader ->
      %{typologies: typologies} =
        Dataloader.get(loader, Typologies, Development, development.uuid)
      {:ok, typologies}
    end)
  end
  # Updates a development: load it, authorize the current user, resolve
  # the address, then apply the changes. The `with` has no `else`, so
  # whatever error value a failing step returns goes back to the caller
  # unchanged.
  def update(%{uuid: uuid, input: development_params}, %{
        context: %{current_user: current_user}
      }) do
    with {:ok, development} <- Developments.get(uuid),
         :ok <- Bodyguard.permit(Developments, :update_development, current_user, development),
         {:ok, address} <- get_address(development_params),
         {:ok, development} <- Developments.update(development, development_params, address) do
      {:ok, development}
    end
  end
def import_from_orulo(%{external_id: id}, %{context: %{current_user: current_user}}) do
with :ok <- Bodyguard.permit(Developments, :import_development_from_orulo, current_user),
{:ok, _job} <- ReIntegrations.Orulo.get_building_payload(id) do
{:ok, %{message: "Development syncronization scheduled!"}}
end
end
  # Resolves the address argument: inputs carrying an `address_id` are
  # looked up; anything else is rejected as a bad request.
  defp get_address(%{address_id: id}), do: Addresses.get_by_id(id)
  defp get_address(_), do: {:error, :bad_request}
end
| 31.506667 | 97 | 0.670334 |
73469138f282d6eebbba7e72ba63764ce71d89dc | 2,777 | exs | Elixir | apps/site/config/config.exs | paulswartz/dotcom | 73e43e7c61afd96b1928608ce8316a7ed0eb1440 | [
"MIT"
] | null | null | null | apps/site/config/config.exs | paulswartz/dotcom | 73e43e7c61afd96b1928608ce8316a7ed0eb1440 | [
"MIT"
] | null | null | null | apps/site/config/config.exs | paulswartz/dotcom | 73e43e7c61afd96b1928608ce8316a7ed0eb1440 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# Configures the endpoint
config :site, SiteWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "yK6hUINZWlq04EPu3SJjAHNDYgka8MZqgXZykF+AQ2PvWs4Ua4IELdFl198aMvw0",
render_errors: [accepts: ~w(html), layout: {SiteWeb.LayoutView, "app.html"}],
pubsub: [name: Site.PubSub, adapter: Phoenix.PubSub.PG2]
config :phoenix, :gzippable_exts, ~w(.txt .html .js .css .svg)
# Configures Elixir's Logger
config :logger, :console,
format: "$date $time $metadata[$level] $message\n",
metadata: [:request_id]
# Include referrer in Logster request log
config :logster, :allowed_headers, ["referer"]
config :site, SiteWeb.ViewHelpers, google_tag_manager_id: System.get_env("GOOGLE_TAG_MANAGER_ID")
config :laboratory,
features: [
{:events_hub_redesign, "Events Hub Redesign (Feb. 2021)",
"Changes to the event listings and the event pages as part of the 🤝 Public Engagement epic"}
],
cookie: [
# one month,
max_age: 3600 * 24 * 30,
http_only: true
]
config :site, Site.BodyTag, mticket_header: "x-mticket"
# Centralize Error reporting
config :sentry,
dsn: System.get_env("SENTRY_DSN") || "",
environment_name:
(case System.get_env("SENTRY_REPORTING_ENV") do
nil -> Mix.env()
env -> String.to_existing_atom(env)
end),
enable_source_code_context: false,
root_source_code_path: File.cwd!(),
included_environments: [:prod],
json_library: Poison,
filter: Site.SentryFilter
config :site, :former_mbta_site, host: "https://old.mbta.com"
config :site, tile_server_url: "https://mbta-map-tiles-dev.s3.amazonaws.com"
config :site, OldSiteFileController,
response_fn: {SiteWeb.OldSiteFileController, :send_file},
gtfs_s3_bucket: {:system, "GTFS_S3_BUCKET", "mbta-gtfs-s3"}
config :site, StaticFileController, response_fn: {SiteWeb.StaticFileController, :send_file}
config :util,
router_helper_module: {:ok, SiteWeb.Router.Helpers},
endpoint: {:ok, SiteWeb.Endpoint}
config :hammer,
backend: {Hammer.Backend.ETS, [expiry_ms: 60_000 * 60 * 4, cleanup_interval_ms: 60_000 * 10]}
config :recaptcha,
public_key: {:system, "RECAPTCHA_PUBLIC_KEY"},
secret: {:system, "RECAPTCHA_PRIVATE_KEY"}
config :site, :react,
source_path: Path.join(File.cwd!(), "/apps/site/react_renderer/"),
build_path: Path.join(File.cwd!(), "/apps/site/react_renderer/dist/app.js")
config :site,
allow_indexing: false
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 33.059524 | 97 | 0.734966 |
7346cba6f91a8917ee3960e53257b0377b6cf01c | 2,976 | ex | Elixir | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/analyze_iam_policy_longrunning_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/analyze_iam_policy_longrunning_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/analyze_iam_policy_longrunning_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudAsset.V1.Model.AnalyzeIamPolicyLongrunningRequest do
@moduledoc """
A request message for AssetService.AnalyzeIamPolicyLongrunning.
## Attributes
* `analysisQuery` (*type:* `GoogleApi.CloudAsset.V1.Model.IamPolicyAnalysisQuery.t`, *default:* `nil`) - Required. The request query.
* `outputConfig` (*type:* `GoogleApi.CloudAsset.V1.Model.IamPolicyAnalysisOutputConfig.t`, *default:* `nil`) - Required. Output configuration indicating where the results will be output to.
* `savedAnalysisQuery` (*type:* `String.t`, *default:* `nil`) - Optional. The name of a saved query, which must be in the format of: * projects/project_number/savedQueries/saved_query_id * folders/folder_number/savedQueries/saved_query_id * organizations/organization_number/savedQueries/saved_query_id If both `analysis_query` and `saved_analysis_query` are provided, they will be merged together with the `saved_analysis_query` as base and the `analysis_query` as overrides. For more details of the merge behavior, please refer to the [MergeFrom](https://developers.google.com/protocol-buffers/docs/reference/cpp/google.protobuf.message#Message.MergeFrom.details) doc. Note that you cannot override primitive fields with default value, such as 0 or empty string, etc., because we use proto3, which doesn't support field presence yet.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:analysisQuery => GoogleApi.CloudAsset.V1.Model.IamPolicyAnalysisQuery.t() | nil,
:outputConfig => GoogleApi.CloudAsset.V1.Model.IamPolicyAnalysisOutputConfig.t() | nil,
:savedAnalysisQuery => String.t() | nil
}
field(:analysisQuery, as: GoogleApi.CloudAsset.V1.Model.IamPolicyAnalysisQuery)
field(:outputConfig, as: GoogleApi.CloudAsset.V1.Model.IamPolicyAnalysisOutputConfig)
field(:savedAnalysisQuery)
end
defimpl Poison.Decoder, for: GoogleApi.CloudAsset.V1.Model.AnalyzeIamPolicyLongrunningRequest do
def decode(value, options) do
GoogleApi.CloudAsset.V1.Model.AnalyzeIamPolicyLongrunningRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudAsset.V1.Model.AnalyzeIamPolicyLongrunningRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 56.150943 | 839 | 0.774866 |
7346cc6f275d80a6da005e9bce647bca77063f1f | 347 | exs | Elixir | priv/repo/seeds.exs | msaminsky/mulch | c5bd26fe2cb13cc7ebfc58d858686209febc32dc | [
"MIT"
] | 1 | 2020-10-20T22:50:45.000Z | 2020-10-20T22:50:45.000Z | priv/repo/seeds.exs | msaminsky/mulch | c5bd26fe2cb13cc7ebfc58d858686209febc32dc | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | msaminsky/mulch | c5bd26fe2cb13cc7ebfc58d858686209febc32dc | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Mulch.Repo.insert!(%Mulch.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 28.916667 | 61 | 0.70317 |
7346d78cb44763ef04be13425508aca13c121515 | 884 | ex | Elixir | lib/credo/cli/filter.ex | jlgeering/credo | b952190ed758c262aa0d9bbee01227f9b1f0c63b | [
"MIT"
] | null | null | null | lib/credo/cli/filter.ex | jlgeering/credo | b952190ed758c262aa0d9bbee01227f9b1f0c63b | [
"MIT"
] | null | null | null | lib/credo/cli/filter.ex | jlgeering/credo | b952190ed758c262aa0d9bbee01227f9b1f0c63b | [
"MIT"
] | null | null | null | defmodule Credo.CLI.Filter do
alias Credo.Check.ConfigComment
alias Credo.Execution
alias Credo.Issue
alias Credo.SourceFile
def important(list, exec) when is_list(list) do
Enum.filter(list, &important?(&1, exec))
end
def important?(%Issue{} = issue, exec) do
issue.priority >= exec.min_priority
end
def important?(%SourceFile{filename: filename}, exec) do
exec
|> Execution.get_issues(filename)
|> Enum.any?(&important?(&1, exec))
end
def valid_issues(list, exec) when is_list(list) do
Enum.reject(list, fn issue ->
ignored_by_config_comment?(issue, exec)
end)
end
  # Checks whether any config comment recorded for the issue's file
  # suppresses this particular issue. Files without an entry in the
  # config-comment map (or with a non-list entry) never suppress.
  def ignored_by_config_comment?(%Issue{} = issue, exec) do
    case exec.config_comment_map[issue.filename] do
      list when is_list(list) ->
        Enum.any?(list, &ConfigComment.ignores_issue?(&1, issue))
      _ ->
        false
    end
  end
end
| 23.891892 | 65 | 0.675339 |
7346db05e58a5335c315ea76b2427f94a96607bf | 1,137 | ex | Elixir | lib/maze_server_web/channels/user_socket.ex | thantez/maze_ai | 5d371beddf1626a45fd70d37a886f8d39ca80338 | [
"MIT"
] | null | null | null | lib/maze_server_web/channels/user_socket.ex | thantez/maze_ai | 5d371beddf1626a45fd70d37a886f8d39ca80338 | [
"MIT"
] | null | null | null | lib/maze_server_web/channels/user_socket.ex | thantez/maze_ai | 5d371beddf1626a45fd70d37a886f8d39ca80338 | [
"MIT"
] | null | null | null | defmodule MazeServerWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", MazeServerWeb.RoomChannel
channel "maze_socket:lobby", MazeServerWeb.MazeSocketChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# MazeServerWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
  # Anonymous sockets: returning nil means there is no per-user socket id,
  # so individual users' sockets cannot be targeted for broadcast/disconnect.
  def id(_socket), do: nil
end
| 32.485714 | 85 | 0.707124 |
7346e1057a2a8c82ef4d002f5785ed70c86025f3 | 1,080 | exs | Elixir | elixir/samples/seat_server/mix.exs | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | 2 | 2015-12-09T02:16:51.000Z | 2021-07-26T22:53:43.000Z | elixir/samples/seat_server/mix.exs | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | null | null | null | elixir/samples/seat_server/mix.exs | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | 1 | 2016-05-08T18:40:31.000Z | 2016-05-08T18:40:31.000Z | defmodule SeatServer.Mixfile do
use Mix.Project
def project do
[app: :seat_server,
version: "0.0.1",
elixir: "~> 1.0",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[mod: {SeatServer, []},
applications: [:phoenix, :phoenix_html, :cowboy, :logger,
:phoenix_ecto, :postgrex]]
end
# Specifies which paths to compile per environment
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies
#
# Type `mix help deps` for examples and options
defp deps do
[{:phoenix, "~> 1.0.2"},
{:phoenix_ecto, "~> 1.1"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.1"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:cowboy, "~> 1.0"}]
end
end
| 27 | 63 | 0.600926 |
7346e4c7624fbde0910e1185304268c0bad4993f | 1,145 | exs | Elixir | clients/drive/mix.exs | CertifiedrLi/elixir-google-api | 4e0e261dd06ee7753c356cca413783f3facd5f03 | [
"Apache-2.0"
] | null | null | null | clients/drive/mix.exs | CertifiedrLi/elixir-google-api | 4e0e261dd06ee7753c356cca413783f3facd5f03 | [
"Apache-2.0"
] | null | null | null | clients/drive/mix.exs | CertifiedrLi/elixir-google-api | 4e0e261dd06ee7753c356cca413783f3facd5f03 | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | defmodule GoogleApi.Drive.V3.Mixfile do
use Mix.Project
def project do
[app: :google_api_drive,
version: "0.0.1",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/drive"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:tesla, "~> 0.8"},
{:poison, ">= 1.0.0"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Manages files in Drive including uploading, downloading, searching, detecting changes, and updating sharing permissions.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/drive",
"Homepage" => "https://developers.google.com/drive/"
}
]
end
end
| 24.361702 | 124 | 0.595633 |
7346fe65a5ba7d2e134b08bb1bd01373d41a840f | 2,386 | ex | Elixir | lib/cloister/application.ex | am-kantox/cloister | b5e661fab5dc2eeb27b175bf0069bf963b1afce6 | [
"MIT"
] | 12 | 2020-05-07T08:57:06.000Z | 2020-08-19T01:32:03.000Z | lib/cloister/application.ex | am-kantox/cloister | b5e661fab5dc2eeb27b175bf0069bf963b1afce6 | [
"MIT"
] | 21 | 2020-07-12T14:17:22.000Z | 2021-08-03T04:42:30.000Z | lib/cloister/application.ex | am-kantox/cloister | b5e661fab5dc2eeb27b175bf0069bf963b1afce6 | [
"MIT"
] | null | null | null | defmodule Cloister.Application do
@moduledoc false
use Application
require Logger
@consensus 3
@consensus_timeout 3_000
  # Starts the application: logs the effective :cloister configuration and
  # boots a one-for-one supervisor owning a single Cloister.Manager child.
  @impl Application
  def start(_type, _args) do
    Logger.debug(
      "[🕸️ :#{node()}] starting cloister with config:\n" <>
        inspect(Application.get_all_env(:cloister))
    )
    manager = Application.get_env(:cloister, :manager, [])
    children = [
      # NOTE(review): the manager options list is wrapped in another list
      # here — confirm Cloister.Manager expects `[opts]` as its argument.
      {Cloister.Manager, [manager]}
    ]
    opts = [strategy: :one_for_one, name: Cloister.Supervisor]
    Supervisor.start_link(children, opts)
  end
  # Before shutdown, asks the monitor to leave the cluster gracefully.
  # NOTE(review): prep_stop/1 conventionally returns the (possibly
  # updated) state; here the result of terminate/2 is returned instead —
  # confirm that is intentional.
  @impl Application
  def prep_stop(_state),
    do: Cloister.Monitor.terminate({:shutdown, :application}, Cloister.Monitor.state())
  # Phase I: blocks application startup until enough nodes are visible.
  # The consensus size comes from phase args, falling back to the app env
  # and finally to the @consensus module default. Note the log line fires
  # only after the wait has completed.
  @impl Application
  def start_phase(:warming_up, _start_type, phase_args) do
    phase_args
    |> Keyword.get(:consensus, Application.get_env(:cloister, :consensus, @consensus))
    |> wait_consensus(0)
    Logger.info("[🕸️ :#{node()}] Cloister → Phase I. Warming up, waiting for consensus.")
  end
  # Phase II: once consensus is reached, pushes the phase args to the
  # monitor so hash-ring groups get recalculated.
  @impl Application
  def start_phase(:rehash_on_up, _start_type, phase_args) do
    Cloister.Monitor.update_groups(phase_args)
    Logger.info("[🕸️ :#{node()}] Cloister → Phase II. Updating groups.")
  end
  @spec wait_consensus(consensus :: non_neg_integer(), retries :: non_neg_integer()) :: :ok
  # Sleeps one @consensus_timeout interval, then re-checks the currently
  # visible nodes against the required consensus size.
  defp wait_consensus(consensus, retries) do
    Process.sleep(@consensus_timeout)
    do_wait_consensus(Cloister.Modules.info_module().nodes(), consensus, retries)
  end
  @spec do_wait_consensus(
          [node() | {:error, :no_such_ring}],
          consensus :: non_neg_integer(),
          retries :: non_neg_integer()
        ) :: :ok
  # The info module reports {:error, :no_such_ring} while the hash ring is not
  # yet initialized — treat that exactly like "not enough nodes" and keep polling.
  defp do_wait_consensus([{:error, :no_such_ring} | _], consensus, retries),
    do: wait_consensus(consensus, retries)

  defp do_wait_consensus(nodes, consensus, retries) when is_list(nodes) do
    # TODO understand if this might boost the startup
    # nodes = [node() | Node.list()]
    nodes
    |> Enum.count()
    |> case do
      n when n < consensus ->
        message = "[🕸️ :#{node()}] ⏳ retries: [#{retries}], nodes: [" <> inspect(nodes) <> "]"

        # NOTE(review): div(retries, 10) == 0 only for retries 0..9, so this
        # warns during the first ten attempts and then logs at debug forever.
        # If the intent was "warn every 10th retry", this should use
        # rem(retries, 10) — confirm before changing.
        case div(retries, 10) do
          0 -> Logger.warn(message)
          _ -> Logger.debug(message)
        end

        # Not enough nodes yet: sleep and poll again with an incremented counter.
        wait_consensus(consensus, retries + 1)

      _ ->
        # Consensus reached; Logger.info/1 returns :ok, satisfying the @spec.
        Logger.info("[🕸️ :#{node()}] ⌚ retries: [#{retries}], nodes: [" <> inspect(nodes) <> "]")
    end
  end
end
| 28.746988 | 97 | 0.643336 |
73472e57fac2062bd2a1c89128b2a26e667b3a2d | 203 | exs | Elixir | config/config.exs | libitx/terminus | f394aea4ced49aec216203b19c33f53578eac7ec | [
"Apache-2.0"
] | 16 | 2020-04-15T14:45:15.000Z | 2022-02-28T03:28:22.000Z | config/config.exs | libitx/terminus | f394aea4ced49aec216203b19c33f53578eac7ec | [
"Apache-2.0"
] | 1 | 2020-07-28T21:39:41.000Z | 2020-07-29T13:05:45.000Z | config/config.exs | libitx/terminus | f394aea4ced49aec216203b19c33f53578eac7ec | [
"Apache-2.0"
] | 3 | 2021-01-10T22:39:43.000Z | 2022-03-15T07:39:22.000Z | use Mix.Config
# Environment-dependent configuration: the test environment runs quietly
# (errors only) against a local plain-HTTP endpoint with a stub token; every
# other environment just sets the log level.
if Mix.env() == :test do
  config :logger, level: :error

  config :terminus,
    scheme: :http,
    port: 8088,
    token: "test"
else
  config :logger, level: :info
end
| 14.5 | 33 | 0.571429 |
734748dfd10b948ae874857ed0ad5736dcb76c44 | 3,533 | exs | Elixir | apps/nerves_hub_www/test/nerves_hub_www_web/controllers/org_user_controller_test.exs | tonnenpinguin/nerves_hub_web | 9d36921eb7e20d20a3e3bd308cc98ad7b60cfa72 | [
"Apache-2.0"
] | 111 | 2018-07-25T01:07:51.000Z | 2022-01-25T17:03:01.000Z | apps/nerves_hub_www/test/nerves_hub_www_web/controllers/org_user_controller_test.exs | tonnenpinguin/nerves_hub_web | 9d36921eb7e20d20a3e3bd308cc98ad7b60cfa72 | [
"Apache-2.0"
] | 361 | 2018-07-22T12:53:00.000Z | 2022-03-31T18:50:34.000Z | apps/nerves_hub_www/test/nerves_hub_www_web/controllers/org_user_controller_test.exs | tonnenpinguin/nerves_hub_web | 9d36921eb7e20d20a3e3bd308cc98ad7b60cfa72 | [
"Apache-2.0"
] | 54 | 2018-08-26T02:58:04.000Z | 2022-03-09T10:12:19.000Z | defmodule NervesHubWWWWeb.OrgUserControllerTest do
  use NervesHubWWWWeb.ConnCase.Browser, async: true
  use Bamboo.Test

  alias NervesHubWebCore.{Accounts, Fixtures}

  # Every test additionally gets a second user (username "<base>0") under the
  # :user2 context key — used below as the target of role updates and removals.
  setup context do
    user = Fixtures.user_fixture(%{username: context.user.username <> "0"})
    Map.put(context, :user2, user)
  end
describe "index" do
test "lists all users in an organization", %{
conn: conn,
org: org
} do
org_users = Accounts.get_org_users(org)
conn = get(conn, Routes.org_user_path(conn, :index, org.name))
assert html_response(conn, 200) =~ "Users"
Enum.each(org_users, fn org_user ->
assert html_response(conn, 200) =~ org_user.user.username
end)
end
test "user is able to invite users to org", %{conn: conn, org: org} do
conn = get(conn, Routes.org_user_path(conn, :index, org.name))
assert html_response(conn, 200) =~ "Add New User"
end
test "user is unable to invite users to user org", %{conn: conn, user: user} do
conn = get(conn, Routes.org_user_path(conn, :index, user.username))
refute html_response(conn, 200) =~ "Add New User"
end
end
describe "update org_user role" do
setup [:create_org_user]
test "updates role and redirects", %{conn: conn, org: org, user2: user} do
conn =
put(conn, Routes.org_user_path(conn, :update, org.name, user.id), %{
org_user: %{role: "write"}
})
assert redirected_to(conn) == Routes.org_user_path(conn, :index, org.name)
conn = get(conn, Routes.org_user_path(conn, :index, org.name))
assert html_response(conn, 200) =~ "Role updated"
assert html_response(conn, 200) =~ "write"
end
test "shows error", %{conn: conn, org: org, user2: user} do
conn =
put(conn, Routes.org_user_path(conn, :update, org.name, user.id), %{
org_user: %{role: "invalid role"}
})
assert html_response(conn, 200) =~ "Error updating role"
assert html_response(conn, 200) =~ "is invalid"
end
end
  describe "delete valid user" do
    setup [:create_org_user]

    test "removes existing user", %{conn: conn, org: org, user2: user} do
      conn = delete(conn, Routes.org_user_path(conn, :delete, org.name, user.id))
      assert redirected_to(conn) == Routes.org_user_path(conn, :index, org.name)

      # An email should have been sent
      instigator = conn.assigns.user.username

      # Bamboo.Test: assert the removal notification (with this exact subject)
      # was delivered as part of the delete request above.
      assert_email_delivered_with(
        subject: "[NervesHub] User #{instigator} removed #{user.username} from #{org.name}"
      )

      # The membership record must be gone from the database…
      assert {:error, :not_found} = Accounts.get_org_user(org, user)

      # …and the index page should confirm the removal via its flash message.
      conn = get(conn, Routes.org_user_path(conn, :index, org.name))
      assert html_response(conn, 200) =~ "User removed"
    end
  end
test "fails to remove existing user", %{conn: conn, org: org, user: user} do
{:ok, org_user} = Accounts.get_org_user(org, user)
conn = delete(conn, Routes.org_user_path(conn, :delete, org.name, user.id))
assert redirected_to(conn) == Routes.org_user_path(conn, :index, org.name)
assert_no_emails_delivered()
assert {:ok, ^org_user} = Accounts.get_org_user(org, user)
conn = get(conn, Routes.org_user_path(conn, :index, org.name))
assert html_response(conn, 200) =~ "Could not remove user"
end
end
defp create_org_user(%{user2: user, org: org}) do
{:ok, org_user} = Accounts.add_org_user(org, user, %{role: :admin})
{:ok, %{org_user: org_user}}
end
end
| 33.647619 | 91 | 0.650722 |
73474be9472c4ceaf45c00a52585c32581bfe243 | 736 | ex | Elixir | lib/mongo/query.ex | zookzook/mongodb | 7055a282cfa55efc62dfc4542092fc6109a53d77 | [
"Apache-2.0"
] | null | null | null | lib/mongo/query.ex | zookzook/mongodb | 7055a282cfa55efc62dfc4542092fc6109a53d77 | [
"Apache-2.0"
] | 7 | 2018-10-12T07:53:57.000Z | 2018-10-14T19:06:55.000Z | lib/mongo/query.ex | zookzook/mongodb | 7055a282cfa55efc62dfc4542092fc6109a53d77 | [
"Apache-2.0"
] | null | null | null | defmodule Mongo.Query do
  @moduledoc false

  # Internal query token threaded through DBConnection (see the
  # DBConnection.Query implementation below):
  #   action   – NOTE(review): not read in this file; presumably identifies the
  #              wire operation — confirm against the driver internals.
  #   extra    – NOTE(review): opaque payload; not read in this file.
  #   encoded? – when true, encode/3 passes params through unchanged
  #              (they are assumed to be pre-encoded BSON).
  defstruct action: nil, extra: nil, encoded?: false
end
defimpl DBConnection.Query, for: Mongo.Query do
  import Mongo.Messages, only: [op_reply: 1, op_reply: 2]

  # No parsing or describing is needed; the query token passes through as-is.
  def parse(query, _opts), do: query
  def describe(query, _opts), do: query

  # Already-encoded queries skip BSON encoding entirely.
  def encode(%Mongo.Query{encoded?: true}, params, _opts), do: params

  def encode(_query, params, _opts) do
    Enum.map(params, &encode_param/1)
  end

  # nil placeholders become empty binaries; documents are BSON-encoded.
  defp encode_param(nil), do: ""
  defp encode_param(doc), do: BSON.Encoder.document(doc)

  # Pass-throughs: :ok results and bare wire-version integers need no decoding.
  def decode(_query, :ok, _opts), do: :ok
  def decode(_query, wire_version, _opts) when is_integer(wire_version), do: wire_version

  # op_reply records: decode the raw BSON documents in place.
  def decode(_query, op_reply(docs: docs) = reply, _opts),
    do: op_reply(reply, docs: BSON.Decoder.documents(docs))
end
| 26.285714 | 114 | 0.677989 |
7347628d507f5f6b816ed7401ff593bafa27b8ed | 815 | exs | Elixir | mssqlsample/mix.exs | Dmdv/ElixirPlayground | 02d9e8a7fdd6e8742e200430debc9f0ec7fd28a1 | [
"Apache-2.0"
] | null | null | null | mssqlsample/mix.exs | Dmdv/ElixirPlayground | 02d9e8a7fdd6e8742e200430debc9f0ec7fd28a1 | [
"Apache-2.0"
] | null | null | null | mssqlsample/mix.exs | Dmdv/ElixirPlayground | 02d9e8a7fdd6e8742e200430debc9f0ec7fd28a1 | [
"Apache-2.0"
] | null | null | null | defmodule Mssqlsample.Mixfile do
use Mix.Project
def project do
[app: :mssqlsample,
version: "0.1.0",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:postgrex, ">= 0.0.0"},
{:ecto, "~> 2.1"}
]
end
end
| 22.027027 | 79 | 0.598773 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.