hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7926fd6d310fac09675f8579ecebede8e3f068de | 258 | ex | Elixir | lib/prm/web/views/global_parameter_view.ex | EDENLABLLC/prm.api | 86743f26874f47ce3d48010ccf5d2cd596a3474b | [
"Apache-2.0"
] | 1 | 2017-07-27T16:03:28.000Z | 2017-07-27T16:03:28.000Z | lib/prm/web/views/global_parameter_view.ex | EDENLABLLC/prm.api | 86743f26874f47ce3d48010ccf5d2cd596a3474b | [
"Apache-2.0"
] | null | null | null | lib/prm/web/views/global_parameter_view.ex | EDENLABLLC/prm.api | 86743f26874f47ce3d48010ccf5d2cd596a3474b | [
"Apache-2.0"
] | null | null | null | defmodule PRM.Web.GlobalParameterView do
@moduledoc false
use PRM.Web, :view
def render("index.json", %{global_parameters: global_parameters}) do
Enum.reduce(global_parameters, %{}, fn(x, acc) -> Map.put(acc, x.parameter, x.value) end)
end
end
| 25.8 | 93 | 0.713178 |
79270ada7a9d474cdfe282eb944413460112bcc6 | 534 | exs | Elixir | config/test.exs | gpad/les | 2317b8055ab24aa857a6cda06f6e529c992c668c | [
"Apache-2.0"
] | 11 | 2018-04-06T14:02:15.000Z | 2020-12-09T10:44:03.000Z | config/test.exs | gpad/les | 2317b8055ab24aa857a6cda06f6e529c992c668c | [
"Apache-2.0"
] | 1 | 2018-11-18T02:35:23.000Z | 2018-11-18T02:35:23.000Z | config/test.exs | gpad/les | 2317b8055ab24aa857a6cda06f6e529c992c668c | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :les, LesWeb.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :les, Les.Repo,
adapter: Ecto.Adapters.Postgres,
username: System.get_env("PGUSER") || "postgres",
password: System.get_env("PGPASSWORD") || "postgres",
database: "les_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
| 26.7 | 56 | 0.720974 |
792720fe907f04e929f4c3c39941c1a6fe45d201 | 3,682 | exs | Elixir | apps/platform_runner/test/performance_test.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | apps/platform_runner/test/performance_test.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | apps/platform_runner/test/performance_test.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | defmodule Platform.Runner.PerformanceTest do
use ExUnit.Case
use Divo
import AssertAsync
@moduletag performance: true, divo: true
setup do
Logger.configure(level: :info)
bp = Bypass.open()
data = File.read!("perf.data")
Bypass.stub(bp, "GET", "/file.csv", fn conn ->
Plug.Conn.resp(conn, 200, data)
end)
[bypass: bp]
end
@tag timeout: :infinity
test "performance through persist", %{bypass: bypass} do
Benchee.run(
%{
"csv_persist" => fn -> persist_csv(port: bypass.port, dataset: "persisted") end,
"csv_broadcast" => fn -> broadcast_csv(port: bypass.port, dataset: "broadcasted") end
},
warmup: 0
)
end
defp persist_csv(opts) do
ds = Keyword.fetch!(opts, :dataset)
csv(opts)
persist =
Load.Persist.new!(
id: "perf-#{ds}-persist-1",
dataset_id: "perf-#{ds}",
subset_id: "default",
source: "perf-#{ds}-csv",
destination: "perf_#{ds}_persist",
config: %{
"kafka" => %{
"partitions" => 4,
"partitioner" => "md5"
}
}
)
Gather.Application.instance()
|> Events.send_load_persist_start("performance", persist)
session =
Prestige.new_session(
url: "http://localhost:8080",
user: "hindsight",
catalog: "hive",
schema: "default"
)
assert_async sleep: 1_000, max_tries: 1_000, debug: true do
with {:ok, result} <- Prestige.query(session, "select count(*) from perf_#{ds}_persist") do
assert result.rows == [[100_000]]
else
{:error, reason} -> flunk(inspect(reason))
end
end
end
defp broadcast_csv(opts) do
ds = Keyword.fetch!(opts, :dataset)
csv(opts)
broadcast =
Load.Broadcast.new!(
id: "perf-#{ds}-broadcast-1",
dataset_id: "perf-#{ds}",
subset_id: "default",
source: "perf-#{ds}-csv",
destination: "perf_#{ds}_broadcast"
)
{:ok, _} =
PlatformRunner.BroadcastClient.join(
caller: self(),
topic: broadcast.destination
)
Gather.Application.instance()
|> Events.send_load_broadcast_start("performance", broadcast)
assert_receive %{"letter" => "b", "number" => 100_000}, 90_000
end
defp csv(opts) do
# dictionary = Enum.map(1..100, fn i -> Dictionary.Type.String.new!(name: "string_#{i}") end)
# headers = Enum.map(dictionary, &Map.get(&1, :name))
ds = Keyword.fetch!(opts, :dataset)
dictionary =
Dictionary.from_list([
Dictionary.Type.String.new!(name: "letter"),
Dictionary.Type.Integer.new!(name: "number")
])
headers = ["letter", "number"]
extract =
Extract.new!(
id: "perf-#{ds}-extract-1",
dataset_id: "perf-#{ds}",
subset_id: "default",
destination: "perf-#{ds}-csv",
steps: [
Extract.Http.Get.new!(url: "http://localhost:#{Keyword.fetch!(opts, :port)}/file.csv"),
Extract.Decode.Csv.new!(headers: headers)
],
dictionary: dictionary,
message_key: ["letter"],
config: %{
"kafka" => %{
"partitions" => 4,
"partitioner" => "md5"
}
}
)
Gather.Application.instance()
|> Events.send_extract_start("performance", extract)
transform =
Transform.new!(
id: "perf-#{ds}-tranform-1",
dataset_id: "perf-#{ds}",
subset_id: "default",
dictionary: dictionary,
Steps: []
)
Gather.Application.instance()
|> Events.send_transform_define("performance", transform)
end
end
| 25.393103 | 97 | 0.564096 |
7927512bcd6968517d52ffe212a0647f8ae14ed7 | 1,502 | exs | Elixir | mix.exs | wyeworks/elixir_console | f72147224131cb43ebea4a5929030928cdf155d0 | [
"MIT"
] | 60 | 2019-11-23T15:54:24.000Z | 2022-03-24T15:56:32.000Z | mix.exs | wyeworks/elixir_console | f72147224131cb43ebea4a5929030928cdf155d0 | [
"MIT"
] | 62 | 2019-11-11T00:44:38.000Z | 2022-01-10T13:12:48.000Z | mix.exs | wyeworks/elixir_console | f72147224131cb43ebea4a5929030928cdf155d0 | [
"MIT"
] | 5 | 2020-01-15T00:44:30.000Z | 2021-05-25T05:06:27.000Z | defmodule ElixirConsole.MixProject do
use Mix.Project
def project do
[
app: :elixir_console,
version: "0.1.0",
elixir: "~> 1.12.3",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {ElixirConsole.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.6.0"},
{:phoenix_live_view, "~> 0.16.0"},
{:phoenix_html, "~> 3.0"},
{:phoenix_live_reload, "~> 1.3", only: :dev},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.1"},
{:earmark, "~> 1.4.0"},
{:floki, "~> 0.31.0", only: :test},
{:sentry, "~> 7.0"},
{:wallaby, "~> 0.28", only: :test, runtime: false}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
test: ["test"]
]
end
end
| 25.033333 | 84 | 0.583888 |
792753c24c0f7ff31b937551bd30300f38651028 | 64 | ex | Elixir | examples/your_app/lib/your_app_web/views/session_view.ex | henriquefernandez/entrance | ac10316678a89b6904b17bdcf7a9061d67f055fc | [
"MIT"
] | 11 | 2020-01-25T16:17:24.000Z | 2020-06-03T10:46:27.000Z | examples/your_app/lib/your_app_web/views/session_view.ex | henriquefernandez/entrance | ac10316678a89b6904b17bdcf7a9061d67f055fc | [
"MIT"
] | null | null | null | examples/your_app/lib/your_app_web/views/session_view.ex | henriquefernandez/entrance | ac10316678a89b6904b17bdcf7a9061d67f055fc | [
"MIT"
] | null | null | null | defmodule YourAppWeb.SessionView do
use YourAppWeb, :view
end
| 16 | 35 | 0.8125 |
79277cadaa90ae0c0108fd132d7552c16b6a222d | 637 | exs | Elixir | metex/mix.exs | laixintao/elixirlabs | 40d4b5e0b23279855b35a2911d27718f93a79f7e | [
"BSD-3-Clause"
] | null | null | null | metex/mix.exs | laixintao/elixirlabs | 40d4b5e0b23279855b35a2911d27718f93a79f7e | [
"BSD-3-Clause"
] | null | null | null | metex/mix.exs | laixintao/elixirlabs | 40d4b5e0b23279855b35a2911d27718f93a79f7e | [
"BSD-3-Clause"
] | null | null | null | defmodule Metex.MixProject do
use Mix.Project
def project do
[
app: :metex,
version: "0.1.0",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger, :httposion]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
{:httpoison, "~> 0.9.0"},
{:json, "~> 0.3.0"}
]
end
end
| 20.548387 | 87 | 0.55416 |
7927c1ad90cc70d04c89a1cce71bb9222475200e | 395 | ex | Elixir | apps/subs_services/lib/subs_services/application.ex | gitter-badger/opensubs.io | 76d5b4d355a530c8f496efe3ac2095d87f078997 | [
"MIT"
] | 36 | 2018-02-03T10:58:51.000Z | 2020-09-19T20:52:17.000Z | apps/subs_services/lib/subs_services/application.ex | joaquimadraz/subs | 9a26144ed660d5ece849ee447a9e5de53a311408 | [
"MIT"
] | 8 | 2018-01-17T17:15:48.000Z | 2020-07-06T08:56:54.000Z | apps/subs_services/lib/subs_services/application.ex | joaquimadraz/subs | 9a26144ed660d5ece849ee447a9e5de53a311408 | [
"MIT"
] | 10 | 2018-05-21T18:20:32.000Z | 2022-01-29T14:25:48.000Z | defmodule SubsServices.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
import Supervisor.Spec, warn: false
Supervisor.start_link([
worker(SubsServices.Store, ["services.json"])
], strategy: :one_for_one, name: SubsServices.Supervisor)
end
end
| 24.6875 | 61 | 0.734177 |
7927ca0c5c6484054028fa767744e1eb81c60ba1 | 1,137 | ex | Elixir | apps/theta_web/lib/theta_web/amp_cache.ex | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | null | null | null | apps/theta_web/lib/theta_web/amp_cache.ex | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | 11 | 2020-07-21T09:34:54.000Z | 2021-08-29T07:38:02.000Z | apps/theta_web/lib/theta_web/amp_cache.ex | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | null | null | null | defmodule ThetaWeb.AmpCache do
@moduledoc """
ThetaWeb.AmpCache.sign_url "huong-dan-drupal-7-20200304153313.html"
Read more: https://developers.google.com/amp/cache/update-cache
echo -n > url.txt "/update-cache/c/s/example.com/article?amp_action=flush&_ts=$(date +%s)" && cat url.txt | openssl dgst -sha256 -sign private-key.pem > signature.bin
"""
def sign_url(url_origin) do
prefix_amp = "www.theta.vn/amp/"
url_origin = "#{prefix_amp}#{url_origin}"
amp_ts =
DateTime.now!("Etc/UTC")
|> DateTime.to_unix()
url = "/update-cache/c/s/#{url_origin}?amp_action=flush&_ts=#{amp_ts}"
Path.join(:code.priv_dir(:theta_web), "cert/private-key.pem")
{:ok, rsa_private_key} =
File.read(Path.join(:code.priv_dir(:theta_web), "cert/private-key.pem"))
{:ok, signature} = RsaEx.sign(url, rsa_private_key)
{:ok, rsa_public_key} = File.read(Path.join(:code.priv_dir(:theta_web), "cert/apikey.pub"))
base64 = Base.url_encode64(signature, padding: false)
prefix = "https://www-theta-vn.cdn.ampproject.org"
"#{prefix}#{url}&_url_signature=#{base64}"
end
end
| 36.677419 | 173 | 0.67898 |
79280e44443d82f21bb5c5d6b37d81acbbc30ef1 | 25 | ex | Elixir | kv_store_mix/lib/kv_store_mix.ex | javierarilos/elixir_playground | 3148fd22ad67de45d9597747b7fcd453f74e70ae | [
"Apache-2.0"
] | null | null | null | kv_store_mix/lib/kv_store_mix.ex | javierarilos/elixir_playground | 3148fd22ad67de45d9597747b7fcd453f74e70ae | [
"Apache-2.0"
] | null | null | null | kv_store_mix/lib/kv_store_mix.ex | javierarilos/elixir_playground | 3148fd22ad67de45d9597747b7fcd453f74e70ae | [
"Apache-2.0"
] | null | null | null | defmodule KvStore do
end
| 8.333333 | 20 | 0.84 |
79281ca304c8fad87fc468f7ea92642857fcac35 | 67 | ex | Elixir | lib/fish_web/views/pow/registration_view.ex | wdiechmann/fish | b63fe109bbfc1cbe515ac31f9adcd9b57c6b21c8 | [
"MIT"
] | 1 | 2021-02-09T23:49:40.000Z | 2021-02-09T23:49:40.000Z | lib/fish_web/views/pow/registration_view.ex | wdiechmann/fish | b63fe109bbfc1cbe515ac31f9adcd9b57c6b21c8 | [
"MIT"
] | null | null | null | lib/fish_web/views/pow/registration_view.ex | wdiechmann/fish | b63fe109bbfc1cbe515ac31f9adcd9b57c6b21c8 | [
"MIT"
] | null | null | null | defmodule FishWeb.Pow.RegistrationView do
use FishWeb, :view
end
| 16.75 | 41 | 0.80597 |
792847829326136eeac6bceae8349f3e3e79b5db | 2,631 | ex | Elixir | extended_example/lib/web/views/dashboard_view.ex | PJUllrich/event-sourcing-with-elixir | 7f70e6bc49d9d93f1d86513a1f358e41e07b8304 | [
"MIT"
] | 19 | 2020-10-08T14:05:30.000Z | 2022-03-18T08:43:11.000Z | extended_example/lib/web/views/dashboard_view.ex | PJUllrich/event-sourcing-with-elixir | 7f70e6bc49d9d93f1d86513a1f358e41e07b8304 | [
"MIT"
] | null | null | null | extended_example/lib/web/views/dashboard_view.ex | PJUllrich/event-sourcing-with-elixir | 7f70e6bc49d9d93f1d86513a1f358e41e07b8304 | [
"MIT"
] | 3 | 2021-02-19T08:31:58.000Z | 2021-12-09T05:28:55.000Z | defmodule Web.DashboardView do
use Web, :view
def status(%Vehicle{out_for_delivery: true}) do
render(Web.IconView, "badge.html",
color: "yellow",
label: "Out for Delivery",
icon: "fa-truck"
)
end
def status(%Vehicle{out_for_delivery: false}) do
render(Web.IconView, "badge.html", color: "teal", label: "Packing", icon: "fa-box-open")
end
def status(%{delivered_successfully: true}) do
render(Web.IconView, "badge.html", color: "green", label: "Delivered", icon: "fa-check")
end
def status(%{delivered_successfully: false}) do
render(Web.IconView, "badge.html",
color: "red",
label: "Delivery failed",
icon: "fa-times-circle"
)
end
def status(%{out_for_delivery: true}) do
render(Web.IconView, "badge.html",
color: "yellow",
label: "Out for Delivery",
icon: "fa-truck"
)
end
def status(%{scheduled_for: scheduled_for}) when not is_nil(scheduled_for) do
render(Web.IconView, "badge.html",
color: "teal",
label: "Scheduled",
icon: "fa-clock"
)
end
def status(_shipment) do
render(Web.IconView, "badge.html", color: "gray", label: "Registered", icon: "fa-plus")
end
def out_for_delivery_badge(%{delivered_successfully: true, delivering_vehicle: vehicle_id}) do
render(Web.IconView, "badge.html",
color: "green",
label: "Truck #{vehicle_id}",
icon: "fa-check"
)
end
def out_for_delivery_badge(%{delivered_successfully: false, delivering_vehicle: vehicle_id}) do
render(Web.IconView, "badge.html",
color: "red",
label: "Truck #{vehicle_id}",
icon: "fa-times-circle"
)
end
def out_for_delivery_badge(%{out_for_delivery: true, delivering_vehicle: vehicle_id}) do
render(Web.IconView, "badge.html",
color: "yellow",
label: "Truck #{vehicle_id}",
icon: "fa-truck"
)
end
def out_for_delivery_badge(%{scheduled_for_vehicle: vehicle_id}) when is_binary(vehicle_id) do
render(Web.IconView, "badge.html",
color: "teal",
label: "Truck #{vehicle_id}",
icon: "fa-clock"
)
end
def out_for_delivery_badge(_), do: pill(nil)
def delivered_badge(%{delivered_successfully: true}) do
render(Web.IconView, "badge.html",
color: "green",
label: "Delivered",
icon: "fa-check"
)
end
def delivered_badge(%{delivered_successfully: false}) do
render(Web.IconView, "badge.html",
color: "red",
label: "Delivery failed",
icon: "fa-times-circle"
)
end
def delivered_badge(%{delivered_successfully: nil}), do: pill(nil)
end
| 26.31 | 97 | 0.643101 |
79286637f6561d6115a7252e5b9a84386db12bfc | 3,038 | ex | Elixir | test/support/fixtures/contact_schema.ex | zoldar/absinthe | 72ff9f91fcc0a261f9965cf8120c7c72ff6e4c7c | [
"MIT"
] | 4,101 | 2016-03-02T03:49:20.000Z | 2022-03-31T05:46:01.000Z | test/support/fixtures/contact_schema.ex | zoldar/absinthe | 72ff9f91fcc0a261f9965cf8120c7c72ff6e4c7c | [
"MIT"
] | 889 | 2016-03-02T16:06:59.000Z | 2022-03-31T20:24:12.000Z | test/support/fixtures/contact_schema.ex | zoldar/absinthe | 72ff9f91fcc0a261f9965cf8120c7c72ff6e4c7c | [
"MIT"
] | 564 | 2016-03-02T07:49:59.000Z | 2022-03-06T14:40:59.000Z | defmodule Absinthe.Fixtures.ContactSchema do
use Absinthe.Schema
use Absinthe.Fixture
@bruce %{name: "Bruce", age: 35}
@others [
%{name: "Joe", age: 21},
%{name: "Jill", age: 43}
]
@business %{name: "Someplace", employee_count: 11}
query do
field :person,
type: :person,
resolve: fn _, _ ->
{:ok, @bruce}
end
field :contact,
type: :contact,
args: [
business: [type: :boolean, default_value: false]
],
resolve: fn
%{business: false}, _ ->
{:ok, %{entity: @bruce}}
%{business: true}, _ ->
{:ok, %{entity: @business}}
end
field :first_search_result,
type: :search_result,
resolve: fn _, _ ->
{:ok, @bruce}
end
field :search_results,
type: non_null(list_of(non_null(:search_result))),
resolve: fn _, _ ->
{:ok, [@bruce, @business]}
end
field :profile,
type: :person,
args: [name: [type: non_null(:string)]],
resolve: fn
%{name: "Bruce"}, _ ->
{:ok, @bruce}
_, _ ->
{:ok, nil}
end
end
mutation do
field :person,
type: :person,
args: [
profile: [type: :profile_input]
],
resolve: fn %{profile: profile} ->
# Return it like it's a person
{:ok, profile}
end
end
subscription do
end
input_object :profile_input do
description "The basic details for a person"
field :code, type: non_null(:string)
field :name, type: :string, description: "The person's name", default_value: "Janet"
field :age, type: :integer, description: "The person's age", default_value: 43
end
interface :named_entity do
description "A named entity"
field :name, type: :string
resolve_type fn
%{age: _}, _ ->
:person
%{employee_count: _}, _ ->
:business
end
end
object :person do
description "A person"
field :name, :string
field :age, :integer
field :address, :string, deprecate: "change of privacy policy"
field :others,
type: list_of(:person),
resolve: fn _, _ ->
{:ok, @others}
end
interface :named_entity
end
object :business do
description "A business"
field :name, :string
field :employee_count, :integer
interface :named_entity
end
union :search_result do
description "A search result"
types [:business, :person]
resolve_type fn
%{age: _}, _ ->
:person
%{employee_count: _}, _ ->
:business
end
end
object :contact do
field :entity, :named_entity
import_fields :contact_method
end
object :contact_method do
field :phone_number, :string
field :address, :string
end
scalar :name do
serialize &to_string/1
parse fn
%Absinthe.Blueprint.Input.String{} = string ->
string.value
_ ->
:error
end
end
object :unused do
field :an_unused_field, :string
end
end
| 19.350318 | 88 | 0.568466 |
7928701baa1f32eec66b5cf190b9774496894e47 | 268 | ex | Elixir | debian/mingw-w64-pdcurses.default.ex | mingw-deb/pdcurses | aa2dec102403fc95f433f22e856f5026557aba10 | [
"X11"
] | null | null | null | debian/mingw-w64-pdcurses.default.ex | mingw-deb/pdcurses | aa2dec102403fc95f433f22e856f5026557aba10 | [
"X11"
] | null | null | null | debian/mingw-w64-pdcurses.default.ex | mingw-deb/pdcurses | aa2dec102403fc95f433f22e856f5026557aba10 | [
"X11"
] | null | null | null | # Defaults for mingw-w64-pdcurses initscript
# sourced by /etc/init.d/mingw-w64-pdcurses
# installed at /etc/default/mingw-w64-pdcurses by the maintainer scripts
#
# This is a POSIX shell fragment
#
# Additional options that are passed to the Daemon.
DAEMON_OPTS=""
| 24.363636 | 72 | 0.768657 |
792873929a59528c5f42fe64a80da4079738841f | 1,929 | ex | Elixir | lib/mix/tasks/tail.ex | soe/ex_aws_custom | 64ed00b7a33c5956e96dbb18512043d2f45822a5 | [
"MIT",
"Unlicense"
] | null | null | null | lib/mix/tasks/tail.ex | soe/ex_aws_custom | 64ed00b7a33c5956e96dbb18512043d2f45822a5 | [
"MIT",
"Unlicense"
] | null | null | null | lib/mix/tasks/tail.ex | soe/ex_aws_custom | 64ed00b7a33c5956e96dbb18512043d2f45822a5 | [
"MIT",
"Unlicense"
] | null | null | null | defmodule Mix.Tasks.Kinesis.Tail do
alias ExAws.Kinesis
require Logger
use Mix.Task
@shortdoc "tails a stream"
@moduledoc """
Tails a Stream
## Usage
kinesis.tail [stream_name] [options]
## Options
--poll N Time in seconds between polling. Default: 5
--debug Sets debug_requests: true on ex_aws. Logs all kinesis requests
## Examples
$mix kinesis.tail Elixir.Jetstream.Messages.Timeseries
$mix kinesis.tail logs --debug --poll 10
"""
def run(argv) do
{:ok, _} = Application.ensure_all_started(:ex_aws)
{opts, [stream_name|_], _} = OptionParser.parse(argv)
sleep_time = Keyword.get(opts, :poll, "5") |> String.to_integer
debug = Keyword.get(opts, :debug, false)
Application.put_env(:ex_aws, :debug_requests, debug)
Application.put_env(:ex_aws, :kinesis_namespace, nil)
Logger.info "Streaming from #{stream_name |> ExAws.Config.namespace(:kinesis)}"
stream_name
|> get_shards
|> Enum.map(&Kinesis.get_shard_iterator(stream_name, &1["ShardId"], "LATEST"))
|> Enum.map(&get_records(&1, sleep_time))
end
def get_shards(name) do
case Kinesis.Lazy.describe_stream(name) do
{:ok, %{"StreamDescription" => %{"Shards" => shards}}} -> shards
error -> raise error
end
end
def get_records({:ok, %{"ShardIterator" => iterator}}, wait_time) do
iterator
|> Kinesis.Lazy.get_records(%{}, fn
[] -> :timer.sleep(wait_time * 1000); []
val -> val
end)
|> Stream.map(&format_msg/1)
|> Stream.run
end
defp format_msg(msg) do
IO.ANSI.format_fragment([:blue, msg["PartitionKey"], :bright, " | ",
:reset, msg["Data"] |> ensure_new_line ])
|> IO.chardata_to_string
|> IO.write
end
defp ensure_new_line(data) do
case String.last(data) do
"\n" -> data
_ -> [data, "\n"]
end
end
end
| 26.067568 | 84 | 0.620529 |
7928920cfe374a9997804e6fcd4171bbd6dc60ea | 440 | exs | Elixir | alchemist-server/test/api_test.exs | benjcal/vscode-elixir | ccf9457ebed29b80f8ac4f5e855cc9596d1546f6 | [
"MIT"
] | 221 | 2016-05-03T09:46:04.000Z | 2021-02-20T05:08:18.000Z | alchemist-server/test/api_test.exs | benjcal/vscode-elixir | ccf9457ebed29b80f8ac4f5e855cc9596d1546f6 | [
"MIT"
] | 104 | 2017-02-28T16:26:12.000Z | 2021-02-19T22:31:03.000Z | alchemist-server/test/api_test.exs | benjcal/vscode-elixir | ccf9457ebed29b80f8ac4f5e855cc9596d1546f6 | [
"MIT"
] | 59 | 2017-03-12T07:07:35.000Z | 2021-02-26T19:41:15.000Z | Code.require_file "test_helper.exs", __DIR__
Code.require_file "../lib/api/comp.exs", __DIR__
Code.require_file "../lib/api/docl.exs", __DIR__
defmodule APITest do
use ExUnit.Case, async: true
import ExUnit.CaptureIO
alias Alchemist.API
test "DOCL request" do
assert capture_io(fn ->
API.Docl.process(['defmodule', [], []])
end) =~ """
Defines a module given by name with the given contents.
"""
end
end
| 22 | 59 | 0.679545 |
7928a7930e1b2ae27a6e70c59ac1a6fa38975ed4 | 819 | ex | Elixir | apps/cashtrail/test/support/factory/helpers.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | 6 | 2020-05-02T01:12:24.000Z | 2020-06-23T12:31:32.000Z | apps/cashtrail/test/support/factory/helpers.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | 16 | 2020-05-02T00:07:51.000Z | 2021-06-11T01:54:11.000Z | apps/cashtrail/test/support/factory/helpers.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | null | null | null | defmodule Cashtrail.Factory.Helpers do
@moduledoc false
def forget(_, _, cardinality \\ :one)
def forget(struct, fields, cardinality) when is_list(fields),
do:
fields
|> Enum.reduce(struct, fn field, acc ->
forget(acc, field, cardinality)
end)
def forget(struct, field, cardinality) do
%{
struct
| field => %Ecto.Association.NotLoaded{
__field__: field,
__owner__: struct.__struct__,
__cardinality__: cardinality
}
}
end
def put_tenant(struct, %{tenant: tenant}) do
Ecto.put_meta(struct, prefix: Cashtrail.Entities.Tenants.to_prefix(tenant))
end
def put_tenant(struct, _), do: struct
def drop_tenant(%{tenant: _} = attrs) do
Map.drop(attrs, [:tenant])
end
def drop_tenant(attrs), do: attrs
end
| 22.75 | 79 | 0.641026 |
7928b9a42073d33e7c2ca35b0b13c3eea88604a5 | 6,049 | ex | Elixir | apps/nerves_hub_www/lib/nerves_hub_www_web/live/device_live/show.ex | acrogenesis/nerves_hub_web | 27f651dd64b40a034254b50805884f4efd679957 | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www_web/live/device_live/show.ex | acrogenesis/nerves_hub_web | 27f651dd64b40a034254b50805884f4efd679957 | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www_web/live/device_live/show.ex | acrogenesis/nerves_hub_web | 27f651dd64b40a034254b50805884f4efd679957 | [
"Apache-2.0"
] | null | null | null | defmodule NervesHubWWWWeb.DeviceLive.Show do
use NervesHubWWWWeb, :live_view
alias NervesHubDevice.Presence
alias NervesHubWebCore.{Accounts, AuditLogs, Devices, Devices.Device, Repo, Products}
alias Phoenix.Socket.Broadcast
def render(assigns) do
NervesHubWWWWeb.DeviceView.render("show.html", assigns)
end
def mount(
_params,
%{
"auth_user_id" => user_id,
"org_id" => org_id,
"product_id" => product_id,
"device_id" => device_id
},
socket
) do
socket =
socket
|> assign_new(:user, fn -> Accounts.get_user!(user_id) end)
|> assign_new(:org, fn -> Accounts.get_org!(org_id) end)
|> assign_new(:product, fn -> Products.get_product!(product_id) end)
|> assign_new(:device, fn -> Devices.get_device!(device_id) end)
if connected?(socket) do
socket.endpoint.subscribe("device:#{socket.assigns.device.id}")
socket.endpoint.subscribe("product:#{product_id}:devices")
end
socket =
socket
|> assign(:device, sync_device(socket.assigns.device))
|> audit_log_assigns()
{:ok, socket}
rescue
e ->
socket_error(socket, live_view_error(e))
end
# Catch-all to handle when LV sessions change.
# Typically this is after a deploy when the
# session structure in the module has changed
# for mount/3
def mount(_, _, socket) do
socket_error(socket, live_view_error(:update))
end
def handle_info(
%Broadcast{event: "presence_diff", payload: payload},
%{assigns: %{device: device}} = socket
) do
{:noreply, assign(socket, :device, sync_device(device, payload))}
end
# Ignore unknown messages
def handle_info(_unknown, socket), do: {:noreply, socket}
def handle_event("reboot", _value, %{assigns: %{device: device, user: user}} = socket) do
user = Repo.preload(user, :org_users)
case Enum.find(user.org_users, &(&1.org_id == device.org_id)) do
%{role: :admin} -> do_reboot(socket, :allowed)
_ -> do_reboot(socket, :blocked)
end
end
def handle_event(
"paginate",
%{"page" => page_num},
%{assigns: %{audit_log_ids: ids, paginate_opts: paginate_opts}} = socket
) do
# This LiveView stores an array of all its audit log's ids. On paginate
# call, it gets the the index offset based on the page it is currently on
# then slices out the number of ids equal to the set page_size starting
# at that index. Then we query AuditLogs for only those specific records
page_num = String.to_integer(page_num)
start_index = (page_num - 1) * paginate_opts.page_size
audit_logs = Enum.slice(ids, start_index, paginate_opts.page_size) |> AuditLogs.from_ids()
socket =
socket
|> assign(:audit_logs, audit_logs)
|> assign(:paginate_opts, %{paginate_opts | page_number: page_num})
{:noreply, socket}
end
def handle_event(
"toggle_health_state",
_params,
%{assigns: %{device: device, user: user}} = socket
) do
params = %{healthy: !device.healthy}
socket =
case Devices.update_device(device, params) do
{:ok, updated_device} ->
AuditLogs.audit!(user, device, :update, params)
assign(socket, :device, updated_device)
{:error, _changeset} ->
put_flash(socket, :error, "Failed to mark health state")
end
{:noreply, socket}
end
defp audit_log_assigns(%{assigns: %{device: device}} = socket) do
all_logs = AuditLogs.logs_for_feed(device)
paginate_opts = %{page_number: 1, page_size: 10}
socket
|> assign(:audit_logs, Enum.slice(all_logs, 0, paginate_opts.page_size))
|> assign(:audit_log_ids, Enum.map(all_logs, & &1.id))
|> assign(:paginate_opts, paginate_opts)
|> assign(:resource_id, device.id)
end
defp do_reboot(socket, :allowed) do
AuditLogs.audit!(socket.assigns.user, socket.assigns.device, :update, %{reboot: true})
socket.endpoint.broadcast_from(self(), "device:#{socket.assigns.device.id}", "reboot", %{})
socket =
socket
|> put_flash(:info, "Device Reboot Requested")
|> assign(:device, %{socket.assigns.device | status: "reboot-requested"})
{:noreply, socket}
end
defp do_reboot(socket, :blocked) do
msg = "User not authorized to reboot this device"
AuditLogs.audit!(socket.assigns.user, socket.assigns.device, :update, %{
reboot: false,
message: msg
})
socket =
socket
|> put_flash(:error, msg)
|> assign(:device, %{socket.assigns.device | status: "reboot-blocked"})
{:noreply, socket}
end
defp sync_device(device, payload \\ nil)
defp sync_device(%{device: device}, payload), do: sync_device(device, payload)
defp sync_device(%{assigns: %{device: device}}, payload), do: sync_device(device, payload)
defp sync_device(%Device{id: id} = device, nil) do
joins = Map.put(%{}, to_string(id), Presence.find(device))
sync_device(device, %{joins: joins})
end
defp sync_device(%Device{id: id} = device, payload) when is_map(payload) do
id = to_string(id)
joins = Map.get(payload, :joins, %{})
leaves = Map.get(payload, :leaves, %{})
cond do
meta = joins[id] ->
updates =
Map.take(meta, [
:console_available,
:firmware_metadata,
:fwup_progress,
:last_communication,
:status
])
Map.merge(device, updates)
leaves[id] ->
# We're counting a device leaving as its last_communication. This is
# slightly inaccurate to set here, but only by a minuscule amount
# and saves DB calls and broadcasts
disconnect_time = DateTime.truncate(DateTime.utc_now(), :second)
device
|> Map.put(:console_available, false)
|> Map.put(:fwup_progress, nil)
|> Map.put(:last_communication, disconnect_time)
|> Map.put(:status, "offline")
true ->
device
end
end
end
| 30.396985 | 95 | 0.637626 |
7928d10bb2bd69dd128744d5b9c3d06685b271dc | 43 | ex | Elixir | lib/ttf_auth.ex | Waasi/keylex | f5220e310d3a7d474f4aa8ca5f462879d80e66c1 | [
"MIT"
] | 1 | 2017-10-20T00:55:04.000Z | 2017-10-20T00:55:04.000Z | lib/ttf_auth.ex | Waasi/ttf_auth | f5220e310d3a7d474f4aa8ca5f462879d80e66c1 | [
"MIT"
] | null | null | null | lib/ttf_auth.ex | Waasi/ttf_auth | f5220e310d3a7d474f4aa8ca5f462879d80e66c1 | [
"MIT"
] | null | null | null | defmodule Keylex do
@moduledoc false
end
| 10.75 | 19 | 0.790698 |
792923716c2d71c82640dc9a625bdd43292361a6 | 1,938 | ex | Elixir | lib/dark_matter/decimals/comparison.ex | dark-elixir/dark_matter | 3f70edf4220ad1c066489110ef30880a143522fd | [
"Apache-2.0"
] | 2 | 2020-12-01T21:33:44.000Z | 2021-05-29T14:51:18.000Z | lib/dark_matter/decimals/comparison.ex | dark-elixir/dark_matter | 3f70edf4220ad1c066489110ef30880a143522fd | [
"Apache-2.0"
] | null | null | null | lib/dark_matter/decimals/comparison.ex | dark-elixir/dark_matter | 3f70edf4220ad1c066489110ef30880a143522fd | [
"Apache-2.0"
] | 2 | 2020-09-02T14:36:58.000Z | 2021-04-22T11:20:43.000Z | defmodule DarkMatter.Decimals.Comparison do
@moduledoc """
Decimal comparison functions
"""
@moduledoc since: "1.0.8"
import DarkMatter.Guards, only: [is_numeric: 1]
alias DarkMatter.Decimals.Conversion
alias DarkMatter.Deps
@type comparison() :: :eq | :gt | :lt
@doc """
Determines if two `t:DarkMatter.DarkMatter.numeric/0` are equivalent.
"""
@spec decimal_equal?(DarkMatter.numeric(), DarkMatter.numeric()) :: boolean()
def decimal_equal?(x, y) do
decimal_compare(x, y) == :eq
end
@doc """
Compare two decimals `x` and `y`.
"""
@spec decimal_compare(DarkMatter.numeric(), DarkMatter.numeric()) :: comparison()
cond do
Deps.version_match?(:decimal, ">= 2.0.0") ->
def decimal_compare(x, y) when is_numeric(x) and is_numeric(y) do
Decimal.compare(Conversion.cast_decimal(x), Conversion.cast_decimal(y))
end
Deps.version_match?(:decimal, ">= 1.0.0") ->
@spec decimal_compare(DarkMatter.numeric(), DarkMatter.numeric()) :: comparison()
def decimal_compare(x, y) when is_numeric(x) and is_numeric(y) do
Decimal.cmp(Conversion.cast_decimal(x), Conversion.cast_decimal(y))
end
end
@doc """
Parse a given `binary` into a `t:Decimal.t/0`.
"""
@spec decimal_parse(String.t()) :: Decimal.t() | :error
cond do
Deps.version_match?(:decimal, ">= 2.0.0") ->
def decimal_parse(binary) when is_binary(binary) do
case Decimal.parse(binary) do
{%Decimal{} = normal_decimal, ""} -> normal_decimal
{%Decimal{} = _normal_decimal, _rounding} -> :error
end
end
Deps.version_match?(:decimal, ">= 1.0.0") ->
@spec decimal_parse(String.t()) :: Decimal.t() | :error
def decimal_parse(binary) when is_binary(binary) do
case Decimal.parse(binary) do
{:ok, %Decimal{} = normal_decimal} -> normal_decimal
:error -> :error
end
end
end
end
| 31.258065 | 87 | 0.638803 |
79292f746f5858ea5e8754480892a3fd8b09a25f | 1,397 | exs | Elixir | test/adapters/sendgrid_test.exs | edisonywh/receivex | 605fe45e8c8e0d9c29cc04f54918839c4c79656d | [
"MIT"
] | null | null | null | test/adapters/sendgrid_test.exs | edisonywh/receivex | 605fe45e8c8e0d9c29cc04f54918839c4c79656d | [
"MIT"
] | null | null | null | test/adapters/sendgrid_test.exs | edisonywh/receivex | 605fe45e8c8e0d9c29cc04f54918839c4c79656d | [
"MIT"
] | null | null | null | defmodule Receivex.Adapter.SendgridTest do
use ExUnit.Case
use Plug.Test
alias Receivex.Adapter
defp setup_webhook do
conn = conn(:post, "/_incoming", "raw_body")
params = "./test/fixtures/sendgrid.json" |> File.read!() |> Jason.decode!()
%{conn | body_params: params}
end
test "processes valid webhook" do
conn = setup_webhook()
{:ok, _conn} = Adapter.Sendgrid.handle_webhook(conn, TestProcessor, [])
assert_receive {:email, %Receivex.Email{}}
end
test "normalizes email" do
params = Jason.decode!(File.read!("./test/fixtures/sendgrid.json"))
assert %Receivex.Email{
from: {"Sender Name", "[email protected]"},
html:
"<div dir=\"ltr\">Here's an email with multiple attachments<div><br></div><div><img src=\"cid:ii_1562e2169c132d83\" alt=\"Inline image 1\" width=\"455\" height=\"544\"><br clear=\"all\"><div><br></div>-- <br><div class=\"gmail_signature\" data-smartmail=\"gmail_signature\"><div dir=\"ltr\"><img src=\"https: //sendgrid.com/brand/sg-logo-email.png\" width=\"96\" height=\"17\"><br><div><br></div></div></div>\n</div></div>",
sender: nil,
subject: "Different File Types",
text: "Here's an email with multiple attachments",
to: [{nil, "[email protected]"}]
} == Adapter.Sendgrid.normalize_params(params)
end
end
| 38.805556 | 443 | 0.624195 |
792934843426c292fea2747fffefd497b892e325 | 571 | ex | Elixir | 2020/elixir/key_value_store/lib/server.ex | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | 2020/elixir/key_value_store/lib/server.ex | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | 2020/elixir/key_value_store/lib/server.ex | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | defmodule KeyValueStore.Server do
alias KeyValueStore.Core
def start do
spawn(fn ->
run(Core.new())
end)
end
def run(store) do
store
|> listen
|> run
end
def listen(store) do
receive do
{:put, key, value} ->
Core.add_or_update(store, key, value)
{:delete, key} ->
Core.delete(store, key)
{:get, pid, key} ->
value = Core.retrieve(store, key)
send(pid, {:value, value})
store
{:state, pid} ->
send(pid, {:store, store})
store
end
end
end
| 16.314286 | 45 | 0.530648 |
79298b6f107e85e179ee959ef5e7a6dc565d76d8 | 5,020 | exs | Elixir | config/runtime.exs | aboutphilippe/papercups | 30364cc562f41cd9d5a7ca5357b16ab8484bd5c9 | [
"MIT"
] | null | null | null | config/runtime.exs | aboutphilippe/papercups | 30364cc562f41cd9d5a7ca5357b16ab8484bd5c9 | [
"MIT"
] | null | null | null | config/runtime.exs | aboutphilippe/papercups | 30364cc562f41cd9d5a7ca5357b16ab8484bd5c9 | [
"MIT"
] | null | null | null | import Config
require_db_ssl =
case System.get_env("REQUIRE_DB_SSL") do
"true" -> true
"false" -> false
_ -> true
end
socket_options =
case System.get_env("USE_IP_V6") do
"true" -> [:inet6]
"false" -> [:inet]
_ -> [:inet]
end
pool_size = String.to_integer(System.get_env("POOL_SIZE") || "10")
# Production-only configuration: required env vars are validated eagerly so a
# misconfigured release fails at boot with a clear message.
if config_env() === :prod do
  database_url =
    System.get_env("DATABASE_URL") ||
      raise """
      environment variable DATABASE_URL is missing.
      For example: ecto://USER:PASS@HOST/DATABASE
      """

  secret_key_base =
    System.get_env("SECRET_KEY_BASE") ||
      raise """
      environment variable SECRET_KEY_BASE is missing.
      You can generate one by calling: mix phx.gen.secret
      """

  backend_url =
    System.get_env("BACKEND_URL") ||
      raise """
      environment variable BACKEND_URL is missing.
      For example: myselfhostedwebsite.com or papercups.io
      """

  # Configure your database
  config :chat_api, ChatApi.Repo,
    ssl: require_db_ssl,
    url: database_url,
    show_sensitive_data_on_connection_error: false,
    socket_options: socket_options,
    pool_size: pool_size

  # HTTPS is enabled only when BOTH the key and certificate paths are set.
  ssl_key_path = System.get_env("SSL_KEY_PATH")
  ssl_cert_path = System.get_env("SSL_CERT_PATH")
  https = (ssl_cert_path && ssl_key_path) != nil
  port = String.to_integer(System.get_env("PORT") || "4000")

  config :chat_api, ChatApiWeb.Endpoint,
    http: [
      port: port,
      compress: true,
      transport_options: [socket_opts: [:inet6]]
    ],
    url: [host: backend_url],
    pubsub_server: ChatApi.PubSub,
    secret_key_base: secret_key_base,
    server: true,
    check_origin: false

  if https do
    config :chat_api, ChatApiWeb.Endpoint,
      https: [
        port: 443,
        cipher_suite: :strong,
        otp_app: :chat_api,
        keyfile: ssl_key_path,
        certfile: ssl_cert_path
      ],
      force_ssl: [rewrite_on: [:x_forwarded_proto]]
  end
end
# Optional
sentry_dsn = System.get_env("SENTRY_DSN")
mailer_adapter = System.get_env("MAILER_ADAPTER", "Swoosh.Adapters.Local")

# Configure Sentry
# With a nil DSN, the Sentry client is effectively inert.
config :sentry,
  dsn: sentry_dsn,
  environment_name: config_env(),
  included_environments: [:prod],
  enable_source_code_context: true,
  root_source_code_path: File.cwd!()

config :logger,
  backends: [:console, Sentry.LoggerBackend]

config :logger, Sentry.LoggerBackend,
  # Also send warn messages
  level: :warn,
  # Send messages from Plug/Cowboy
  excluded_domains: [],
  # Send messages like `Logger.error("error")` to Sentry
  capture_log_messages: true
# Select the outgoing-mail adapter from MAILER_ADAPTER (default: local mailbox).
case mailer_adapter do
  "Swoosh.Adapters.Mailgun" ->
    config :chat_api, ChatApi.Mailers,
      adapter: Swoosh.Adapters.Mailgun,
      api_key: System.get_env("MAILGUN_API_KEY"),
      # Domain is the email address that mailgun is sent from
      domain: System.get_env("DOMAIN")

  "Swoosh.Adapters.SMTP" ->
    # NOTE(review): several values here come straight from the environment as
    # strings ("25" for port, etc.); in particular
    # `System.get_env("SMTP_HOST_SSL_ENABLED") || false` yields the *string*
    # "false" (which is truthy) when the var is set to "false" — confirm the
    # adapter coerces these, otherwise compare against "true" explicitly as is
    # done for LOCAL_SERVE_MAILBOX below.
    config :chat_api, ChatApi.Mailers,
      adapter: Swoosh.Adapters.SMTP,
      relay: System.get_env("SMTP_HOST_ADDR", "mail"),
      port: System.get_env("SMTP_HOST_PORT", "25"),
      username: System.get_env("SMTP_USER_NAME"),
      password: System.get_env("SMTP_USER_PWD"),
      ssl: System.get_env("SMTP_HOST_SSL_ENABLED") || false,
      tls: :if_available,
      retries: System.get_env("SMTP_RETRIES") || 2,
      no_mx_lookups: System.get_env("SMTP_MX_LOOKUPS_ENABLED") || true

  "Swoosh.Adapters.Local" ->
    config :swoosh,
      serve_mailbox: System.get_env("LOCAL_SERVE_MAILBOX", "false") == "true",
      preview_port: System.get_env("LOCAL_MAILBOX_PREVIEW_PORT", "1234") |> String.to_integer()

    config :chat_api, ChatApi.Mailers, adapter: Swoosh.Adapters.Local

  _ ->
    raise "Unknown mailer_adapter; expected Swoosh.Adapters.Mailgun or Swoosh.Adapters.SMTP"
end
# customer.io credentials (optional; nil values leave the client unconfigured).
site_id = System.get_env("CUSTOMER_IO_SITE_ID")
customerio_api_key = System.get_env("CUSTOMER_IO_API_KEY")

config :customerio,
  site_id: site_id,
  api_key: customerio_api_key

# AWS credentials and bucket/lambda settings for file uploads and functions.
aws_key_id = System.get_env("AWS_ACCESS_KEY_ID")
aws_secret_key = System.get_env("AWS_SECRET_ACCESS_KEY")
bucket_name = System.get_env("BUCKET_NAME", "papercups-files")
region = System.get_env("AWS_REGION")
function_bucket_name = System.get_env("FUNCTION_BUCKET_NAME", "")
function_role = System.get_env("FUNCTION_ROLE", "")
aws_account_id = System.get_env("AWS_ACCOUNT_ID", "")

config :chat_api,
  bucket_name: bucket_name,
  region: region,
  function_bucket_name: function_bucket_name,
  aws_account_id: aws_account_id,
  function_role: function_role

config :ex_aws,
  access_key_id: aws_key_id,
  secret_access_key: aws_secret_key,
  region: region,
  s3: [
    scheme: "https://",
    region: region
  ]

# AppSignal is only activated when an API key is present.
if System.get_env("APPSIGNAL_API_KEY") do
  config :appsignal, :config,
    otp_app: :chat_api,
    name: "chat_api",
    push_api_key: System.get_env("APPSIGNAL_API_KEY"),
    env: Mix.env(),
    active: true
end

# Stripe is only configured when the secret looks like a real key ("sk_...").
case System.get_env("PAPERCUPS_STRIPE_SECRET") do
  "sk_" <> _rest = api_key ->
    config :stripity_stripe, api_key: api_key

  _ ->
    nil
end
792991462daf48e42d6e3f9a4b71168e06a9a5ae | 1,526 | ex | Elixir | test/support/data_case.ex | enlego/hasher | 43b528059119971ffbca6538ffb6713fb78e077f | [
"MIT"
] | null | null | null | test/support/data_case.ex | enlego/hasher | 43b528059119971ffbca6538ffb6713fb78e077f | [
"MIT"
] | null | null | null | test/support/data_case.ex | enlego/hasher | 43b528059119971ffbca6538ffb6713fb78e077f | [
"MIT"
] | null | null | null | defmodule Hasher.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use Hasher.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Hasher.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Hasher.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Hasher.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Hasher.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
| 27.25 | 77 | 0.687418 |
79299e9ddb8246b4b75011a25cb2cfcf5301c043 | 1,358 | ex | Elixir | web/gettext.ex | ikeikeikeike/panglao | 6d3f6515d9f1ceb9a2e771ae2d54c222cedbf538 | [
"MIT"
] | 1 | 2017-02-18T21:20:17.000Z | 2017-02-18T21:20:17.000Z | web/gettext.ex | ikeikeikeike/panglao | 6d3f6515d9f1ceb9a2e771ae2d54c222cedbf538 | [
"MIT"
] | null | null | null | web/gettext.ex | ikeikeikeike/panglao | 6d3f6515d9f1ceb9a2e771ae2d54c222cedbf538 | [
"MIT"
] | null | null | null | defmodule Panglao.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import Panglao.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :panglao
def default_locale do
Application.get_env(:panglao, Panglao.Gettext)[:default_locale] || "ja"
end
def find_locale(language_tag) do
[language | _] =
language_tag
|> String.downcase
|> String.split("-", parts: 2)
Gettext.known_locales(__MODULE__)
if language in Gettext.known_locales(__MODULE__) do
language
else
nil
end
end
def supported_locales do
known = Gettext.known_locales(Panglao.Gettext)
allowed = Application.get_env(:panglao, Panglao.Gettext)[:locales]
MapSet.intersection(Enum.into(known, MapSet.new), Enum.into(allowed, MapSet.new))
|> MapSet.to_list
end
end
| 25.148148 | 85 | 0.67894 |
7929a386e3a6f7383bbfe970e052348014240b05 | 2,198 | exs | Elixir | test/test_helper.exs | superhawk610/oban | 9e87ca0e45451efee05db1d430739d348c67acdb | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | superhawk610/oban | 9e87ca0e45451efee05db1d430739d348c67acdb | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | superhawk610/oban | 9e87ca0e45451efee05db1d430739d348c67acdb | [
"Apache-2.0"
] | null | null | null | Logger.configure(level: :warn)
ExUnit.start()
Oban.Test.Repo.start_link()
defmodule Oban.Case do
@moduledoc false
use ExUnit.CaseTemplate
alias Oban.Integration.Worker
alias Oban.Job
alias Oban.Test.Repo
using do
quote do
use ExUnitProperties
import Oban.Case
alias Oban.Integration.Worker
alias Oban.{Config, Job}
alias Repo
end
end
setup tags do
# We are intentionally avoiding Sandbox mode for testing. Within Sandbox mode everything
# happens in a transaction, which prevents the use of LISTEN/NOTIFY messages.
if tags[:integration] do
Repo.delete_all(Job)
Repo.delete_all(Job, prefix: "private")
on_exit(fn ->
Repo.delete_all(Job)
Repo.delete_all(Job, prefix: "private")
end)
end
{:ok, %{}}
end
def start_supervised_oban!(opts) do
opts =
opts
|> Keyword.put_new(:name, Oban)
|> Keyword.put_new(:repo, Repo)
|> Keyword.put_new(:poll_interval, 25)
|> Keyword.put_new(:shutdown_grace_period, 1)
start_supervised!({Oban, opts}, id: opts[:name])
end
def build(args, opts \\ []) do
if opts[:worker] do
Job.new(args, opts)
else
Worker.new(args, opts)
end
end
def insert!(args, opts \\ []) do
args
|> build(opts)
|> Repo.insert!()
end
def seconds_from_now(seconds) do
DateTime.add(DateTime.utc_now(), seconds, :second)
end
def seconds_ago(seconds) do
DateTime.add(DateTime.utc_now(), -seconds)
end
def with_backoff(opts \\ [], fun) do
total = Keyword.get(opts, :total, 100)
sleep = Keyword.get(opts, :sleep, 10)
with_backoff(fun, 0, total, sleep)
end
def with_backoff(fun, count, total, sleep) do
fun.()
rescue
exception in [ExUnit.AssertionError] ->
if count < total do
Process.sleep(sleep)
with_backoff(fun, count + 1, total, sleep)
else
reraise(exception, System.stacktrace())
end
end
def mangle_jobs_table! do
Repo.query!("ALTER TABLE oban_jobs RENAME TO oban_missing")
end
def reform_jobs_table! do
Repo.query!("ALTER TABLE oban_missing RENAME TO oban_jobs")
end
end
| 20.933333 | 92 | 0.644677 |
7929a58f4ba38f84023113e4903b0035ff723184 | 1,069 | exs | Elixir | mix.exs | suranyami/google_search_ex | 8924631dd1379e0147723077c540ab03d1514ef3 | [
"MIT"
] | null | null | null | mix.exs | suranyami/google_search_ex | 8924631dd1379e0147723077c540ab03d1514ef3 | [
"MIT"
] | null | null | null | mix.exs | suranyami/google_search_ex | 8924631dd1379e0147723077c540ab03d1514ef3 | [
"MIT"
] | null | null | null | defmodule GoogleSearchEx.Mixfile do
use Mix.Project
def project do
[app: :google_search_ex,
description: "An Elixir library for searching with Google.",
version: "0.0.1",
elixir: "~> 1.0",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
package: package,
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type `mix help deps` for more examples and options
defp deps do
[]
end
defp package do
[# These are the default files included in the package
files: ["lib", "priv", "mix.exs", "README*", "readme*", "LICENSE*", "license*"],
contributors: ["David Parry"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/suranyami/google_search_ex"}]
end
end
| 24.295455 | 83 | 0.626754 |
7929a623066103c8010878bebcf48ac05f063072 | 1,833 | exs | Elixir | clients/cloud_build/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | clients/cloud_build/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | clients/cloud_build/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudBuild.Mixfile do
use Mix.Project
@version "0.44.0"
def project() do
[
app: :google_api_cloud_build,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/cloud_build"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Cloud Build API client library. Creates and manages builds on Google Cloud Platform.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/cloud_build",
"Homepage" => "https://cloud.google.com/cloud-build/docs/"
}
]
end
end
| 27.358209 | 102 | 0.654664 |
7929af97bdc1e32e41cd4e35d8acbc23e7320d89 | 391 | ex | Elixir | lib/nectar/route_extender.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 356 | 2016-03-16T12:37:28.000Z | 2021-12-18T03:22:39.000Z | lib/nectar/route_extender.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 30 | 2016-03-16T09:19:10.000Z | 2021-01-12T08:10:52.000Z | lib/nectar/route_extender.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 72 | 2016-03-16T13:32:14.000Z | 2021-03-23T11:27:43.000Z | defmodule Nectar.RouteExtender do
defmacro __using__(_opts) do
case Code.ensure_loaded(ExtensionsManager.Router) do
{:module, module} -> mount_router(module)
{:error, _reason} -> do_nothing()
end
end
def mount_router(module) do
quote do
require unquote(module)
unquote(module).mount
end
end
def do_nothing do
quote do
end
end
end
| 19.55 | 56 | 0.672634 |
792a0e9c6f699ddc7e3c4f613f03dead61554d1f | 1,536 | ex | Elixir | web/controllers/region_controller.ex | rob05c/tox | f54847ca058ad24b909341ad65d595a4069d2471 | [
"Apache-2.0"
] | 2 | 2016-11-16T17:24:21.000Z | 2019-02-15T05:38:27.000Z | web/controllers/region_controller.ex | rob05c/tox | f54847ca058ad24b909341ad65d595a4069d2471 | [
"Apache-2.0"
] | null | null | null | web/controllers/region_controller.ex | rob05c/tox | f54847ca058ad24b909341ad65d595a4069d2471 | [
"Apache-2.0"
] | null | null | null | defmodule Tox.RegionController do
use Tox.Web, :controller
alias Tox.Region
def index(conn, _params) do
regions = Repo.all(Region)
render(conn, "index.json", regions: regions)
end
def create(conn, %{"region" => region_params}) do
changeset = Region.changeset(%Region{}, region_params)
case Repo.insert(changeset) do
{:ok, region} ->
conn
|> put_status(:created)
|> put_resp_header("location", region_path(conn, :show, region))
|> render("show.json", region: region)
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(Tox.ChangesetView, "error.json", changeset: changeset)
end
end
def show(conn, %{"id" => id}) do
region = Repo.get!(Region, id)
render(conn, "show.json", region: region)
end
def update(conn, %{"id" => id, "region" => region_params}) do
region = Repo.get!(Region, id)
changeset = Region.changeset(region, region_params)
case Repo.update(changeset) do
{:ok, region} ->
render(conn, "show.json", region: region)
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(Tox.ChangesetView, "error.json", changeset: changeset)
end
end
def delete(conn, %{"id" => id}) do
region = Repo.get!(Region, id)
# Here we use delete! (with a bang) because we expect
# it to always work (and if it does not, it will raise).
Repo.delete!(region)
send_resp(conn, :no_content, "")
end
end
| 27.428571 | 72 | 0.61849 |
792a14f92af6d9911aabc44958f5aff03af72c9c | 820 | ex | Elixir | chapter2/exlivery/lib/orders/order.ex | mCodex/rocketseat-ignite-elixir | bdb48db778c36b2325c75a41b4d6f7ef77b03cf5 | [
"MIT"
] | 1 | 2021-07-23T19:48:27.000Z | 2021-07-23T19:48:27.000Z | chapter2/exlivery/lib/orders/order.ex | mCodex/rocketseat-ignite-elixir | bdb48db778c36b2325c75a41b4d6f7ef77b03cf5 | [
"MIT"
] | null | null | null | chapter2/exlivery/lib/orders/order.ex | mCodex/rocketseat-ignite-elixir | bdb48db778c36b2325c75a41b4d6f7ef77b03cf5 | [
"MIT"
] | null | null | null | defmodule Exlivery.Orders.Order do
alias Exlivery.Orders.Item
alias Exlivery.Users.User
@keys [:user_cpf, :delivery_address, :items, :total_price]
@enforce_keys @keys
defstruct @keys
def build(
%User{
cpf: cpf,
address: address
},
[%Item{} | _items] = items
) do
{:ok,
%__MODULE__{
user_cpf: cpf,
delivery_address: address,
items: items,
total_price: calculate_total_price(items)
}}
end
def build(_user, _items), do: {:error, "Invalid Parameters"}
defp calculate_total_price(items) do
Enum.reduce(items, Decimal.new("0.00"), &sum_prices(&1, &2))
end
defp sum_prices(%Item{unity_price: price, quantity: quantity}, acc) do
price
|> Decimal.mult(quantity)
|> Decimal.add(acc)
end
end
| 21.025641 | 72 | 0.620732 |
792a4ca59efb11590ebf284ca3d86a4757acd53e | 7,433 | ex | Elixir | clients/speech/lib/google_api/speech/v1/model/recognition_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/speech/lib/google_api/speech/v1/model/recognition_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/speech/lib/google_api/speech/v1/model/recognition_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Speech.V1.Model.RecognitionConfig do
@moduledoc """
Provides information to the recognizer that specifies how to process the request.
## Attributes
- enableAutomaticPunctuation (boolean()): *Optional* If 'true', adds punctuation to recognition result hypotheses. This feature is only available in select languages. Setting this for requests in other languages has no effect at all. The default 'false' value does not add punctuation to result hypotheses. Note: This is currently offered as an experimental service, complimentary to all users. In the future this may be exclusively available as a premium feature. Defaults to: `null`.
- enableWordTimeOffsets (boolean()): *Optional* If `true`, the top result includes a list of words and the start and end time offsets (timestamps) for those words. If `false`, no word-level time offset information is returned. The default is `false`. Defaults to: `null`.
- encoding (String.t): Encoding of audio data sent in all `RecognitionAudio` messages. This field is optional for `FLAC` and `WAV` audio files and required for all other audio formats. For details, see AudioEncoding. Defaults to: `null`.
- Enum - one of [ENCODING_UNSPECIFIED, LINEAR16, FLAC, MULAW, AMR, AMR_WB, OGG_OPUS, SPEEX_WITH_HEADER_BYTE]
- languageCode (String.t): *Required* The language of the supplied audio as a [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. Example: \"en-US\". See [Language Support](/speech-to-text/docs/languages) for a list of the currently supported language codes. Defaults to: `null`.
- maxAlternatives (integer()): *Optional* Maximum number of recognition hypotheses to be returned. Specifically, the maximum number of `SpeechRecognitionAlternative` messages within each `SpeechRecognitionResult`. The server may return fewer than `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will return a maximum of one. If omitted, will return a maximum of one. Defaults to: `null`.
- model (String.t): *Optional* Which model to select for the given request. Select the model best suited to your domain to get best results. If a model is not explicitly specified, then we auto-select a model based on the parameters in the RecognitionConfig. <table> <tr> <td><b>Model</b></td> <td><b>Description</b></td> </tr> <tr> <td><code>command_and_search</code></td> <td>Best for short queries such as voice commands or voice search.</td> </tr> <tr> <td><code>phone_call</code></td> <td>Best for audio that originated from a phone call (typically recorded at an 8khz sampling rate).</td> </tr> <tr> <td><code>video</code></td> <td>Best for audio that originated from from video or includes multiple speakers. Ideally the audio is recorded at a 16khz or greater sampling rate. This is a premium model that costs more than the standard rate.</td> </tr> <tr> <td><code>default</code></td> <td>Best for audio that is not one of the specific audio models. For example, long-form audio. Ideally the audio is high-fidelity, recorded at a 16khz or greater sampling rate.</td> </tr> </table> Defaults to: `null`.
- profanityFilter (boolean()): *Optional* If set to `true`, the server will attempt to filter out profanities, replacing all but the initial character in each filtered word with asterisks, e.g. \"f***\". If set to `false` or omitted, profanities won't be filtered out. Defaults to: `null`.
- sampleRateHertz (integer()): Sample rate in Hertz of the audio data sent in all `RecognitionAudio` messages. Valid values are: 8000-48000. 16000 is optimal. For best results, set the sampling rate of the audio source to 16000 Hz. If that's not possible, use the native sample rate of the audio source (instead of re-sampling). This field is optional for `FLAC` and `WAV` audio files and required for all other audio formats. For details, see AudioEncoding. Defaults to: `null`.
- speechContexts ([SpeechContext]): *Optional* array of SpeechContext. A means to provide context to assist the speech recognition. For more information, see [Phrase Hints](/speech-to-text/docs/basics#phrase-hints). Defaults to: `null`.
- useEnhanced (boolean()): *Optional* Set to true to use an enhanced model for speech recognition. If `use_enhanced` is set to true and the `model` field is not set, then an appropriate enhanced model is chosen if: 1. project is eligible for requesting enhanced models 2. an enhanced model exists for the audio If `use_enhanced` is true and an enhanced version of the specified model does not exist, then the speech is recognized using the standard version of the specified model. Enhanced speech models require that you opt-in to data logging using instructions in the [documentation](/speech-to-text/docs/enable-data-logging). If you set `use_enhanced` to true and you have not enabled audio logging, then you will receive an error. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:enableAutomaticPunctuation => any(),
:enableWordTimeOffsets => any(),
:encoding => any(),
:languageCode => any(),
:maxAlternatives => any(),
:model => any(),
:profanityFilter => any(),
:sampleRateHertz => any(),
:speechContexts => list(GoogleApi.Speech.V1.Model.SpeechContext.t()),
:useEnhanced => any()
}
field(:enableAutomaticPunctuation)
field(:enableWordTimeOffsets)
field(:encoding)
field(:languageCode)
field(:maxAlternatives)
field(:model)
field(:profanityFilter)
field(:sampleRateHertz)
field(:speechContexts, as: GoogleApi.Speech.V1.Model.SpeechContext, type: :list)
field(:useEnhanced)
end
defimpl Poison.Decoder, for: GoogleApi.Speech.V1.Model.RecognitionConfig do
  # Delegates to the model's generated decode/2 (from GoogleApi.Gax.ModelBase)
  # so registered fields are decoded into their struct types.
  def decode(value, options) do
    GoogleApi.Speech.V1.Model.RecognitionConfig.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Speech.V1.Model.RecognitionConfig do
  # Encodes the struct via the shared Gax model encoder, which serializes only
  # registered fields.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 97.802632 | 1,480 | 0.727432 |
792a5eb62252b36f4cd5bacc4b542a4d31cf0d77 | 307 | exs | Elixir | ListsAndRecursion/7.exs | RckMrkr/Programming-Elixir | 0aa6d8e3233d4631db54e4103a206bff126cb953 | [
"MIT"
] | null | null | null | ListsAndRecursion/7.exs | RckMrkr/Programming-Elixir | 0aa6d8e3233d4631db54e4103a206bff126cb953 | [
"MIT"
] | null | null | null | ListsAndRecursion/7.exs | RckMrkr/Programming-Elixir | 0aa6d8e3233d4631db54e4103a206bff126cb953 | [
"MIT"
] | null | null | null | defmodule Primes do
import MyList, only: [span: 2]
defp _isPrime(n, 1), do: true
defp _isPrime(n, test) when rem(n, test) == 0, do: false
defp _isPrime(n, test), do: _isPrime(n, test - 1)
defp isPrime(n), do: _isPrime(n, div(n, 2))
def allUpTo(n) do
for x <- span(2, n), isPrime(x), do: x
end
end | 25.583333 | 57 | 0.638436 |
792a69c77906ffc137d88d5180feb22eaee63dba | 5,631 | ex | Elixir | lib/imgproxy.ex | lucacorti/imgproxy | 28401af483a5ff77db01dc03cbd3bead2c2acd32 | [
"MIT"
] | null | null | null | lib/imgproxy.ex | lucacorti/imgproxy | 28401af483a5ff77db01dc03cbd3bead2c2acd32 | [
"MIT"
] | null | null | null | lib/imgproxy.ex | lucacorti/imgproxy | 28401af483a5ff77db01dc03cbd3bead2c2acd32 | [
"MIT"
] | null | null | null | defmodule Imgproxy do
@moduledoc """
`Imgproxy` generates urls for use with an [imgproxy](https://imgproxy.net) server.
"""
  # All fields default to nil except options (empty keyword list); prefix/key/
  # salt are populated from application config by `new/1`.
  defstruct source_url: nil, options: [], extension: nil, prefix: nil, key: nil, salt: nil

  alias __MODULE__

  @type t :: %__MODULE__{
          source_url: nil | String.t(),
          options: keyword(list()),
          extension: nil | String.t(),
          prefix: nil | String.t(),
          key: nil | String.t(),
          salt: nil | String.t()
        }

  @typedoc """
  A number of pixels to be used as a dimension.
  """
  @type dimension :: float() | integer() | String.t()

  @typedoc """
  Provide type and enlarge configuration arguments to a resize option.
  """
  @type resize_opts :: [
          type: String.t(),
          enlarge: boolean()
        ]
@doc """
Generate a new `t:Imgproxy.t/0` struct for the given image source URL.
"""
@spec new(String.t()) :: t()
def new(source_url) when is_binary(source_url) do
%Imgproxy{
source_url: source_url,
prefix: Application.get_env(:imgproxy, :prefix),
key: Application.get_env(:imgproxy, :key),
salt: Application.get_env(:imgproxy, :salt)
}
end
  @doc """
  Add a [formatting option](https://docs.imgproxy.net/generating_the_url_advanced) to the `t:Imgproxy.t/0`.

  For instance, to add the [padding](https://docs.imgproxy.net/generating_the_url_advanced?id=padding) option
  with a 10px padding on all sides, you can use:

      iex> img = Imgproxy.new("http://example.com/image.jpg")
      iex> Imgproxy.add_option(img, :padding, [10, 10, 10, 10]) |> to_string()
      "https://imgcdn.example.com/insecure/padding:10:10:10:10/aHR0cDovL2V4YW1wbGUuY29tL2ltYWdlLmpwZw"

  """
  @spec add_option(t(), atom(), list()) :: t()
  # Keyword.put/3 replaces any earlier value stored under `name`, so setting
  # the same option twice keeps only the latest arguments.
  def add_option(%Imgproxy{options: opts} = img, name, args)
      when is_atom(name) and is_list(args) do
    %Imgproxy{img | options: Keyword.put(opts, name, args)}
  end
@doc """
Set the [gravity](https://docs.imgproxy.net/generating_the_url_advanced?id=gravity) option.
"""
@spec set_gravity(t(), atom(), dimension(), dimension()) :: t()
def set_gravity(img, type, xoffset \\ 0, yoffset \\ 0)
def set_gravity(img, "sm", _xoffset, _yoffset) do
add_option(img, :g, [:sm])
end
def set_gravity(img, :sm, _xoffset, _yoffset) do
add_option(img, :g, [:sm])
end
def set_gravity(img, type, xoffset, yoffset) do
add_option(img, :g, [type, xoffset, yoffset])
end
@doc """
[Resize](https://docs.imgproxy.net/generating_the_url_advanced?id=resize) an image to the given width and height.
Options include:
* type: "fit" (default), "fill", or "auto"
* enlarge: enlarge if necessary (`false` by default)
"""
@spec resize(t(), dimension(), dimension(), resize_opts()) :: t()
def resize(img, width, height, opts \\ []) do
type = Keyword.get(opts, :type, "fit")
enlarge = Keyword.get(opts, :enlarge, false)
add_option(img, :rs, [type, width, height, enlarge])
end
@doc """
[Crop](https://docs.imgproxy.net/generating_the_url_advanced?id=crop) an image to the given width and height.
Accepts an optional [gravity](https://docs.imgproxy.net/generating_the_url_advanced?id=gravity) parameter, by
default it is "ce:0:0" for center gravity with no offset.
"""
@spec crop(t(), dimension(), dimension(), String.t()) :: t()
def crop(img, width, height, gravity \\ "ce:0:0") do
add_option(img, :c, [width, height, gravity])
end
@doc """
Set the file extension (which will produce an image of that type).
For instance, setting the extension to "png" will result in a PNG being created:
iex> img = Imgproxy.new("http://example.com/image.jpg")
iex> Imgproxy.set_extension(img, "png") |> to_string()
"https://imgcdn.example.com/insecure/aHR0cDovL2V4YW1wbGUuY29tL2ltYWdlLmpwZw.png"
"""
@spec set_extension(t(), String.t()) :: t()
def set_extension(img, "." <> extension), do: set_extension(img, extension)
def set_extension(img, extension), do: %Imgproxy{img | extension: extension}
@doc """
Generate an imgproxy URL.
## Example
iex> Imgproxy.to_string(Imgproxy.new("https://placekitten.com/200/300"))
"https://imgcdn.example.com/insecure/aHR0cHM6Ly9wbGFjZWtpdHRlbi5jb20vMjAwLzMwMA"
"""
@spec to_string(t()) :: String.t()
defdelegate to_string(img), to: String.Chars.Imgproxy
end
defimpl String.Chars, for: Imgproxy do
  # Renders the final imgproxy URL: <prefix>/<signature>/<options path>.
  # When key/salt are not both binaries, the signature segment is "insecure".
  def to_string(%Imgproxy{prefix: prefix, key: key, salt: salt} = img) do
    path = build_path(img)
    signature = gen_signature(path, key, salt)
    Path.join([prefix || "", signature, path])
  end

  # Builds "/opt1/opt2/.../<base64 source url>[.ext]" from the struct.
  # @spec build_path(img_url :: String.t(), opts :: image_opts) :: String.t()
  defp build_path(%Imgproxy{source_url: source_url, options: opts, extension: ext}) do
    ["/" | Enum.map(opts, &option_to_string/1)]
    |> Path.join()
    |> Path.join(encode_source_url(source_url, ext))
  end

  # URL-safe base64 (unpadded) of the source URL; imgproxy decodes this server-side.
  defp encode_source_url(source_url, nil) do
    Base.url_encode64(source_url, padding: false)
  end

  # With an extension, append ".ext" after the encoded URL.
  defp encode_source_url(source_url, extension) do
    encode_source_url(source_url, nil) <> "." <> extension
  end

  # Serializes one option as "name:arg1:arg2:...".
  defp option_to_string({name, args}) when is_list(args) do
    Enum.map_join([name | args], ":", &Kernel.to_string/1)
  end

  # HMAC-SHA256 over salt <> path, with hex-decoded key/salt, encoded as
  # unpadded URL-safe base64 — matches imgproxy's URL signature scheme.
  defp gen_signature(path, key, salt) when is_binary(key) and is_binary(salt) do
    decoded_key = Base.decode16!(key, case: :lower)
    decoded_salt = Base.decode16!(salt, case: :lower)

    :hmac
    |> :crypto.mac(:sha256, decoded_key, decoded_salt <> path)
    |> Base.url_encode64(padding: false)
  end

  # No key/salt configured: imgproxy's "insecure" mode.
  defp gen_signature(_path, _key, _salt), do: "insecure"
end
| 33.123529 | 115 | 0.661339 |
792a9933b3eefe5fb2b316f1ed20ab84498a367e | 1,855 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_intent_message_suggestions.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_intent_message_suggestions.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_intent_message_suggestions.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentMessageSuggestions do
@moduledoc """
The collection of suggestions.
## Attributes
* `suggestions` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentMessageSuggestion.t)`, *default:* `nil`) - Required. The list of suggested replies.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:suggestions =>
list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentMessageSuggestion.t())
| nil
}
field(:suggestions,
as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentMessageSuggestion,
type: :list
)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentMessageSuggestions do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentMessageSuggestions.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2IntentMessageSuggestions do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.54386 | 177 | 0.754717 |
792abf9acc9680493c5bd5213cc1ec01ead81c7a | 2,673 | ex | Elixir | lib/readability/candidate/scoring.ex | fribmendes/readability | 4f2449558da835b7cb1680e206f73cce91ab38a6 | [
"Apache-2.0"
] | null | null | null | lib/readability/candidate/scoring.ex | fribmendes/readability | 4f2449558da835b7cb1680e206f73cce91ab38a6 | [
"Apache-2.0"
] | null | null | null | lib/readability/candidate/scoring.ex | fribmendes/readability | 4f2449558da835b7cb1680e206f73cce91ab38a6 | [
"Apache-2.0"
] | null | null | null | defmodule Readability.Candidate.Scoring do
@moduledoc """
Score html tree
"""
alias Readability.Helper
@element_scores %{"div" => 5, "blockquote" => 3, "form" => -3, "th" => -5}
@type html_tree :: tuple | list
@type options :: list
@doc """
Score html tree by some algorithm that check children nodes, attributes, link densities, etcs..
options -> weight_classes :: boolean, calculate weight class
"""
@spec calc_score(html_tree, options) :: number
def calc_score(html_tree, opts \\ []) do
score = calc_node_score(html_tree, opts)
score =
score + calc_children_content_score(html_tree) +
calc_grand_children_content_score(html_tree)
score * (1 - calc_link_density(html_tree))
end
defp calc_content_score(html_tree) do
score = 1
inner_text = html_tree |> Floki.text()
split_score = inner_text |> String.split(",") |> length
length_score = [String.length(inner_text) / 100, 3] |> Enum.min()
score + split_score + length_score
end
defp calc_node_score({tag, attrs, _}, opts) do
score = 0
score = if opts[:weight_classes], do: score + class_weight(attrs), else: score
score + (@element_scores[tag] || 0)
end
defp calc_node_score([h | t], opts) do
calc_node_score(h, opts) + calc_node_score(t, opts)
end
defp calc_node_score([], _), do: 0
def class_weight(attrs) do
weight = 0
class = attrs |> List.keyfind("class", 0, {"", ""}) |> elem(1)
id = attrs |> List.keyfind("id", 0, {"", ""}) |> elem(1)
weight = if class =~ Readability.regexes(:positive), do: weight + 25, else: weight
weight = if id =~ Readability.regexes(:positive), do: weight + 25, else: weight
weight = if class =~ Readability.regexes(:negative), do: weight - 25, else: weight
weight = if id =~ Readability.regexes(:negative), do: weight - 25, else: weight
weight
end
def calc_link_density(html_tree) do
link_length =
html_tree
|> Floki.find("a")
|> Floki.text()
|> String.length()
text_length =
html_tree
|> Floki.text()
|> String.length()
if text_length == 0 do
0
else
link_length / text_length
end
end
defp calc_children_content_score({_, _, children_tree}) do
children_tree
|> Enum.filter(&(is_tuple(&1) && Helper.candidate_tag?(&1)))
|> calc_content_score
end
defp calc_grand_children_content_score({_, _, children_tree}) do
score =
children_tree
|> Enum.filter_map(&is_tuple(&1), &elem(&1, 2))
|> List.flatten()
|> Enum.filter(&(is_tuple(&1) && Helper.candidate_tag?(&1)))
|> calc_content_score
score / 2
end
end
| 28.136842 | 97 | 0.636738 |
792af5bf8672ccb9d9a9b5c92d9770394b08d69d | 1,141 | exs | Elixir | config/config.exs | btbinhtran/html_assertions | 299cd83e40b14ca7979a0b28fec13ddf53a9b4fb | [
"MIT"
] | 1 | 2019-04-24T00:11:19.000Z | 2019-04-24T00:11:19.000Z | config/config.exs | btbinhtran/html_assertions | 299cd83e40b14ca7979a0b28fec13ddf53a9b4fb | [
"MIT"
] | null | null | null | config/config.exs | btbinhtran/html_assertions | 299cd83e40b14ca7979a0b28fec13ddf53a9b4fb | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :html_assertion, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:html_assertion, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
import_config "#{Mix.env()}.exs"
| 36.806452 | 73 | 0.755478 |
792b9ed75ca3994e80ef835a26f9f9700f4ba315 | 563 | ex | Elixir | lib/web/plugs/fetch_game.ex | oestrich/grapevine-legacy | 9d84f8e2d65dda5982686381ffa94a940142e1da | [
"MIT"
] | null | null | null | lib/web/plugs/fetch_game.ex | oestrich/grapevine-legacy | 9d84f8e2d65dda5982686381ffa94a940142e1da | [
"MIT"
] | null | null | null | lib/web/plugs/fetch_game.ex | oestrich/grapevine-legacy | 9d84f8e2d65dda5982686381ffa94a940142e1da | [
"MIT"
] | null | null | null | defmodule Web.Plugs.FetchGame do
@moduledoc """
Fetch a user from the session
"""
import Plug.Conn
alias Backbone.Games
def init(default), do: default
def call(conn, _opts) do
case Map.has_key?(conn.params, "client_id") do
true ->
fetch_game(conn, conn.params["client_id"])
false ->
conn
end
end
defp fetch_game(conn, client_id) do
case Games.get_by(client_id: client_id) do
{:ok, game} ->
assign(conn, :client_game, game)
{:error, :not_found} ->
conn
end
end
end
| 17.59375 | 50 | 0.609236 |
792ba401583a2e1c84dae2e793bbea88204d3927 | 2,253 | ex | Elixir | core/sup_tree_core/mnesia_nodes_cleaner.ex | sylph01/antikythera | 47a93f3d4c70975f7296725c9bde2ea823867436 | [
"Apache-2.0"
] | 144 | 2018-04-27T07:24:49.000Z | 2022-03-15T05:19:37.000Z | core/sup_tree_core/mnesia_nodes_cleaner.ex | sylph01/antikythera | 47a93f3d4c70975f7296725c9bde2ea823867436 | [
"Apache-2.0"
] | 123 | 2018-05-01T02:54:43.000Z | 2022-01-28T01:30:52.000Z | core/sup_tree_core/mnesia_nodes_cleaner.ex | sylph01/antikythera | 47a93f3d4c70975f7296725c9bde2ea823867436 | [
"Apache-2.0"
] | 14 | 2018-05-01T02:30:47.000Z | 2022-02-21T04:38:56.000Z | # Copyright(c) 2015-2021 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule AntikytheraCore.MnesiaNodesCleaner do
@moduledoc """
A GenServer that periodically removes extra nodes from mnesia schema.
When a new node is started it automatically starts to sync both mnesia schema and data with other participating nodes.
On the other hand when a node is terminated it's not automatically removed from mnesia schema
(because mnesia has no idea whether the node will re-join the cluster or not).
This GenServer periodically cleans up any already-terminated nodes from mnesia schema
by using hosts information from the underlying infrastructure.
Without this cleanup the "already terminated nodes" would accumulate in mnesia schema
and make startup of new nodes really slow (new node tries to sync with nonexisting nodes until timeout).
Depends on `AntikytheraCore.ClusterHostsPoller`.
"""
@interval 300_000
use GenServer
alias AntikytheraCore.Cluster
alias AntikytheraCore.ClusterHostsPoller
require AntikytheraCore.Logger, as: L
def start_link([]) do
GenServer.start_link(__MODULE__, :ok)
end
@impl true
def init(:ok) do
{:ok, %{}, @interval}
end
@impl true
def handle_info(:timeout, state) do
case ClusterHostsPoller.current_hosts() do
{:ok, hosts} -> clean_nonexisting_nodes_from_mnesia(hosts)
# nothing we can do; just wait and retry again
{:error, :not_yet_initialized} -> :ok
end
{:noreply, state, @interval}
end
  # Removes mnesia schema entries for nodes that are neither currently
  # connected nor present in the infrastructure's host list.
  defp clean_nonexisting_nodes_from_mnesia(hosts) do
    # Compare "host"s (String.t) instead of "node"s (atom) and avoid unnecessary conversions from String.t to atom.
    connected_hosts = [Node.self() | Node.list()] |> MapSet.new(&Cluster.node_to_host/1)
    # `hosts` maps host => info; only the host keys matter here.
    current_known_hosts = Enum.into(hosts, connected_hosts, fn {h, _} -> h end)

    :mnesia.system_info(:db_nodes)
    |> Enum.reject(fn n -> MapSet.member?(current_known_hosts, Cluster.node_to_host(n)) end)
    |> Enum.each(fn n ->
      L.info("removing #{n} from mnesia schema")
      # :mnesia.del_table_copy(:schema, node) is idempotent; it's OK for multiple nodes to concurrently call this.
      {:atomic, :ok} = :mnesia.del_table_copy(:schema, n)
    end)
  end
| 35.761905 | 120 | 0.730581 |
792badfb5a069955add3ec2e374bdad0a3dd8b3d | 7,160 | ex | Elixir | lib/ueberauth/adfs.ex | DefactoSoftware/ueberauth_adfs | 7cdc0acb66a47e6ea09c8f90d998f6fc5fca698d | [
"MIT"
] | null | null | null | lib/ueberauth/adfs.ex | DefactoSoftware/ueberauth_adfs | 7cdc0acb66a47e6ea09c8f90d998f6fc5fca698d | [
"MIT"
] | 1 | 2022-02-04T10:59:48.000Z | 2022-02-04T10:59:48.000Z | lib/ueberauth/adfs.ex | DefactoSoftware/ueberauth_adfs | 7cdc0acb66a47e6ea09c8f90d998f6fc5fca698d | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.ADFS do
@moduledoc """
ADFS Strategy for Überauth.
In ADFS Server setup a new Client using Powershell:
```powershell
Add-AdfsClient -Name "OAUTH2 Client" -ClientId "unique-custom-client-id" -RedirectUri "http://localhost:4000/auth/adfs/callback"
Add-ADFSRelyingPartyTrust -Name "OAUTH2 Client" -Identifier "http://localhost:4000/auth/adfs"
Set-AdfsRelyingPartyTrust -IssuanceAuthorizationRulesFile "TransformRules.txt"
```
In TransformRules.txt put the following:
```
@RuleTemplate = "LdapClaims"
@RuleName = "User Details"
c:[Type == "http://schemas.microsoft.com/ws/2008/06/identity/claims/windowsaccountname", Issuer == "AD AUTHORITY"]
=> issue(store = "Active Directory", types = ("http://schemas.microsoft.com/ws/2008/06/identity/claims/windowsaccountname", "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname", "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname", "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress", "groups", "userPrincipalName"), query = ";sAMAccountName,givenName,sn,mail,tokenGroups,userPrincipalName;{0}", param = c.Value);
```
Add 'adfs_url', 'adfs_metadata_url', 'client_id', 'resource_identifier' and optionally adfs_handler
to your configuration:
```elixir
config :ueberauth, Ueberauth.Strategy.ADFS,
adfs_url: "https://adfs.url",
adfs_metadata_url: "https://path.to/FederationMetadata.xml",
adfs_handler: MyApp.ADFSHandler, # Use custom handler to extract information from the token claims
client_id: "the_client",
resource_identifier: "the_resource_id"
```
An example custom ADFS handler
```elixir
defmodule MyApp.ADFSHandler do
use Ueberauth.Strategy.ADFS.Handler
def credentials(conn) do
token = conn.private.adfs_token
%Credentials{
expires: token.claims["exp"] != nil,
expires_at: token.claims["exp"],
scopes: token.claims["aud"],
token: token.token
}
end
@doc false
def info(conn) do
user = conn.private.adfs_user
%Info{
nickname: user["winaccountname"],
name: "\#{user["given_name"]} \#{user["family_name"]}",
email: user["email"],
first_name: user["given_name"],
last_name: user["family_name"]
}
end
@doc false
def extra(conn) do
user = conn.private.adfs_user
%Extra{
raw_info: %{
token: conn.private[:adfs_token],
user: user,
groups: user["groups"]
}
}
end
end
```
"""
import SweetXml
use Ueberauth.Strategy
alias Ueberauth.Strategy.ADFS.OAuth
def handle_request!(conn) do
if __MODULE__.configured?() do
redirect_to_authorization(conn)
else
redirect!(conn, "/")
end
end
def logout(conn, token) do
params = %{redirect_uri: callback_url(conn), token: token}
with {:ok, signout_url} <- OAuth.signout_url(params) do
redirect!(conn, signout_url)
else
_ ->
set_errors!(conn, [error("Logout Failed", "Failed to logout, please close your browser")])
end
end
  # Successful redirect back from ADFS: exchange the authorization code for a token.
  def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
    with {:ok, client} <- OAuth.get_token(code, redirect_uri: callback_url(conn)) do
      fetch_user(conn, client)
    else
      # Transport-level failure while talking to the token endpoint.
      {:error, %{reason: reason}} ->
        set_errors!(conn, [error("Authentication Error", reason)])

      # ADFS answered, but with an OAuth2 error payload.
      {:error, %OAuth2.Response{body: %{"error_description" => reason}}} ->
        set_errors!(conn, [error("Authentication Error", reason)])
    end
  end

  # ADFS redirected back with an explicit error instead of a code.
  def handle_callback!(
        %Plug.Conn{params: %{"error" => error, "error_description" => error_description}} = conn
      ) do
    set_errors!(conn, [error(error, error_description)])
  end

  # Fallback: neither a code nor an error arrived in the callback params.
  def handle_callback!(conn) do
    set_errors!(conn, [error("missing_code", "No code received")])
  end
def handle_cleanup!(conn) do
conn
|> put_private(:adfs_user, nil)
|> put_private(:adfs_token, nil)
|> put_private(:adfs_handler, nil)
end
def uid(conn) do
uid_field =
conn
|> option(:uid_field)
|> to_string
conn.private.adfs_user[uid_field]
end
def credentials(conn) do
apply(conn.private.adfs_handler, :credentials, [conn])
end
def info(conn) do
apply(conn.private.adfs_handler, :info, [conn])
end
def extra(conn) do
apply(conn.private.adfs_handler, :extra, [conn])
end
def configured? do
:ueberauth
|> Application.get_env(__MODULE__)
|> env_present?
end
  # Downloads the ADFS federation metadata, extracts the signing certificate,
  # verifies the access token's RS256 signature, and stores the token and its
  # claims on the connection. Any failure is recorded via set_errors!/2.
  defp fetch_user(conn, %{token: %{access_token: access_token}}) do
    url = config(:adfs_metadata_url)
    adfs_handler = config(:adfs_handler) || Ueberauth.Strategy.ADFS.DefaultHandler

    conn = put_private(conn, :adfs_handler, adfs_handler)

    with {:ok, %HTTPoison.Response{body: metadata}} <-
           HTTPoison.get(url, [], ssl: [versions: [:"tlsv1.2"]]),
         # Cheap sanity check that the response is a federation metadata document.
         true <- String.starts_with?(metadata, "<EntityDescriptor"),
         {:ok, certificate} <- cert_from_metadata(metadata) do
      # Build an RS256 signer from the PEM certificate.
      key =
        certificate
        |> JOSE.JWK.from_pem()
        |> Joken.rs256()

      # Verify the access token against the signer.
      jwt =
        access_token
        |> Joken.token()
        |> Joken.with_signer(key)
        |> Joken.verify()

      conn = put_private(conn, :adfs_token, jwt)

      # A verified token carries its claims; anything else is unauthorized.
      with %Joken.Token{claims: claims_user} <- jwt do
        put_private(conn, :adfs_user, claims_user)
      else
        _ -> set_errors!(conn, [error("token", "unauthorized")])
      end
    else
      {:error, %HTTPoison.Error{}} -> set_errors!(conn, [error("metadata_url", "not_found")])
      {:error, :cert_not_found} -> set_errors!(conn, [error("certificate", "not_found")])
      false -> set_errors!(conn, [error("metadata", "malformed")])
    end
  end
defp cert_from_metadata(metadata) when is_binary(metadata) do
metadata
|> xpath(~x"//EntityDescriptor/ds:Signature/KeyInfo/X509Data/X509Certificate/text()"s)
|> build_cert()
end
defp build_cert(cert_content)
when is_binary(cert_content) and byte_size(cert_content) > 0 do
{:ok,
"""
-----BEGIN CERTIFICATE-----
#{cert_content}
-----END CERTIFICATE-----
"""}
end
defp build_cert(_), do: {:error, :cert_not_found}
defp option(conn, key) do
Keyword.get(options(conn), key, Keyword.get(default_options(), key))
end
defp config(option) do
:ueberauth
|> Application.get_env(__MODULE__)
|> Keyword.get(option)
end
defp redirect_to_authorization(conn) do
authorize_url =
conn.params
|> Map.put(:resource, config(:resource_identifier))
|> Map.put(:redirect_uri, callback_url(conn))
|> OAuth.authorize_url!()
redirect!(conn, authorize_url)
end
defp env_present?(env) do
if Keyword.has_key?(env, :adfs_url)
&& Keyword.has_key?(env, :adfs_metadata_url)
&& Keyword.has_key?(env, :client_id)
&& Keyword.has_key?(env, :resource_identifier) do
env
|> Keyword.take([:adfs_url, :adfs_metadata_url, :client_id, :resource_identifier])
|> Keyword.values()
|> Enum.all?(&(byte_size(&1 || <<>>) > 0))
else
false
end
end
end
| 29.22449 | 456 | 0.649022 |
792bcb18e3f1ba010fa9593f0471018eb8de89b0 | 1,880 | exs | Elixir | priv/repo/migrations/20201211141735_improve_lti_workflow.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | priv/repo/migrations/20201211141735_improve_lti_workflow.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | priv/repo/migrations/20201211141735_improve_lti_workflow.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule Oli.Repo.Migrations.ImproveLtiWorkflow do
use Ecto.Migration
def up do
alter table(:institutions) do
remove :author_id, references(:authors)
end
create table(:pending_registrations) do
add :country_code, :string
add :institution_email, :string
add :institution_url, :string
add :name, :string
add :timezone, :string
add :issuer, :string
add :client_id, :string
add :key_set_url, :string
add :auth_token_url, :string
add :auth_login_url, :string
add :auth_server, :string
timestamps()
end
create unique_index(:pending_registrations, [:issuer, :client_id])
drop(constraint(:lti_1p3_registrations, "lti_1p3_registrations_institution_id_fkey"))
alter table(:lti_1p3_registrations) do
modify(:institution_id, references(:institutions, on_delete: :delete_all), null: false)
end
drop(constraint(:lti_1p3_deployments, "lti_1p3_deployments_registration_id_fkey"))
alter table(:lti_1p3_deployments) do
modify(:registration_id, references(:lti_1p3_registrations, on_delete: :delete_all),
null: false
)
end
end
def down do
drop(constraint(:lti_1p3_deployments, "lti_1p3_deployments_registration_id_fkey"))
alter table(:lti_1p3_deployments) do
modify(:registration_id, references(:lti_1p3_registrations, on_delete: :nothing),
null: false
)
end
drop(constraint(:lti_1p3_registrations, "lti_1p3_registrations_institution_id_fkey"))
alter table(:lti_1p3_registrations) do
modify(:institution_id, references(:institutions, on_delete: :nothing), null: false)
end
drop unique_index(:pending_registrations, [:issuer, :client_id])
drop table(:pending_registrations)
alter table(:institutions) do
add :author_id, references(:authors)
end
end
end
| 28.059701 | 93 | 0.71117 |
792bd58d5db8644cd680d3f0f4290fcb0c384706 | 1,603 | ex | Elixir | clients/books/lib/google_api/books/v1/model/family_info.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/family_info.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/books/lib/google_api/books/v1/model/family_info.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Books.V1.Model.FamilyInfo do
@moduledoc """
## Attributes
* `kind` (*type:* `String.t`, *default:* `nil`) - Resource type.
* `membership` (*type:* `GoogleApi.Books.V1.Model.FamilyInfoMembership.t`, *default:* `nil`) - Family membership info of the user that made the request.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:kind => String.t(),
:membership => GoogleApi.Books.V1.Model.FamilyInfoMembership.t()
}
field(:kind)
field(:membership, as: GoogleApi.Books.V1.Model.FamilyInfoMembership)
end
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.FamilyInfo do
def decode(value, options) do
GoogleApi.Books.V1.Model.FamilyInfo.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.FamilyInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.06 | 156 | 0.724891 |
792bd634ff030689dbbbc3f994d601fcce3c3da2 | 15,590 | ex | Elixir | lib/elixir/lib/kernel/cli.ex | mrcasals/elixir | 8cb9ebf708f2789a0e7dbf574294b82a55dd2f21 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/kernel/cli.ex | mrcasals/elixir | 8cb9ebf708f2789a0e7dbf574294b82a55dd2f21 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/kernel/cli.ex | mrcasals/elixir | 8cb9ebf708f2789a0e7dbf574294b82a55dd2f21 | [
"Apache-2.0"
] | null | null | null | defmodule Kernel.CLI do
@moduledoc false
@compile {:no_warn_undefined, [Logger, IEx]}
@blank_config %{
commands: [],
output: ".",
compile: [],
no_halt: false,
compiler_options: [],
errors: [],
pa: [],
pz: [],
verbose_compile: false,
profile: nil
}
@doc """
This is the API invoked by Elixir boot process.
"""
def main(argv) do
argv = for arg <- argv, do: IO.chardata_to_string(arg)
{config, argv} = parse_argv(argv)
System.argv(argv)
System.no_halt(config.no_halt)
fun = fn _ ->
errors = process_commands(config)
if errors != [] do
Enum.each(errors, &IO.puts(:stderr, &1))
System.halt(1)
end
end
run(fun)
end
@doc """
Runs the given function by catching any failure
and printing them to stdout. `at_exit` hooks are
also invoked before exiting.
This function is used by Elixir's CLI and also
by escripts generated by Elixir.
"""
def run(fun) do
{ok_or_shutdown, status} = exec_fun(fun, {:ok, 0})
if ok_or_shutdown == :shutdown or not System.no_halt() do
{_, status} = at_exit({ok_or_shutdown, status})
# Ensure Logger messages are flushed before halting
case :erlang.whereis(Logger) do
pid when is_pid(pid) -> Logger.flush()
_ -> :ok
end
System.halt(status)
end
end
@doc """
Parses the CLI arguments. Made public for testing.
"""
def parse_argv(argv) do
parse_argv(argv, @blank_config)
end
@doc """
Process CLI commands. Made public for testing.
"""
def process_commands(config) do
results = Enum.map(Enum.reverse(config.commands), &process_command(&1, config))
errors = for {:error, msg} <- results, do: msg
Enum.reverse(config.errors, errors)
end
  @doc """
  Shared helper for error formatting on CLI tools.
  """
  def format_error(kind, reason, stacktrace) do
    # Blame enriches the error with clause-matching details when available.
    {blamed, stacktrace} = Exception.blame(kind, reason, stacktrace)

    iodata =
      case blamed do
        %FunctionClauseError{} ->
          # Show the original banner plus an indented per-clause blame report.
          formatted = Exception.format_banner(kind, reason, stacktrace)
          padded_blame = pad(FunctionClauseError.blame(blamed, &inspect/1, &blame_match/1))
          [formatted, padded_blame]

        _ ->
          Exception.format_banner(kind, blamed, stacktrace)
      end

    [iodata, ?\n, Exception.format_stacktrace(prune_stacktrace(stacktrace))]
  end
@doc """
Function invoked across nodes for `--rpc-eval`.
"""
def rpc_eval(expr) do
wrapper(fn -> Code.eval_string(expr) end)
catch
kind, reason -> {kind, reason, __STACKTRACE__}
end
## Helpers
defp at_exit(res) do
hooks = :elixir_config.get_and_put(:at_exit, [])
res = Enum.reduce(hooks, res, &exec_fun/2)
if hooks == [], do: res, else: at_exit(res)
end
defp exec_fun(fun, res) when is_function(fun, 1) and is_tuple(res) do
parent = self()
{pid, ref} =
spawn_monitor(fn ->
try do
fun.(elem(res, 1))
catch
:exit, {:shutdown, int} when is_integer(int) ->
send(parent, {self(), {:shutdown, int}})
exit({:shutdown, int})
:exit, reason
when reason == :normal
when reason == :shutdown
when tuple_size(reason) == 2 and elem(reason, 0) == :shutdown ->
send(parent, {self(), {:shutdown, 0}})
exit(reason)
kind, reason ->
print_error(kind, reason, __STACKTRACE__)
send(parent, {self(), {:shutdown, 1}})
exit(to_exit(kind, reason, __STACKTRACE__))
else
_ ->
send(parent, {self(), res})
end
end)
receive do
{^pid, res} ->
:erlang.demonitor(ref, [:flush])
res
{:DOWN, ^ref, _, _, other} ->
print_error({:EXIT, pid}, other, [])
{:shutdown, 1}
end
end
defp to_exit(:throw, reason, stack), do: {{:nocatch, reason}, stack}
defp to_exit(:error, reason, stack), do: {reason, stack}
defp to_exit(:exit, reason, _stack), do: reason
defp shared_option?(list, config, callback) do
case parse_shared(list, config) do
{[h | hs], _} when h == hd(list) ->
new_config = %{config | errors: ["#{h} : Unknown option" | config.errors]}
callback.(hs, new_config)
{new_list, new_config} ->
callback.(new_list, new_config)
end
end
## Error handling
defp print_error(kind, reason, stacktrace) do
IO.write(:stderr, format_error(kind, reason, stacktrace))
end
defp blame_match(%{match?: true, node: node}), do: blame_ansi(:normal, "+", node)
defp blame_match(%{match?: false, node: node}), do: blame_ansi(:red, "-", node)
defp blame_ansi(color, no_ansi, node) do
if IO.ANSI.enabled?() do
[color | Macro.to_string(node)]
|> IO.ANSI.format(true)
|> IO.iodata_to_binary()
else
no_ansi <> Macro.to_string(node) <> no_ansi
end
end
defp pad(string) do
" " <> String.replace(string, "\n", "\n ")
end
  # Compiler-internal modules whose stack frames are noise to users and are
  # therefore stripped from reported stacktraces (see prune_stacktrace/1).
  @elixir_internals [:elixir, :elixir_aliases, :elixir_expand, :elixir_compiler, :elixir_module] ++
                      [:elixir_clauses, :elixir_lexical, :elixir_def, :elixir_map] ++
                      [:elixir_erl, :elixir_erl_clauses, :elixir_erl_pass] ++
                      [Kernel.ErrorHandler, Module.ParallelChecker]
defp prune_stacktrace([{mod, _, _, _} | t]) when mod in @elixir_internals do
prune_stacktrace(t)
end
defp prune_stacktrace([{__MODULE__, :wrapper, 1, _} | _]) do
[]
end
defp prune_stacktrace([h | t]) do
[h | prune_stacktrace(t)]
end
defp prune_stacktrace([]) do
[]
end
# Parse shared options
  # Options shared by `elixir`, `elixirc` and `iex`. Each clause consumes one
  # option (and its argument, when it takes one) and recurses on the rest.
  # The final catch-all clause returns the untouched list, which
  # shared_option?/3 uses to detect unknown options.

  # -v / --version: print version info and stop. Prints the IEx banner when
  # running inside a started IEx session, otherwise OTP + Elixir versions.
  defp parse_shared([opt | _t], _config) when opt in ["-v", "--version"] do
    if function_exported?(IEx, :started?, 0) and IEx.started?() do
      IO.puts("IEx " <> System.build_info()[:build])
    else
      IO.puts(:erlang.system_info(:system_version))
      IO.puts("Elixir " <> System.build_info()[:build])
    end

    System.halt(0)
  end

  # -pa: prepend path(s) to the code path (wildcards allowed).
  defp parse_shared(["-pa", h | t], config) do
    paths = expand_code_path(h)
    Enum.each(paths, &:code.add_patha/1)
    parse_shared(t, %{config | pa: config.pa ++ paths})
  end

  # -pz: append path(s) to the code path (wildcards allowed).
  defp parse_shared(["-pz", h | t], config) do
    paths = expand_code_path(h)
    Enum.each(paths, &:code.add_pathz/1)
    parse_shared(t, %{config | pz: config.pz ++ paths})
  end

  # --app: queue an application to be started.
  defp parse_shared(["--app", h | t], config) do
    parse_shared(t, %{config | commands: [{:app, h} | config.commands]})
  end

  # --no-halt: keep the VM running after commands finish.
  defp parse_shared(["--no-halt" | t], config) do
    parse_shared(t, %{config | no_halt: true})
  end

  # -e / --eval: queue a string of code to evaluate.
  defp parse_shared(["-e", h | t], config) do
    parse_shared(t, %{config | commands: [{:eval, h} | config.commands]})
  end

  defp parse_shared(["--eval", h | t], config) do
    parse_shared(t, %{config | commands: [{:eval, h} | config.commands]})
  end

  # --rpc-eval: queue code to evaluate on a remote node; the node name gets
  # the local hostname appended when it has none.
  defp parse_shared(["--rpc-eval", node, h | t], config) do
    node = append_hostname(node)
    parse_shared(t, %{config | commands: [{:rpc_eval, node, h} | config.commands]})
  end

  # -r: queue files (pattern) to require sequentially.
  defp parse_shared(["-r", h | t], config) do
    parse_shared(t, %{config | commands: [{:require, h} | config.commands]})
  end

  # -pr: queue files (pattern) to require in parallel.
  defp parse_shared(["-pr", h | t], config) do
    parse_shared(t, %{config | commands: [{:parallel_require, h} | config.commands]})
  end

  # No shared option matched: hand the list back unchanged.
  defp parse_shared(list, config) do
    {list, config}
  end
defp append_hostname(node) do
case :string.find(node, "@") do
:nomatch -> node <> :string.find(Atom.to_string(node()), "@")
_ -> node
end
end
defp expand_code_path(path) do
path = Path.expand(path)
case Path.wildcard(path) do
[] -> [to_charlist(path)]
list -> Enum.map(list, &to_charlist/1)
end
end
# Process init options
  # Top-level option parsing for the `elixir` executable. Returns
  # {config, remaining_args}; everything after "--" is left for the script.

  defp parse_argv(["--" | t], config) do
    {config, t}
  end

  # +elixirc / +iex switch to the compiler/IEx option parsers.
  defp parse_argv(["+elixirc" | t], config) do
    parse_compiler(t, config)
  end

  defp parse_argv(["+iex" | t], config) do
    parse_iex(t, config)
  end

  # -S: run a script found on PATH.
  defp parse_argv(["-S", h | t], config) do
    {%{config | commands: [{:script, h} | config.commands]}, t}
  end

  defp parse_argv([h | t] = list, config) do
    case h do
      # Any other dash option must be a shared one (or is reported unknown).
      "-" <> _ ->
        shared_option?(list, config, &parse_argv(&1, &2))

      _ ->
        # When an --eval was already queued, the first bare argument and
        # everything after it are passed through as the program's argv.
        if List.keymember?(config.commands, :eval, 0) do
          {config, list}
        else
          {%{config | commands: [{:file, h} | config.commands]}, t}
        end
    end
  end

  defp parse_argv([], config) do
    {config, []}
  end
# Parse compiler options
  # Option parsing for `elixirc`. Bare arguments are files (or directories,
  # expanded to **/*.ex) to compile; the empty clause queues the final
  # {:compile, files} command.

  defp parse_compiler(["--" | t], config) do
    {config, t}
  end

  # -o: output directory for .beam files.
  defp parse_compiler(["-o", h | t], config) do
    parse_compiler(t, %{config | output: h})
  end

  defp parse_compiler(["--no-docs" | t], config) do
    parse_compiler(t, %{config | compiler_options: [{:docs, false} | config.compiler_options]})
  end

  defp parse_compiler(["--no-debug-info" | t], config) do
    compiler_options = [{:debug_info, false} | config.compiler_options]
    parse_compiler(t, %{config | compiler_options: compiler_options})
  end

  defp parse_compiler(["--ignore-module-conflict" | t], config) do
    compiler_options = [{:ignore_module_conflict, true} | config.compiler_options]
    parse_compiler(t, %{config | compiler_options: compiler_options})
  end

  defp parse_compiler(["--warnings-as-errors" | t], config) do
    compiler_options = [{:warnings_as_errors, true} | config.compiler_options]
    parse_compiler(t, %{config | compiler_options: compiler_options})
  end

  # --verbose: print each file as it is compiled.
  defp parse_compiler(["--verbose" | t], config) do
    parse_compiler(t, %{config | verbose_compile: true})
  end

  # Private compiler options
  defp parse_compiler(["--profile", "time" | t], config) do
    parse_compiler(t, %{config | profile: :time})
  end

  defp parse_compiler([h | t] = list, config) do
    case h do
      # Unknown dash options fall back to the shared parser.
      "-" <> _ ->
        shared_option?(list, config, &parse_compiler(&1, &2))

      _ ->
        # Directories compile all nested .ex files; plain args are patterns.
        pattern = if File.dir?(h), do: "#{h}/**/*.ex", else: h
        parse_compiler(t, %{config | compile: [pattern | config.compile]})
    end
  end

  defp parse_compiler([], config) do
    {%{config | commands: [{:compile, config.compile} | config.commands]}, []}
  end
# Parse IEx options
  # Option parsing for `iex`. IEx-specific options that IEx itself consumes
  # (--dot-iex, --remsh) are accepted and ignored here so Kernel.CLI does not
  # reject them as unknown.

  defp parse_iex(["--" | t], config) do
    {config, t}
  end

  # This clause is here so that Kernel.CLI does not
  # error out with "unknown option"
  defp parse_iex(["--dot-iex", _ | t], config) do
    parse_iex(t, config)
  end

  # Same reasoning: --remsh and its argument belong to IEx, skip them here.
  defp parse_iex([opt, _ | t], config) when opt in ["--remsh"] do
    parse_iex(t, config)
  end

  defp parse_iex(["-S", h | t], config) do
    {%{config | commands: [{:script, h} | config.commands]}, t}
  end

  defp parse_iex([h | t] = list, config) do
    case h do
      "-" <> _ -> shared_option?(list, config, &parse_iex(&1, &2))
      _ -> {%{config | commands: [{:file, h} | config.commands]}, t}
    end
  end

  defp parse_iex([], config) do
    {config, []}
  end
# Process commands
defp process_command({:cookie, h}, _config) do
if Node.alive?() do
wrapper(fn -> Node.set_cookie(String.to_atom(h)) end)
else
{:error, "--cookie : Cannot set cookie if the node is not alive (set --name or --sname)"}
end
end
defp process_command({:eval, expr}, _config) when is_binary(expr) do
wrapper(fn -> Code.eval_string(expr, []) end)
end
  # --rpc-eval: evaluates `expr` on a remote node via :rpc. The remote side
  # is expected to return :ok, or a {kind, error, stack} triple which is
  # re-raised locally so the failure surfaces with its original stacktrace.
  defp process_command({:rpc_eval, node, expr}, _config) when is_binary(expr) do
    case :rpc.call(String.to_atom(node), __MODULE__, :rpc_eval, [expr]) do
      :ok -> :ok
      # Remote exit: propagate it to this process with the same reason.
      {:badrpc, {:EXIT, exit}} -> Process.exit(self(), exit)
      {:badrpc, reason} -> {:error, "--rpc-eval : RPC failed with reason #{inspect(reason)}"}
      {kind, error, stack} -> :erlang.raise(kind, error, stack)
    end
  end
defp process_command({:app, app}, _config) when is_binary(app) do
case Application.ensure_all_started(String.to_atom(app)) do
{:error, {app, reason}} ->
msg = "--app : Could not start application #{app}: " <> Application.format_error(reason)
{:error, msg}
{:ok, _} ->
:ok
end
end
defp process_command({:script, file}, _config) when is_binary(file) do
if exec = find_elixir_executable(file) do
wrapper(fn -> Code.require_file(exec) end)
else
{:error, "-S : Could not find executable #{file}"}
end
end
defp process_command({:file, file}, _config) when is_binary(file) do
if File.regular?(file) do
wrapper(fn -> Code.require_file(file) end)
else
{:error, "No file named #{file}"}
end
end
defp process_command({:require, pattern}, _config) when is_binary(pattern) do
files = filter_patterns(pattern)
if files != [] do
wrapper(fn -> Enum.map(files, &Code.require_file(&1)) end)
else
{:error, "-r : No files matched pattern #{pattern}"}
end
end
defp process_command({:parallel_require, pattern}, _config) when is_binary(pattern) do
files = filter_patterns(pattern)
if files != [] do
wrapper(fn ->
case Kernel.ParallelCompiler.require(files) do
{:ok, _, _} -> :ok
{:error, _, _} -> exit({:shutdown, 1})
end
end)
else
{:error, "-pr : No files matched pattern #{pattern}"}
end
end
  # elixirc: compiles all files matched by the collected patterns into
  # config.output, honoring the compiler options gathered during parsing.
  defp process_command({:compile, patterns}, config) do
    # If ensuring the dir returns an error no files will be found.
    _ = :filelib.ensure_dir(:filename.join(config.output, "."))

    case filter_multiple_patterns(patterns) do
      {:ok, []} ->
        {:error, "No files matched provided patterns"}

      {:ok, files} ->
        wrapper(fn ->
          Code.compiler_options(config.compiler_options)

          # --verbose prints every file; otherwise only slow files (>10s)
          # are reported.
          verbose_opts =
            if config.verbose_compile do
              [each_file: &IO.puts("Compiling #{Path.relative_to_cwd(&1)}")]
            else
              [
                each_long_compilation:
                  &IO.puts("Compiling #{Path.relative_to_cwd(&1)} (it's taking more than 10s)")
              ]
            end

          # --profile time enables per-file timing in the parallel compiler.
          profile_opts =
            if config.profile do
              [profile: config.profile]
            else
              []
            end

          opts = verbose_opts ++ profile_opts

          # Compilation errors exit the VM with status 1.
          case Kernel.ParallelCompiler.compile_to_path(files, config.output, opts) do
            {:ok, _, _} -> :ok
            {:error, _, _} -> exit({:shutdown, 1})
          end
        end)

      {:missing, missing} ->
        {:error, "No files matched pattern(s) #{Enum.join(missing, ",")}"}
    end
  end
defp filter_patterns(pattern) do
pattern
|> Path.expand()
|> Path.wildcard()
|> :lists.usort()
|> Enum.filter(&File.regular?/1)
end
defp filter_multiple_patterns(patterns) do
{files, missing} =
Enum.reduce(patterns, {[], []}, fn pattern, {files, missing} ->
case filter_patterns(pattern) do
[] -> {files, [pattern | missing]}
match -> {match ++ files, missing}
end
end)
case missing do
[] -> {:ok, :lists.usort(files)}
_ -> {:missing, :lists.usort(missing)}
end
end
defp wrapper(fun) do
_ = fun.()
:ok
end
defp find_elixir_executable(file) do
if exec = System.find_executable(file) do
# If we are on Windows, the executable is going to be
# a .bat file that must be in the same directory as
# the actual Elixir executable.
case :os.type() do
{:win32, _} ->
base = Path.rootname(exec)
if File.regular?(base), do: base, else: exec
_ ->
exec
end
end
end
end
| 27.207679 | 99 | 0.597498 |
792bf209281fc221b7142f6cc7ecbca818282333 | 20,705 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/api/jobs.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/api/jobs.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/api/jobs.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.BigQuery.V2.Api.Jobs do
  @moduledoc """
  API calls for all endpoints tagged `Jobs`.

  Every endpoint accepts the shared optional query parameters `:alt`,
  `:fields`, `:key`, `:oauth_token`, `:prettyPrint`, `:quotaUser` and
  `:userIp` in its `optional_params`, in addition to the endpoint-specific
  options documented on each function. All functions return
  `{:ok, decoded_struct}` on success and `{:error, info}` on failure.
  """

  alias GoogleApi.BigQuery.V2.Connection
  alias GoogleApi.Gax.{Request, Response}

  # NOTE(review): this module was generated by swagger-codegen. The request
  # pipeline that was copy-pasted into every endpoint has been factored into
  # do_request/8 below; regenerating the client will overwrite this refactor.

  # Optional query parameters accepted by every endpoint in this module.
  @shared_optional_params %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query
  }

  @doc """
  Requests that a job be cancelled. This call returns immediately; the client
  must poll the job status to see if the cancel completed successfully.
  Cancelled jobs may still incur costs.

  Endpoint-specific `optional_params`: `:location` (geographic location of the
  job; required except for US and EU).
  """
  @spec bigquery_jobs_cancel(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
          {:ok, GoogleApi.BigQuery.V2.Model.JobCancelResponse.t()} | {:error, Tesla.Env.t()}
  def bigquery_jobs_cancel(connection, project_id, job_id, optional_params \\ [], opts \\ []) do
    do_request(
      connection,
      :post,
      "/bigquery/v2/projects/{projectId}/jobs/{jobId}/cancel",
      %{"projectId" => URI.encode_www_form(project_id), "jobId" => URI.encode_www_form(job_id)},
      [],
      %{:location => :query},
      optional_params,
      opts ++ [struct: %GoogleApi.BigQuery.V2.Model.JobCancelResponse{}]
    )
  end

  @doc """
  Returns information about a specific job. Job information is available for a
  six month period after creation. Requires that you're the person who ran the
  job, or have the Is Owner project role.

  Endpoint-specific `optional_params`: `:location`.
  """
  @spec bigquery_jobs_get(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
          {:ok, GoogleApi.BigQuery.V2.Model.Job.t()} | {:error, Tesla.Env.t()}
  def bigquery_jobs_get(connection, project_id, job_id, optional_params \\ [], opts \\ []) do
    do_request(
      connection,
      :get,
      "/bigquery/v2/projects/{projectId}/jobs/{jobId}",
      %{"projectId" => URI.encode_www_form(project_id), "jobId" => URI.encode_www_form(job_id)},
      [],
      %{:location => :query},
      optional_params,
      opts ++ [struct: %GoogleApi.BigQuery.V2.Model.Job{}]
    )
  end

  @doc """
  Retrieves the results of a query job.

  Endpoint-specific `optional_params`: `:location`, `:maxResults`,
  `:pageToken`, `:startIndex`, `:timeoutMs` (how long to wait for the query to
  complete before returning; default 10s — if it passes before the job
  completes, `jobComplete` in the response is false).
  """
  @spec bigquery_jobs_get_query_results(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
          {:ok, GoogleApi.BigQuery.V2.Model.GetQueryResultsResponse.t()} | {:error, Tesla.Env.t()}
  def bigquery_jobs_get_query_results(
        connection,
        project_id,
        job_id,
        optional_params \\ [],
        opts \\ []
      ) do
    do_request(
      connection,
      :get,
      "/bigquery/v2/projects/{projectId}/queries/{jobId}",
      %{"projectId" => URI.encode_www_form(project_id), "jobId" => URI.encode_www_form(job_id)},
      [],
      %{
        :location => :query,
        :maxResults => :query,
        :pageToken => :query,
        :startIndex => :query,
        :timeoutMs => :query
      },
      optional_params,
      opts ++ [struct: %GoogleApi.BigQuery.V2.Model.GetQueryResultsResponse{}]
    )
  end

  @doc """
  Starts a new asynchronous job. Requires the Can View project role.

  Endpoint-specific `optional_params`: `:body` (a `Job`).
  """
  @spec bigquery_jobs_insert(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.BigQuery.V2.Model.Job.t()} | {:error, Tesla.Env.t()}
  def bigquery_jobs_insert(connection, project_id, optional_params \\ [], opts \\ []) do
    do_request(
      connection,
      :post,
      "/bigquery/v2/projects/{projectId}/jobs",
      %{"projectId" => URI.encode_www_form(project_id)},
      [],
      %{:body => :body},
      optional_params,
      opts ++ [struct: %GoogleApi.BigQuery.V2.Model.Job{}]
    )
  end

  @doc """
  Starts a new asynchronous job using a resumable upload. `upload_type` must
  be "resumable". Requires the Can View project role.

  Endpoint-specific `optional_params`: `:body` (a `Job`). The raw response is
  returned undecoded (`{:ok, nil}` semantics per the resumable protocol).
  """
  @spec bigquery_jobs_insert_resumable(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
          {:ok, nil} | {:error, Tesla.Env.t()}
  def bigquery_jobs_insert_resumable(
        connection,
        project_id,
        upload_type,
        optional_params \\ [],
        opts \\ []
      ) do
    do_request(
      connection,
      :post,
      "/resumable/upload/bigquery/v2/projects/{projectId}/jobs",
      %{"projectId" => URI.encode_www_form(project_id)},
      [{:query, :uploadType, upload_type}],
      %{:body => :body},
      optional_params,
      opts ++ [decode: false]
    )
  end

  @doc """
  Starts a new asynchronous job using a multipart upload. `upload_type` must
  be "multipart"; `metadata` is the `Job` metadata and `data` is the file to
  upload. Requires the Can View project role.
  """
  @spec bigquery_jobs_insert_simple(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          GoogleApi.BigQuery.V2.Model.Job.t(),
          String.t(),
          keyword()
        ) :: {:ok, GoogleApi.BigQuery.V2.Model.Job.t()} | {:error, Tesla.Env.t()}
  def bigquery_jobs_insert_simple(
        connection,
        project_id,
        upload_type,
        metadata,
        data,
        optional_params \\ [],
        opts \\ []
      ) do
    do_request(
      connection,
      :post,
      "/upload/bigquery/v2/projects/{projectId}/jobs",
      %{"projectId" => URI.encode_www_form(project_id)},
      [{:query, :uploadType, upload_type}, {:body, :metadata, metadata}, {:file, :data, data}],
      %{},
      optional_params,
      opts ++ [struct: %GoogleApi.BigQuery.V2.Model.Job{}]
    )
  end

  @doc """
  Lists all jobs that you started in the specified project, sorted in reverse
  chronological order by creation time. Job information is available for six
  months after creation. Requires the Can View project role (or Is Owner when
  `:allUsers` is set).

  Endpoint-specific `optional_params`: `:allUsers`, `:maxCreationTime`,
  `:maxResults`, `:minCreationTime`, `:pageToken`, `:projection`,
  `:stateFilter`.
  """
  @spec bigquery_jobs_list(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.BigQuery.V2.Model.JobList.t()} | {:error, Tesla.Env.t()}
  def bigquery_jobs_list(connection, project_id, optional_params \\ [], opts \\ []) do
    do_request(
      connection,
      :get,
      "/bigquery/v2/projects/{projectId}/jobs",
      %{"projectId" => URI.encode_www_form(project_id)},
      [],
      %{
        :allUsers => :query,
        :maxCreationTime => :query,
        :maxResults => :query,
        :minCreationTime => :query,
        :pageToken => :query,
        :projection => :query,
        :stateFilter => :query
      },
      optional_params,
      opts ++ [struct: %GoogleApi.BigQuery.V2.Model.JobList{}]
    )
  end

  @doc """
  Runs a BigQuery SQL query synchronously and returns query results if the
  query completes within a specified timeout.

  Endpoint-specific `optional_params`: `:body` (a `QueryRequest`).
  """
  @spec bigquery_jobs_query(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.BigQuery.V2.Model.QueryResponse.t()} | {:error, Tesla.Env.t()}
  def bigquery_jobs_query(connection, project_id, optional_params \\ [], opts \\ []) do
    do_request(
      connection,
      :post,
      "/bigquery/v2/projects/{projectId}/queries",
      %{"projectId" => URI.encode_www_form(project_id)},
      [],
      %{:body => :body},
      optional_params,
      opts ++ [struct: %GoogleApi.BigQuery.V2.Model.QueryResponse{}]
    )
  end

  # Shared request pipeline used by every endpoint above: builds the request,
  # executes it over the connection and decodes the response.
  #
  # fixed_params is a list of {location, key, value} params that are always
  # present (e.g. uploadType); extra_optional is merged over the shared
  # optional-params config.
  defp do_request(
         connection,
         method,
         url,
         path_params,
         fixed_params,
         extra_optional,
         optional_params,
         decode_opts
       ) do
    optional_params_config = Map.merge(@shared_optional_params, extra_optional)

    request =
      Request.new()
      |> Request.method(method)
      |> Request.url(url, path_params)
      |> add_fixed_params(fixed_params)
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(decode_opts)
  end

  # Adds each always-present parameter to the request, in order.
  defp add_fixed_params(request, params) do
    Enum.reduce(params, request, fn {location, key, value}, acc ->
      Request.add_param(acc, location, key, value)
    end)
  end
end
| 43.406709 | 303 | 0.67433 |
792c18821dd62dfc1ff197fa0fb0dc49546549fd | 5,062 | ex | Elixir | clients/ad_mob/lib/google_api/ad_mob/v1/model/network_report_spec.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/ad_mob/lib/google_api/ad_mob/v1/model/network_report_spec.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/ad_mob/lib/google_api/ad_mob/v1/model/network_report_spec.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdMob.V1.Model.NetworkReportSpec do
  @moduledoc """
  The specification for generating an AdMob Network report.

  For example, the specification to get clicks and estimated earnings for only
  the 'US' and 'CN' countries can look like:

      {
        'date_range': {
          'start_date': {'year': 2018, 'month': 9, 'day': 1},
          'end_date': {'year': 2018, 'month': 9, 'day': 30}
        },
        'dimensions': ['DATE', 'APP', 'COUNTRY'],
        'metrics': ['CLICKS', 'ESTIMATED_EARNINGS'],
        'dimension_filters': [
          {'dimension': 'COUNTRY',
           'matches_any': {'values': [{'value': 'US', 'value': 'CN'}]}}
        ],
        'sort_conditions': [
          {'dimension': 'APP', order: 'ASCENDING'},
          {'metric': 'CLICKS', order: 'DESCENDING'}
        ],
        'localization_settings': {
          'currency_code': 'USD',
          'language_code': 'en-US'
        }
      }

  For a better understanding, treat the preceding specification like the
  following pseudo SQL:

      SELECT DATE, APP, COUNTRY, CLICKS, ESTIMATED_EARNINGS
      FROM NETWORK_REPORT
      WHERE DATE >= '2018-09-01' AND DATE <= '2018-09-30'
        AND COUNTRY IN ('US', 'CN')
      GROUP BY DATE, APP, COUNTRY
      ORDER BY APP ASC, CLICKS DESC;

  ## Attributes

  *   `dateRange` (*type:* `GoogleApi.AdMob.V1.Model.DateRange.t`, *default:* `nil`) -
      The date range for which the report is generated.
  *   `dimensionFilters` (*type:* `list(GoogleApi.AdMob.V1.Model.NetworkReportSpecDimensionFilter.t)`, *default:* `nil`) -
      Describes which report rows to match based on their dimension values.
  *   `dimensions` (*type:* `list(String.t)`, *default:* `nil`) -
      List of dimensions of the report. The value combination of these
      dimensions determines the row of the report. If no dimensions are
      specified, the report returns a single row of requested metrics for the
      entire account.
  *   `localizationSettings` (*type:* `GoogleApi.AdMob.V1.Model.LocalizationSettings.t`, *default:* `nil`) -
      Localization settings of the report.
  *   `maxReportRows` (*type:* `integer()`, *default:* `nil`) -
      Maximum number of report data rows to return. If unset, the API returns
      as many rows as possible, up to 100000. Acceptable values are 1-100000,
      inclusive; any other values are treated as 100000.
  *   `metrics` (*type:* `list(String.t)`, *default:* `nil`) -
      List of metrics of the report. A report must specify at least one metric.
  *   `sortConditions` (*type:* `list(GoogleApi.AdMob.V1.Model.NetworkReportSpecSortCondition.t)`, *default:* `nil`) -
      Describes the sorting of report rows. Earlier conditions in the list
      take precedence. If no sort conditions are specified, row ordering is
      undefined.
  *   `timeZone` (*type:* `String.t`, *default:* `nil`) -
      A report time zone (IANA TZ name, e.g. "America/Los_Angeles"). If unset,
      the account default applies. **Warning:** "America/Los_Angeles" is the
      only supported value at the moment.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :dateRange => GoogleApi.AdMob.V1.Model.DateRange.t(),
          :dimensionFilters =>
            list(GoogleApi.AdMob.V1.Model.NetworkReportSpecDimensionFilter.t()),
          :dimensions => list(String.t()),
          :localizationSettings => GoogleApi.AdMob.V1.Model.LocalizationSettings.t(),
          :maxReportRows => integer(),
          :metrics => list(String.t()),
          :sortConditions => list(GoogleApi.AdMob.V1.Model.NetworkReportSpecSortCondition.t()),
          :timeZone => String.t()
        }

  field(:dateRange, as: GoogleApi.AdMob.V1.Model.DateRange)

  field(:dimensionFilters,
    as: GoogleApi.AdMob.V1.Model.NetworkReportSpecDimensionFilter,
    type: :list
  )

  field(:dimensions, type: :list)
  field(:localizationSettings, as: GoogleApi.AdMob.V1.Model.LocalizationSettings)
  field(:maxReportRows)
  field(:metrics, type: :list)
  field(:sortConditions, as: GoogleApi.AdMob.V1.Model.NetworkReportSpecSortCondition, type: :list)
  field(:timeZone)
end
defimpl Poison.Decoder, for: GoogleApi.AdMob.V1.Model.NetworkReportSpec do
def decode(value, options) do
GoogleApi.AdMob.V1.Model.NetworkReportSpec.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdMob.V1.Model.NetworkReportSpec do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 68.405405 | 1,025 | 0.720071 |
792c5b6c05d1eb535fac26f9a037d58060c4ab57 | 1,684 | ex | Elixir | apps/tai/lib/mix/tasks/tai.gen.migration.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | apps/tai/lib/mix/tasks/tai.gen.migration.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/tai/lib/mix/tasks/tai.gen.migration.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Tai.Gen.Migration do
@shortdoc "Generates migrations for tai"
@moduledoc """
Generates database migrations for tai repos:
- Tai.NewOrders.OrderRepo
"""
use Mix.Task
import Mix.Ecto
import Mix.Generator
@doc false
def run(args) do
# TODO: Is this check actually required?
# no_umbrella!("ecto.gen.migration")
tai_app_dir = Application.app_dir(:tai)
args
|> tai_repos()
|> Enum.each(fn repo ->
ensure_repo(repo, args)
repo_migrations_destination_dir_path = Ecto.Migrator.migrations_path(repo)
create_directory(repo_migrations_destination_dir_path)
tai_migration_templates_dir_path =
Path.join(tai_app_dir, "priv/repo_templates/#{repo_template_dir(repo)}/migrations")
tai_migration_templates_dir_path
|> File.ls!()
|> Enum.map(&Path.join(tai_migration_templates_dir_path, &1))
|> Enum.map(fn migration_template_path ->
basename = Path.basename(migration_template_path, ".eex")
destination_path = Path.join(repo_migrations_destination_dir_path, basename)
generated_migration = EEx.eval_file(migration_template_path, module_prefix: app_module())
create_file(destination_path, generated_migration)
end)
end)
end
@tai_repos [Tai.NewOrders.OrderRepo]
defp tai_repos(args) do
args
|> parse_repo()
|> Enum.filter(fn r -> Enum.member?(@tai_repos, r) end)
end
defp repo_template_dir(repo) do
repo
|> Module.split()
|> List.last()
|> Macro.underscore()
end
defp app_module do
Mix.Project.config()
|> Keyword.fetch!(:app)
|> to_string()
|> Macro.camelize()
end
end
| 26.3125 | 97 | 0.688836 |
792c6005be7db42c0436e698aeaa0effba303df5 | 847 | ex | Elixir | lib/scenic/view_port/driver/config.ex | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | lib/scenic/view_port/driver/config.ex | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | lib/scenic/view_port/driver/config.ex | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer April 2018.
# Copyright © 2018 Kry10 Industries. All rights reserved.
#
# helper module for configuring ViewPorts during startup
defmodule Scenic.ViewPort.Driver.Config do
alias Scenic.ViewPort.Driver.Config
# describe the struct. Name nil and opts as an empty list are good defaults
defstruct module: nil, name: nil, opts: []
# import IEx
def valid?(%Config{module: mod, name: name}) do
is_atom(mod) && !is_nil(mod) && is_atom(name)
end
def valid?(%{} = config), do: valid?(struct(Config, config))
def valid!(%Config{module: mod, name: name})
when is_atom(mod) and not is_nil(mod) and is_atom(name) do
:ok
end
def valid!(%Config{module: _}) do
raise "Driver.Config must reference a valid module"
end
def valid!(%{} = config), do: valid!(struct(Config, config))
end
| 26.46875 | 77 | 0.689492 |
792c6d89ea809689a384b17d7ba2c8c40da8aa70 | 64,420 | exs | Elixir | test/lib/ex_aws/ec2_test.exs | Frameio/ex_aws | 3b335b6ed7932b5cf991323d26cf5497e1e6c122 | [
"Unlicense",
"MIT"
] | null | null | null | test/lib/ex_aws/ec2_test.exs | Frameio/ex_aws | 3b335b6ed7932b5cf991323d26cf5497e1e6c122 | [
"Unlicense",
"MIT"
] | null | null | null | test/lib/ex_aws/ec2_test.exs | Frameio/ex_aws | 3b335b6ed7932b5cf991323d26cf5497e1e6c122 | [
"Unlicense",
"MIT"
] | null | null | null | defmodule ExAws.EC2Test do
use ExUnit.Case, async: true
doctest ExAws.EC2
alias ExAws.EC2
@version "2016-11-15"
defp build_query(action, params \\ %{}) do
action_string = action |> Atom.to_string |> Macro.camelize
%ExAws.Operation.Query{
path: "/",
params: params |> Map.merge(%{"Version" => @version, "Action" => action_string}),
service: :ec2,
action: action
}
end
###################
# Instances Tests #
###################
test "describe_instances no options" do
expected = build_query(:describe_instances, %{})
assert expected == EC2.describe_instances
end
test "describe_instances with filters and instance Ids" do
expected = build_query(:describe_instances, %{
"Filter.1.Name" => "tag",
"Filter.1.Value.1" => "Owner",
"Filter.2.Name" => "instance-type",
"Filter.2.Value.1" => "m1.small",
"Filter.2.Value.2" => "m1.large",
"InstanceId.1" => "i-12345",
"InstanceId.2" => "i-56789"
})
assert expected == EC2.describe_instances(
[filters: [tag: ["Owner"], "instance-type": ["m1.small", "m1.large"]],
instance_ids: ["i-12345", "i-56789"]
])
end
test "describe_instances with dry_run" do
expected = build_query(:describe_instances, %{
"DryRun" => true
})
assert expected == EC2.describe_instances(
[dry_run: true]
)
end
test "describe_instances with next_token and max_results"do
expected = build_query(:describe_instances, %{
"NextToken" => "TestToken",
"MaxResults" => 10
})
assert expected == EC2.describe_instances(
[next_token: "TestToken", max_results: 10]
)
end
test "describe_instance_status no options" do
expected = build_query(:describe_instance_status, %{})
assert expected == EC2.describe_instance_status
end
test "describe_instance_status with filters and max_results" do
expected = build_query(:describe_instance_status, %{
"Filter.1.Name" => "system-status.reachability",
"Filter.1.Value.1" => "failed",
"MaxResults" => 5
})
assert expected == EC2.describe_instance_status(
[filters: ["system-status.reachability": ["failed"]], max_results: 5]
)
end
test "describe_instance_status with instance ids" do
expected = build_query(:describe_instance_status, %{
"InstanceId.1" => "i-123456",
"InstanceId.2" => "i-1a2b3c"
})
assert expected == EC2.describe_instance_status(
[instance_ids: ["i-123456", "i-1a2b3c"]]
)
end
test "describe_instances with dry_run, next_token, and include_all_instances" do
expected = build_query(:describe_instance_status, %{
"DryRun" => true,
"NextToken" => "TestToken",
"IncludeAllInstances" => true
})
assert expected == EC2.describe_instance_status(
[dry_run: true, next_token: "TestToken", include_all_instances: true]
)
end
test "terminate_instances no options" do
expected = build_query(:terminate_instances, %{
"InstanceId.1" => "i-123456",
"InstanceId.2" => "i-987654"
})
assert expected == EC2.terminate_instances(["i-123456", "i-987654"])
end
test "terminate_instances with dry_run set" do
expected = build_query(:terminate_instances, %{
"InstanceId.1" => "i-123456",
"DryRun" => true
})
assert expected == EC2.terminate_instances(["i-123456"], [dry_run: true])
end
test "reboot_instances" do
expected = build_query(:reboot_instances, %{
"InstanceId.1" => "i-123456"
})
assert expected == EC2.reboot_instances(["i-123456"])
end
test "reboot_instances with dry_run set" do
expected = build_query(:reboot_instances, %{
"InstanceId.1" => "i-123456",
"DryRun" => true
})
assert expected == EC2.reboot_instances(["i-123456"], [dry_run: true])
end
test "start_instances no options" do
expected = build_query(:start_instances, %{
"InstanceId.1" => "i-123456",
"InstanceId.2" => "i-987654"
})
assert expected == EC2.start_instances(["i-123456", "i-987654"])
end
test "start_instances with dry_run" do
expected = build_query(:start_instances, %{
"InstanceId.1" => "i-123456",
"DryRun" => false
})
assert expected == EC2.start_instances(["i-123456"], [dry_run: false])
end
test "start_instance with additional_info" do
expected = build_query(:start_instances, %{
"InstanceId.1" => "i-123456",
"AdditionalInfo" => "TestAdditionalInfo"
})
assert expected == EC2.start_instances(["i-123456"], [additional_info: "TestAdditionalInfo"])
end
test "stop_instances no options" do
expected = build_query(:stop_instances, %{
"InstanceId.1" => "i-123456"
})
assert expected == EC2.stop_instances(["i-123456"])
end
test "stop_instances by force" do
expected = build_query(:stop_instances, %{
"InstanceId.1" => "i-123456",
"InstanceId.2" => "i-1234abc",
"Force" => true
})
assert expected == EC2.stop_instances(["i-123456", "i-1234abc"], [force: true])
end
test "stop_instances with dry_run" do
expected = build_query(:stop_instances, %{
"InstanceId" => "i-123456",
"DryRun" => true
})
assert expected == EC2.stop_instances("i-123456", [dry_run: true])
end
test "run_instances with no options" do
expected = build_query(:run_instances, %{
"ImageId" => "ami-123456",
"MinCount" => 3,
"MaxCount" => 3
})
assert expected == EC2.run_instances("ami-123456", 3, 3)
end
test "run_instances with block_device_mappings" do
expected = build_query(:run_instances, %{
"ImageId" => "ami-123456",
"MinCount" => 3,
"MaxCount" => 3,
"BlockDeviceMapping.1.DeviceName" => "/dev/sdc",
"BlockDeviceMapping.1.VirtualName" => "ephemeral10",
"BlockDeviceMapping.2.DeviceName" => "/dev/sdd",
"BlockDeviceMapping.2.VirtualName" => "ephemeral11",
"BlockDeviceMapping.3.DeviceName" => "/dev/sdf",
"BlockDeviceMapping.3.Ebs.DeleteOnTermination" => true,
"BlockDeviceMapping.3.Ebs.VolumeSize" => 100
})
assert expected == EC2.run_instances("ami-123456", 3, 3,
[block_device_mappings: [
[device_name: "/dev/sdc", virtual_name: "ephemeral10"],
[device_name: "/dev/sdd", virtual_name: "ephemeral11"],
[device_name: "/dev/sdf", ebs: [delete_on_termination: true, volume_size: 100]]
]])
end
test "run_instances with client_token" do
expected = build_query(:run_instances, %{
"ImageId" => "ami-123456",
"MinCount" => 3,
"MaxCount" => 3,
"ClientToken" => "TestClientToken"
})
assert expected == EC2.run_instances("ami-123456", 3, 3, [client_token: "TestClientToken"])
end
test "run_instances with disable_api_termination" do
expected = build_query(:run_instances, %{
"ImageId" => "ami-123456",
"MinCount" => 3,
"MaxCount" => 3,
"DisableApiTermination" => true
})
assert expected == EC2.run_instances("ami-123456", 3, 3, [disable_api_termination: true])
end
test "run_instances with dry_run" do
expected = build_query(:run_instances, %{
"ImageId" => "ami-123456",
"MinCount" => 3,
"MaxCount" => 3,
"DryRun" => true
})
assert expected == EC2.run_instances("ami-123456", 3, 3, [dry_run: true])
end
test "run_instances with iam_instance_profile" do
expected = build_query(:run_instances, %{
"ImageId" => "ami-123456",
"MinCount" => 3,
"MaxCount" => 3,
"IamInstanceProfile.Arn" => "TestArn",
"IamInstanceProfile.Name" => "TestName"
})
assert expected == EC2.run_instances("ami-123456", 3, 3,
[iam_instance_profile: [arn: "TestArn", name: "TestName"]])
end
test "run_instances with ipv6_addresses and ipv6_address_count" do
expected = build_query(:run_instances, %{
"ImageId" => "ami-123456",
"MinCount" => 3,
"MaxCount" => 3,
"Ipv6Address.1.Ipv6Address" => "10.0.0.6",
"Ipv6Address.2.Ipv6Address" => "10.0.0.7",
"Ipv6AddressCount" => 2
})
assert expected == EC2.run_instances("ami-123456", 3, 3,
[ipv6_addresses: [[ipv6_address: "10.0.0.6"], [ipv6_address: "10.0.0.7"]], ipv6_address_count: 2])
end
test "run_instances with network_interfaces" do
expected = build_query(:run_instances, %{
"MinCount" => 3,
"MaxCount" => 6,
"ImageId" => "ami-123456",
"NetworkInterface.1.AssociatePublicIpAddress" => true,
"NetworkInterface.1.DeleteOnTermination" => true,
"NetworkInterface.1.Description" => "TestDescription",
"NetworkInterface.1.DeviceIndex" => 3,
"NetworkInterface.1.Ipv6AddressCount" => 3,
"NetworkInterface.1.Ipv6Addresses.1.Ipv6Address" => "10.0.0.1/16",
"NetworkInterface.1.Ipv6Addresses.2.Ipv6Address" => "10.0.0.2/16",
"NetworkInterface.1.NetworkInterfaceId" => "TestNetworkIntefaceId",
"NetworkInterface.1.PrivateIpAddress" => "TestPrivateIpAddress",
"NetworkInterface.1.PrivateIpAddresses.1.PrivateIpAddress" => "10.0.1.2",
"NetworkInterface.1.PrivateIpAddresses.1.Primary" => true,
"NetworkInterface.1.PrivateIpAddresses.2.PrivateIpAddress" => "10.0.2.3",
"NetworkInterface.1.PrivateIpAddresses.2.Primary" => false,
"NetworkInterface.1.Groups.1" => "sg-123456",
"NetworkInterface.1.Groups.2" => "sg-987654",
"NetworkInterface.1.SubnetId" => "sub-123456",
"NetworkInterface.2.SubnetId" => "sub-987654"
})
assert expected == EC2.run_instances("ami-123456", 3, 6,
[network_interfaces: [
[associate_public_ip_address: true,
delete_on_termination: true,
description: "TestDescription",
device_index: 3,
ipv6_address_count: 3,
ipv6_addresses: [
[ipv6_address: "10.0.0.1/16"],
[ipv6_address: "10.0.0.2/16"]
],
network_interface_id: "TestNetworkIntefaceId",
private_ip_address: "TestPrivateIpAddress",
private_ip_addresses: [
[private_ip_address: "10.0.1.2", primary: true],
[private_ip_address: "10.0.2.3", primary: false]
],
groups: ["sg-123456", "sg-987654"],
subnet_id: "sub-123456"
],
[subnet_id: "sub-987654"]
]])
end
test "report_instance_status no options" do
expected = build_query(:report_instance_status, %{
"InstanceId.1" => "i-123456",
"InstanceId.2" => "i-abcdefg",
"Status" => "ok"
})
assert expected == EC2.report_instance_status(["i-123456", "i-abcdefg"], "ok")
end
test "report_instance_status with start_time and reason_codes" do
start_time = %DateTime{year: 2015, month: 5, day: 12, zone_abbr: "UTC",
hour: 12, minute: 20, second: 31, microsecond: {0, 0},
utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
expected = build_query(:report_instance_status, %{
"InstanceId.1" => "i-123456",
"Status" => "ok",
"StartTime" => DateTime.to_iso8601(start_time),
"ReasonCode.1" => "instance-stuck-in-state",
"ReasonCode.2" => "unresponsive"
})
assert expected == EC2.report_instance_status(["i-123456"], "ok",
[start_time: start_time,
reason_codes: ["instance-stuck-in-state", "unresponsive"]])
end
test "report_instance_status with end_time" do
end_time = %DateTime{year: 2015, month: 5, day: 12, zone_abbr: "UTC",
hour: 12, minute: 20, second: 31, microsecond: {0, 0},
utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
expected = build_query(:report_instance_status, %{
"InstanceId.1" => "i-123456",
"Status" => "ok",
"EndTime" => DateTime.to_iso8601(end_time)
})
assert expected == EC2.report_instance_status(["i-123456"], "ok", [end_time: end_time])
end
test "report_instance_status with description" do
expected = build_query(:report_instance_status, %{
"InstanceId.1" => "i-123456",
"Status" => "ok",
"Description" => "Test Description"
})
assert expected == EC2.report_instance_status(["i-123456"], "ok", [description: "Test Description"])
end
test "report_instance_status with dry_run" do
expected = build_query(:report_instance_status, %{
"InstanceId.1" => "i-123456",
"Status" => "ok",
"DryRun" => true
})
assert expected == EC2.report_instance_status(["i-123456"], "ok", [dry_run: true])
end
test "monitor_instances no options" do
expected = build_query(:monitor_instances, %{
"InstanceId.1" => "i-123456",
"InstanceId.2" => "i-a1b2c3"
})
assert expected == EC2.monitor_instances(["i-123456", "i-a1b2c3"])
end
test "monitor_instances with dry_run" do
expected = build_query(:monitor_instances, %{
"InstanceId.1" => "i-123456",
"DryRun" => true
})
assert expected == EC2.monitor_instances(["i-123456"], [dry_run: true])
end
test "unmonitor_instances no options" do
expected = build_query(:unmonitor_instances, %{
"InstanceId.1" => "i-123456",
"InstanceId.2" => "i-a1b2c3"
})
assert expected == EC2.unmonitor_instances(["i-123456", "i-a1b2c3"])
end
test "unmonitor_instances wtih dry_run option" do
expected = build_query(:unmonitor_instances, %{
"InstanceId.1" => "i-123456",
"DryRun" => true
})
assert expected == EC2.unmonitor_instances(["i-123456"], [dry_run: true])
end
test "describe_instance_attribute no options" do
expected = build_query(:describe_instance_attribute, %{
"InstanceId" => "i-123456",
"Attribute" => "description"
})
assert expected == EC2.describe_instance_attribute("i-123456", "description")
end
test "describe_instance_attribute with dry_run" do
expected = build_query(:describe_instance_attribute, %{
"InstanceId" => "i-123456",
"Attribute" => "description",
"DryRun" => true
})
assert expected == EC2.describe_instance_attribute("i-123456", "description", [dry_run: true])
end
test "modify_instance_attribute with attribute" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"Attribute" => "instanceType"
})
assert expected == EC2.modify_instance_attribute("i-123456", [attribute: "instanceType"])
end
test "modify_instance_attribute with block_device_mappings" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"BlockDeviceMapping.1.Ebs.DeleteOnTermination" => true,
"BlockDeviceMapping.1.Ebs.VolumeSize" => 100,
"BlockDeviceMapping.1.Ebs.VolumeType" => "gp2",
"BlockDeviceMapping.1.DeviceName" => "xvdb",
"BlockDeviceMapping.2.Ebs.DeleteOnTermination" => false,
"BlockDeviceMapping.2.Ebs.VolumeSize" => 1000,
"BlockDeviceMapping.2.Ebs.VolumeType" => "io1",
"BlockDeviceMapping.2.DeviceName" => "xvdc",
"BlockDeviceMapping.2.VirtualName" => "boop"
})
assert expected == EC2.modify_instance_attribute("i-123456",
[block_device_mappings: [
[device_name: "xvdb", ebs: [delete_on_termination: true, volume_size: 100, volume_type: "gp2"]],
[device_name: "xvdc", virtual_name: "boop", ebs: [delete_on_termination: false, volume_size: 1000, volume_type: "io1"]]
]])
end
test "modify_instance_attribute with disable_api_termination" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"DisableApiTermination.Value" => true
})
assert expected == EC2.modify_instance_attribute("i-123456",
[disable_api_termination: [value: true]])
end
test "modify_instance_attribute with dry_run" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"DryRun" => true
})
assert expected == EC2.modify_instance_attribute("i-123456",
[dry_run: true])
end
test "modify_instance_attribute with ebs_optimized" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"EbsOptimized.Value" => true
})
assert expected == EC2.modify_instance_attribute("i-123456",
[ebs_optimized: [value: true]])
end
test "modify_instance_attribute with ena_support" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"EnaSupport.Value" => false
})
assert expected == EC2.modify_instance_attribute("i-123456",
[ena_support: [value: false]])
end
test "modify_instance_attribute with group_ids" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"GroupId.1" => "sg-123456",
"GroupId.2" => "sg-wasd"
})
assert expected == EC2.modify_instance_attribute("i-123456",
[group_ids: ["sg-123456", "sg-wasd"]])
end
test "modify_instance_attribute with instance_initiated_shutdown_behavior" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"InstanceInitiatedShutdownBehavior.Value" => "terminate"
})
assert expected == EC2.modify_instance_attribute("i-123456",
[instance_initiated_shutdown_behavior: [value: "terminate"]])
end
test "modify_instance_attribute with instance_type" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"InstanceType.Value" => "m1.small"
})
assert expected == EC2.modify_instance_attribute("i-123456",
[instance_type: [value: "m1.small"]])
end
test "modify_instance_attribute with kernel" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"Kernel.Value" => "test_kernel"
})
assert expected == EC2.modify_instance_attribute("i-123456",
[kernel: [value: "test_kernel"]])
end
test "modify_instance_attribute with ramdisk" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"Ramdisk.Value" => "test_ramdisk"
})
assert expected == EC2.modify_instance_attribute("i-123456",
[ramdisk: [value: "test_ramdisk"]])
end
test "modify_instance_attribute with source_dest_check" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"SourceDestCheck.Value" => true
})
assert expected == EC2.modify_instance_attribute("i-123456",
[source_dest_check: [value: true]])
end
test "modify_instance_attribute with sriov_net_support" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"SriovNetSupport.Value" => "test_sriov_net_support"
})
assert expected == EC2.modify_instance_attribute("i-123456",
[sriov_net_support: [value: "test_sriov_net_support"]])
end
test "modify_instance_attribute with user_data" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"UserData.Value" => "test_user_data"
})
assert expected == EC2.modify_instance_attribute("i-123456",
[user_data: [value: "test_user_data"]])
end
test "modify_instance_attribute with value" do
expected = build_query(:modify_instance_attribute, %{
"InstanceId" => "i-123456",
"Value" => "test_value"
})
assert expected == EC2.modify_instance_attribute("i-123456",
[value: "test_value"])
end
test "reset_instance_attribute no options" do
expected = build_query(:reset_instance_attribute, %{
"InstanceId" => "i-123456",
"Attribute" => "kernel"
})
assert expected == EC2.reset_instance_attribute("i-123456", "kernel")
end
test "reset_instance_attribute with dry_run" do
expected = build_query(:reset_instance_attribute, %{
"InstanceId" => "i-123456",
"Attribute" => "kernel",
"DryRun" => true
})
assert expected == EC2.reset_instance_attribute("i-123456", "kernel", [dry_run: true])
end
test "get_console_output no options" do
expected = build_query(:get_console_output, %{
"InstanceId" => "i-123456"
})
assert expected == EC2.get_console_output("i-123456")
end
test "get_console_output with dry_run" do
expected = build_query(:get_console_output, %{
"InstanceId" => "i-123456",
"DryRun" => true
})
assert expected == EC2.get_console_output("i-123456", [dry_run: true])
end
test "get_password_data no options" do
expected = build_query(:get_password_data, %{
"InstanceId" => "i-123456"
})
assert expected == EC2.get_password_data("i-123456")
end
test "get_password_data with dry_run" do
expected = build_query(:get_password_data, %{
"InstanceId" => "i-123456",
"DryRun" => true
})
assert expected == EC2.get_password_data("i-123456", [dry_run: true])
end
#############
# AMI Tests #
#############
test "create_image no options" do
expected = build_query(:create_image, %{
"InstanceId" => "i-123456",
"Name" => "TestName"
})
assert expected == EC2.create_image("i-123456", "TestName")
end
test "create_image with block_device_mappings" do
expected = build_query(:create_image, %{
"InstanceId" => "i-123456",
"Name" => "TestName",
"BlockDeviceMapping.1.Ebs.DeleteOnTermination" => true,
"BlockDeviceMapping.1.Ebs.VolumeSize" => 100,
"BlockDeviceMapping.1.Ebs.VolumeType" => "gp2",
"BlockDeviceMapping.1.DeviceName" => "xvdb"
})
assert expected == EC2.create_image("i-123456", "TestName",
[block_device_mappings: [
[
device_name: "xvdb",
ebs: [
delete_on_termination: true,
volume_size: 100,
volume_type: "gp2",
]
]
]
])
end
test "create_image with description" do
expected = build_query(:create_image, %{
"InstanceId" => "i-123456",
"Name" => "TestName",
"Description" => "TestDescription"
})
assert expected == EC2.create_image("i-123456", "TestName", [description: "TestDescription"])
end
test "create_image with dry_run" do
expected = build_query(:create_image, %{
"InstanceId" => "i-123456",
"Name" => "TestName",
"DryRun" => true
})
assert expected == EC2.create_image("i-123456", "TestName", [dry_run: true])
end
test "create_image with no_reboot" do
expected = build_query(:create_image, %{
"InstanceId" => "i-123456",
"Name" => "TestName",
"NoReboot" => true
})
assert expected == EC2.create_image("i-123456", "TestName", [no_reboot: true])
end
test "copy_image no options" do
expected = build_query(:copy_image, %{
"SourceRegion" => "us-west-2",
"SourceImageId" => "ami-1a2b3c4d",
"Name" => "Test AMI"
})
assert expected == EC2.copy_image("Test AMI", "ami-1a2b3c4d", "us-west-2")
end
test "copy_image with client_token" do
expected = build_query(:copy_image, %{
"SourceRegion" => "us-west-2",
"SourceImageId" => "ami-1a2b3c4d",
"Name" => "Test AMI",
"ClientToken" => "TestClientToken"
})
assert expected == EC2.copy_image("Test AMI", "ami-1a2b3c4d", "us-west-2",
[client_token: "TestClientToken"])
end
test "copy_image with description" do
expected = build_query(:copy_image, %{
"SourceRegion" => "us-west-2",
"SourceImageId" => "ami-1a2b3c4d",
"Name" => "Test AMI",
"Description" => "Test Description"
})
assert expected == EC2.copy_image("Test AMI", "ami-1a2b3c4d", "us-west-2",
[description: "Test Description"])
end
test "copy_image with dry_run" do
expected = build_query(:copy_image, %{
"SourceRegion" => "us-west-2",
"SourceImageId" => "ami-1a2b3c4d",
"Name" => "Test AMI",
"DryRun" => true
})
assert expected == EC2.copy_image("Test AMI", "ami-1a2b3c4d", "us-west-2",
[dry_run: true])
end
test "copy_image with encrypted" do
expected = build_query(:copy_image, %{
"SourceRegion" => "us-west-2",
"SourceImageId" => "ami-1a2b3c4d",
"Name" => "Test AMI",
"Encrypted" => true
})
assert expected == EC2.copy_image("Test AMI", "ami-1a2b3c4d", "us-west-2",
[encrypted: true])
end
test "copy_image with kms_key_id" do
expected = build_query(:copy_image, %{
"SourceRegion" => "us-west-2",
"SourceImageId" => "ami-1a2b3c4d",
"Name" => "Test AMI",
"KmsKeyId" => "Test_Kms_Key_Id"
})
assert expected == EC2.copy_image("Test AMI", "ami-1a2b3c4d", "us-west-2",
[kms_key_id: "Test_Kms_Key_Id"])
end
test "describe_images" do
expected = build_query(:describe_images, %{})
assert expected == EC2.describe_images
end
test "describe_images with image ids" do
expected = build_query(:describe_images, %{
"ImageId.1" => "ami-1234567",
"ImageId.2" => "ami-test123"
})
assert expected == EC2.describe_images(
[image_ids: ["ami-1234567", "ami-test123"]])
end
test "describe_images with owners" do
expected = build_query(:describe_images, %{
"Owner.1" => "test_owner",
"Owner.2" => "aws"
})
assert expected == EC2.describe_images(
owners: ["test_owner", "aws"]
)
end
test "describe_images with filters" do
expected = build_query(:describe_images, %{
"Filter.1.Name" => "is-public",
"Filter.1.Value.1" => true,
"Filter.2.Name" => "architecture",
"Filter.2.Value.1" => "x86_64",
"Filter.3.Name" => "platform",
"Filter.3.Value.1" => "windows",
"Filter.3.Value.2" => "linux"
})
assert expected == EC2.describe_images(
[
filters: [
"is-public": [true],
"architecture": ["x86_64"],
"platform": ["windows", "linux"]
]
]
)
end
test "describe_images with executable_by_list" do
expected = build_query(:describe_images, %{
"ExecutableBy.1" => "dog",
"ExecutableBy.2" => "me",
"ExecutableBy.3" => "rhino"
})
assert expected == EC2.describe_images(
[executable_by_list: ["dog", "me", "rhino"]]
)
end
test "describe_images with dry_run" do
expected = build_query(:describe_images, %{
"DryRun" => true
})
assert expected == EC2.describe_images(
[dry_run: true]
)
end
test "describe_image_attributes no options" do
expected = build_query(:describe_image_attribute, %{
"ImageId" => "ami-1234567",
"Attribute" => "description"
})
assert expected == EC2.describe_image_attribute("ami-1234567", "description")
end
test "describe_image_attribute with dry_run" do
expected = build_query(:describe_image_attribute, %{
"ImageId" => "ami-1234567",
"Attribute" => "description",
"DryRun" => true
})
assert expected == EC2.describe_image_attribute("ami-1234567", "description",
[dry_run: true])
end
test "modify_image_attribute no options" do
expected = build_query(:modify_image_attribute, %{
"ImageId" => "ami-123456"
})
assert expected == EC2.modify_image_attribute("ami-123456")
end
test "modify_image_attributes with launch_permission" do
expected = build_query(:modify_image_attribute, %{
"ImageId" => "ami-123456",
"LaunchPermission.Add.1.Group" => "a12",
"LaunchPermission.Remove.1.UserId" => "999988887777",
"LaunchPermission.Add.2.UserId" => "111122223333",
"LaunchPermission.Add.2.Group" => "testGroup"
})
assert expected == EC2.modify_image_attribute("ami-123456", [
launch_permission: [
remove:
[
[user_id: "999988887777"]
],
add:
[
[group: "a12"],
[user_id: "111122223333", group: "testGroup"]
]
]
])
end
test "modify_image_attributes with description" do
expected = build_query(:modify_image_attribute, %{
"ImageId" => "ami-123456",
"Description.Value" => "New Description"
})
assert expected == EC2.modify_image_attribute("ami-123456", [
description: [value: "New Description"]
])
end
test "modify_image_attributes with product_codes" do
expected = build_query(:modify_image_attribute, %{
"ImageId" => "ami-123456",
"ProductCode.1" => "774F4FF8",
"ProductCode.2" => "12345ABC"
})
assert expected == EC2.modify_image_attribute("ami-123456",
[
product_codes: ["774F4FF8", "12345ABC"]
])
end
test "reset_image_attribute no options" do
expected = build_query(:reset_image_attribute, %{
"ImageId" => "ami-123456",
"Attribute" => "launchPermission"
})
assert expected == EC2.reset_image_attribute("ami-123456", "launchPermission")
end
  # Verifies ResetImageAttribute forwards the optional DryRun flag.
  test "reset_image_attribute with dry_run" do
    expected = build_query(:reset_image_attribute, %{
      "ImageId" => "ami-123456",
      "Attribute" => "launchPermission",
      "DryRun" => true
    })
    assert expected == EC2.reset_image_attribute("ami-123456", "launchPermission", [dry_run: true])
  end
  # RegisterImage: each test below checks that one snake_case option maps to
  # its CamelCase EC2 query parameter.
  test "register_image no options" do
    expected = build_query(:register_image, %{
      "Name" => "Test"
    })
    assert expected == EC2.register_image("Test")
  end
  test "register_image with architecture" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "Architecture" => "x86"
    })
    assert expected == EC2.register_image("Test", [architecture: "x86"])
  end
  # List options serialize as 1-based indexed keys (BillingProduct.N).
  test "register_image with billing_products" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "BillingProduct.1" => "TestProduct_1",
      "BillingProduct.2" => "TestProduct_2"
    })
    assert expected == EC2.register_image("Test", [
      billing_products: ["TestProduct_1", "TestProduct_2"]
    ])
  end
  # Nested keyword lists flatten into dotted paths (BlockDeviceMapping.N.Ebs.*).
  test "register_image with block_device_mappings" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "BlockDeviceMapping.1.DeviceName" => "/dev/sda1",
      "BlockDeviceMapping.1.Ebs.SnapshotId" => "snap-1234567890abcdef0",
      "BlockDeviceMapping.2.DeviceName" => "/dev/sdb",
      "BlockDeviceMapping.2.Ebs.SnapshotId" => "snap-1234567890abcdef1",
      "BlockDeviceMapping.3.DeviceName" => "/dev/sdc",
      "BlockDeviceMapping.3.Ebs.VolumeSize" => 100
    })
    assert expected == EC2.register_image("Test", [block_device_mappings: [
      [device_name: "/dev/sda1", ebs: [snapshot_id: "snap-1234567890abcdef0"]],
      [device_name: "/dev/sdb", ebs: [snapshot_id: "snap-1234567890abcdef1"]],
      [device_name: "/dev/sdc", ebs: [volume_size: 100]]
    ]])
  end
  test "register_image with description" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "Description" => "TestDescription"
    })
    assert expected == EC2.register_image("Test", [description: "TestDescription"])
  end
  test "register_image with dry_run" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "DryRun" => true
    })
    assert expected == EC2.register_image("Test", [dry_run: true])
  end
test "register image with ena_support" do
expected = build_query(:register_image, %{
"Name" => "Test",
"EnaSupport" => true
})
assert expected == EC2.register_image("Test", [ena_support: true])
end
test "register image with image_location" do
expected = build_query(:register_image, %{
"Name" => "Test",
"ImageLocation" => "/path/to/here"
})
assert expected == EC2.register_image("Test", [image_location: "/path/to/here"])
end
  test "register_image with kernel_id" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "KernelId" => "TestKernelId"
    })
    assert expected == EC2.register_image("Test", [kernel_id: "TestKernelId"])
  end
  test "register_image with ramdisk_id" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "RamdiskId" => "TestRamDiskId"
    })
    assert expected == EC2.register_image("Test", [ramdisk_id: "TestRamDiskId"])
  end
  test "register_image with root_device_name" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "RootDeviceName" => "/dev/sda1"
    })
    assert expected == EC2.register_image("Test", [root_device_name: "/dev/sda1"])
  end
  test "register_image with sriov_net_support" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "SriovNetSupport" => "simple"
    })
    assert expected == EC2.register_image("Test", [sriov_net_support: "simple"])
  end
  test "register_image with virtualization_type" do
    expected = build_query(:register_image, %{
      "Name" => "Test",
      "VirtualizationType" => "paravirtual"
    })
    assert expected == EC2.register_image("Test", [virtualization_type: "paravirtual"])
  end
  test "deregister_image no options" do
    expected = build_query(:deregister_image, %{
      "ImageId" => "ami-123456"
    })
    assert expected == EC2.deregister_image("ami-123456")
  end
  test "deregister_image with dry_run option" do
    expected = build_query(:deregister_image, %{
      "ImageId" => "ami-123456",
      "DryRun" => true
    })
    assert expected == EC2.deregister_image("ami-123456", [dry_run: true])
  end
  #################
  # Volumes Tests #
  #################
  test "attach_volume no options" do
    expected = build_query(:attach_volume, %{
      "InstanceId" => "i-123456",
      "VolumeId" => "vol-123456",
      "Device" => "/dev/sdb"
    })
    assert expected == EC2.attach_volume("i-123456", "vol-123456", "/dev/sdb")
  end
  # DryRun is passed through verbatim, so an explicit false still appears in the query.
  test "attach_volume with dry_run" do
    expected = build_query(:attach_volume, %{
      "InstanceId" => "i-123456",
      "VolumeId" => "vol-123456",
      "Device" => "/dev/sdb",
      "DryRun" => false
    })
    assert expected == EC2.attach_volume("i-123456", "vol-123456", "/dev/sdb", [dry_run: false])
  end
  test "detach_volume no options" do
    expected = build_query(:detach_volume, %{"VolumeId" => "vol-123456"})
    assert expected == EC2.detach_volume("vol-123456")
  end
  test "detach_volume with force" do
    expected = build_query(:detach_volume, %{
      "VolumeId" => "vol-123456",
      "Force" => false
    })
    assert expected == EC2.detach_volume("vol-123456", [force: false])
  end
  test "detach_volume with dry_run" do
    expected = build_query(:detach_volume, %{
      "VolumeId" => "vol-123456",
      "DryRun" => true
    })
    assert expected == EC2.detach_volume("vol-123456", [dry_run: true])
  end
  test "detach_volume with device" do
    expected = build_query(:detach_volume, %{
      "VolumeId" => "vol-123456",
      "Device" => "/dev/sdh"
    })
    assert expected == EC2.detach_volume("vol-123456", [device: "/dev/sdh"])
  end
  test "delete_volume with no additional params" do
    expected = build_query(:delete_volume, %{"VolumeId" => "vol-123456"})
    assert expected == EC2.delete_volume("vol-123456")
  end
  test "delete_volume with dry_run param" do
    expected = build_query(:delete_volume, %{
      "VolumeId" => "vol-123456",
      "DryRun" => true})
    assert expected == EC2.delete_volume("vol-123456", [dry_run: true])
  end
test "create_volume test with tag specifications" do
expected = build_query(:create_volume, %{
"AvailabilityZone" => "us-east-1a",
"TagSpecification.1.ResourceType" => "volume",
"TagSpecification.1.Tag.1.Key" => "tag_key_foo",
"TagSpecification.1.Tag.1.Value" => "tag_value_foo",
"TagSpecification.1.Tag.2.Key" => "tag_key_bar",
"TagSpecification.1.Tag.2.Value" => "tag_value_bar",
"TagSpecification.2.ResourceType" => "volume",
"TagSpecification.2.Tag.1.Key" => "tag_key_baz",
"TagSpecification.2.Tag.1.Value" => "tag_value_baz",
})
assert expected == EC2.create_volume("us-east-1a",
[tag_specifications: [
volume:
[tag_key_foo: "tag_value_foo",
tag_key_bar: "tag_value_bar"],
volume:
[tag_key_baz: "tag_value_baz"]
]
])
end
  test "create_volume test with iops, snapshot ID and volume type" do
    expected = build_query(:create_volume, %{
      "AvailabilityZone" => "us-east-1a",
      "SnapshotId" => "snap-123456",
      "VolumeType" => "io1",
      "Iops" => 3000
    })
    assert expected == EC2.create_volume("us-east-1a",
      [snapshot_id: "snap-123456", volume_type: "io1", iops: 3000])
  end
  test "create_volume test with kms_key_id" do
    expected = build_query(:create_volume, %{
      "AvailabilityZone" => "us-east-1a",
      "KmsKeyId" => "TestKmsKeyId"
    })
    assert expected == EC2.create_volume("us-east-1a",
      [kms_key_id: "TestKmsKeyId"])
  end
  test "create_volume test with encrypted" do
    expected = build_query(:create_volume, %{
      "AvailabilityZone" => "us-east-1a",
      "Encrypted" => true
    })
    assert expected == EC2.create_volume("us-east-1a",
      [encrypted: true])
  end
  test "modify_volume test" do
    expected = build_query(:modify_volume, %{"VolumeId" => "vol-123456"})
    assert expected == EC2.modify_volume("vol-123456")
  end
  test "modify_volume test with iops, size, and volume type" do
    expected = build_query(:modify_volume, %{
      "VolumeId" => "vol-123456",
      "Iops" => 3000,
      "Size" => 1024,
      "VolumeType" => "io1"
    })
    assert expected == EC2.modify_volume("vol-123456",
      [iops: 3000, size: 1024, volume_type: "io1"])
  end
  # Note: the generated action atom is :enable_volume_i_o (AWS action "EnableVolumeIO").
  test "enable_volume_io no options" do
    expected = build_query(:enable_volume_i_o, %{
      "VolumeId" => "vol-123456"
    })
    assert expected == EC2.enable_volume_io("vol-123456")
  end
  test "describe_volumes no options" do
    expected = build_query(:describe_volumes, %{})
    assert expected == EC2.describe_volumes
  end
  test "describe_volumes with filters" do
    expected = build_query(:describe_volumes, %{
      "Filter.1.Name" => "tag-key",
      "Filter.1.Value.1" => "*_db_*",
      "Filter.1.Value.2" => "test"
    })
    assert expected == EC2.describe_volumes([
      filters: ["tag-key": ["*_db_*", "test"]]
    ])
  end
  test "describe_volumes with volume_ids" do
    expected = build_query(:describe_volumes, %{
      "VolumeId.1" => "vol-123456",
      "VolumeId.2" => "vol-1a2b3c",
      "VolumeId.3" => "vol-4d5e6f"
    })
    assert expected == EC2.describe_volumes([
      volume_ids: ["vol-123456", "vol-1a2b3c", "vol-4d5e6f"]
    ])
  end
  test "describe_volume_status no option" do
    expected = build_query(:describe_volume_status, %{})
    assert expected == EC2.describe_volume_status
  end
test "describe_volume_status with filters and volume_ids" do
expected = build_query(:describe_volume_status, %{
"Filter.1.Name" => "availability-zone",
"Filter.1.Value.1" => "us-east-1d",
"Filter.2.Name" => "volume-status.details-name",
"Filter.2.Value.1" => "io-enabled",
"Filter.3.Name" => "volume-status.details-status",
"Filter.3.Value.1" => "failed",
"VolumeId.1" => "vol-1234567",
"VolumeId.2" => "vol-9876543"
})
assert expected == EC2.describe_volume_status([
filters: [
"availability-zone": ["us-east-1d"],
"volume-status.details-name": ["io-enabled"],
"volume-status.details-status": ["failed"],
],
volume_ids: ["vol-1234567", "vol-9876543"]
])
end
  test "modify_volume_attribute" do
    expected = build_query(:modify_volume_attribute, %{
      "VolumeId" => "vol-123456"
    })
    assert expected == EC2.modify_volume_attribute("vol-123456")
  end
  # Value-wrapped options serialize as <Name>.Value (e.g. AutoEnableIO.Value).
  test "modify_volume_attribute with dry_run and auto_enable_io params" do
    expected = build_query(:modify_volume_attribute, %{
      "VolumeId" => "vol-123456",
      "AutoEnableIO.Value" => true,
      "DryRun" => true
    })
    assert expected == EC2.modify_volume_attribute("vol-123456", [auto_enable_io: [value: true], dry_run: true])
  end
  test "describe_volume_attribute no options" do
    expected = build_query(:describe_volume_attribute, %{
      "VolumeId" => "vol-123456",
      "Attribute" => "autoEnableIO"
    })
    assert expected == EC2.describe_volume_attribute("vol-123456", "autoEnableIO")
  end
  test "describe_volume_attribute with dry_run" do
    expected = build_query(:describe_volume_attribute, %{
      "VolumeId" => "vol-123456",
      "Attribute" => "autoEnableIO",
      "DryRun" => true
    })
    assert expected == EC2.describe_volume_attribute("vol-123456", "autoEnableIO", [dry_run: true])
  end
  ###################
  # Snapshots Tests #
  ###################
  test "describe_snapshots no options" do
    expected = build_query(:describe_snapshots, %{})
    assert expected == EC2.describe_snapshots
  end
test "describe_snapshots with filters" do
expected = build_query(:describe_snapshots, %{
"Filter.1.Name" => "status",
"Filter.1.Value.1" => "pending",
"Filter.2.Name" => "tag-value",
"Filter.2.Value.1" => "*_db_*",
})
assert expected == EC2.describe_snapshots(
[filters: ["status": ["pending"], "tag-value": ["*_db_*"]],
])
end
  test "describe_snapshots with owners" do
    expected = build_query(:describe_snapshots, %{
      "Owner.1" => "TestOwner",
      "Owner.2" => "Bees",
      "Owner.3" => "Oatmeal"
    })
    assert expected == EC2.describe_snapshots([owners: ["TestOwner", "Bees", "Oatmeal"]])
  end
  test "create_snapshot no options" do
    expected = build_query(:create_snapshot, %{
      "VolumeId" => "vol-123456"
    })
    assert expected == EC2.create_snapshot("vol-123456")
  end
  test "create_snapshot with description" do
    expected = build_query(:create_snapshot, %{
      "VolumeId" => "vol-123456",
      "Description" => "Test Description"
    })
    assert expected == EC2.create_snapshot("vol-123456", [description: "Test Description"])
  end
  # CopySnapshot takes the source snapshot and source region as positional args.
  test "copy_snapshot with no options" do
    expected = build_query(:copy_snapshot, %{
      "SourceSnapshotId" => "snap-123456",
      "SourceRegion" => "us-east-1"
    })
    assert expected == EC2.copy_snapshot("snap-123456", "us-east-1")
  end
  test "copy_snapshot with description" do
    expected = build_query(:copy_snapshot, %{
      "SourceSnapshotId" => "snap-123456",
      "SourceRegion" => "us-east-1",
      "Description" => "TestDescription"
    })
    assert expected == EC2.copy_snapshot("snap-123456", "us-east-1", [description: "TestDescription"])
  end
  test "copy_snapshot with destination_region" do
    expected = build_query(:copy_snapshot, %{
      "SourceSnapshotId" => "snap-123456",
      "SourceRegion" => "us-east-1",
      "DestinationRegion" => "us-west-1"
    })
    assert expected == EC2.copy_snapshot("snap-123456", "us-east-1", [destination_region: "us-west-1"])
  end
  test "copy_snapshot with kms_key_id" do
    expected = build_query(:copy_snapshot, %{
      "SourceSnapshotId" => "snap-123456",
      "SourceRegion" => "us-east-1",
      "KmsKeyId" => "TestKmsKeyId"
    })
    assert expected == EC2.copy_snapshot("snap-123456", "us-east-1", [kms_key_id: "TestKmsKeyId"])
  end
  test "copy_snapshot with all optional options" do
    expected = build_query(:copy_snapshot, %{
      "SourceSnapshotId" => "snap-123456",
      "SourceRegion" => "us-east-1",
      "KmsKeyId" => "Test_Kms_Key_Id",
      "DestinationRegion" => "us-west-1",
      "Encrypted" => true,
      "PresignedUrl" => "Test_Url",
      "DryRun" => true
    })
    assert expected == EC2.copy_snapshot("snap-123456", "us-east-1",
      [kms_key_id: "Test_Kms_Key_Id",
       destination_region: "us-west-1",
       encrypted: true,
       presigned_url: "Test_Url",
       dry_run: true])
  end
  test "delete_snapshot with no options" do
    expected = build_query(:delete_snapshot, %{
      "SnapshotId" => "snap-123456"
    })
    assert expected == EC2.delete_snapshot("snap-123456")
  end
  test "delete_snapshot with dry_run" do
    expected = build_query(:delete_snapshot, %{
      "SnapshotId" => "snap-123456",
      "DryRun" => true
    })
    assert expected == EC2.delete_snapshot("snap-123456", [dry_run: true])
  end
  test "describe_snapshot_attribute no options" do
    expected = build_query(:describe_snapshot_attribute, %{
      "SnapshotId" => "snap-123456",
      "Attribute" => "productCodes"
    })
    assert expected == EC2.describe_snapshot_attribute("snap-123456", "productCodes")
  end
  test "describe_snapshot_attribute dry_run" do
    expected = build_query(:describe_snapshot_attribute, %{
      "SnapshotId" => "snap-123456",
      "Attribute" => "productCodes",
      "DryRun" => true
    })
    assert expected == EC2.describe_snapshot_attribute("snap-123456", "productCodes", [dry_run: true])
  end
  test "modify_snapshot_attribute no options" do
    expected = build_query(:modify_snapshot_attribute, %{
      "SnapshotId" => "snap-123456"
    })
    assert expected == EC2.modify_snapshot_attribute("snap-123456")
  end
  # Add/Remove lists nest one level deeper: CreateVolumePermission.{Add,Remove}.N.<Field>
  test "modify_snapshot_attribute with create_volume_permission" do
    expected = build_query(:modify_snapshot_attribute, %{
      "SnapshotId" => "snap-123456",
      "CreateVolumePermission.Add.1.Group" => "a11",
      "CreateVolumePermission.Remove.1.UserId" => "111122223333"
    })
    assert expected == EC2.modify_snapshot_attribute("snap-123456", [
      create_volume_permission: [
        add: [
          [group: "a11"]
        ],
        remove: [
          [user_id: "111122223333"]
        ]
      ]
    ])
  end
  test "reset_snapshot_attribute no options" do
    expected = build_query(:reset_snapshot_attribute, %{
      "SnapshotId" => "snap-123456",
      "Attribute" => "description"
    })
    assert expected == EC2.reset_snapshot_attribute("snap-123456", "description")
  end
  ###########################
  # Bundle Tasks Operations #
  ###########################
  # BundleInstance takes all six S3 storage fields as positional arguments.
  test "bundle_instance with no options" do
    expected = build_query(:bundle_instance, %{
      "InstanceId" => "i-123456",
      "Storage.S3.AWSAccessKeyId" => "TestAwsAccessKeyId",
      "Storage.S3.Bucket" => "TestBucket",
      "Storage.S3.Prefix" => "TestPrefix",
      "Storage.S3.UploadPolicy" => "TestUploadPolicy",
      "Storage.S3.UploadPolicySignature" => "TestUploadPolicySignature"
    })
    assert expected == EC2.bundle_instance("i-123456", "TestAwsAccessKeyId", "TestBucket", "TestPrefix", "TestUploadPolicy", "TestUploadPolicySignature")
  end
  test "cancel_bundle_task with no options" do
    expected = build_query(:cancel_bundle_task, %{
      "BundleId" => "test_bundle_id"
    })
    assert expected == EC2.cancel_bundle_task("test_bundle_id")
  end
  test "cancel_bundle_task with dry_run options" do
    expected = build_query(:cancel_bundle_task, %{
      "BundleId" => "test_bundle_id",
      "DryRun" => true
    })
    assert expected == EC2.cancel_bundle_task("test_bundle_id", [dry_run: true])
  end
  test "describe_bundle_tasks with no options" do
    expected = build_query(:describe_bundle_tasks, %{})
    assert expected == EC2.describe_bundle_tasks
  end
  test "describe_bundle_tasks with bundle_ids" do
    expected = build_query(:describe_bundle_tasks, %{
      "BundleId.1" => "bun-c1a540a8",
      "BundleId.2" => "bun-1a2b3c4d",
      "BundleId.3" => "bun-12345678"
    })
    assert expected == EC2.describe_bundle_tasks([bundle_ids: ["bun-c1a540a8", "bun-1a2b3c4d", "bun-12345678"]])
  end
  test "describe_bundle_tasks with filters" do
    expected = build_query(:describe_bundle_tasks, %{
      "Filter.1.Name" => "s3-bucket",
      "Filter.1.Value.1" => "myawsbucket",
      "Filter.2.Name" => "state",
      "Filter.2.Value.1" => "completed",
      "Filter.2.Value.2" => "failed"
    })
    assert expected == EC2.describe_bundle_tasks([filters: [
      "s3-bucket": ["myawsbucket"],
      "state": ["completed", "failed"]
    ]])
  end
  ##############
  # Tags Tests #
  ##############
  test "describe_tags" do
    expected = build_query(:describe_tags, %{})
    assert expected == EC2.describe_tags
  end
  test "describe_tags with filters" do
    expected = build_query(:describe_tags, %{
      "Filter.1.Name" => "resource-type",
      "Filter.1.Value.1" => "instance",
      "Filter.1.Value.2" => "snapshot"
    })
    assert expected == EC2.describe_tags(
      [filters: [
        "resource-type": ["instance", "snapshot"]
      ]])
  end
  # An empty tag value is legal and serializes as an empty string.
  test "create_tags no options" do
    expected = build_query(:create_tags, %{
      "ResourceId.1" => "ami-1a2b3c4d",
      "ResourceId.2" => "i-1234567890abcdefg",
      "Tag.1.Key" => "webserver",
      "Tag.1.Value" => "",
      "Tag.2.Key" => "stack",
      "Tag.2.Value" => "Production"
    })
    assert expected == EC2.create_tags(
      ["ami-1a2b3c4d", "i-1234567890abcdefg"],
      ["webserver": "", "stack": "Production"])
  end
  test "delete_tags with no options" do
    expected = build_query(:delete_tags, %{
      "ResourceId.1" => "ami-1a2b3c4ed"
    })
    assert expected == EC2.delete_tags(["ami-1a2b3c4ed"])
  end
  test "delete_tags with tags" do
    expected = build_query(:delete_tags, %{
      "ResourceId.1" => "ami-1a2b3c4ed",
      "Tag.1.Key" => "webserver",
      "Tag.1.Value" => "",
      "Tag.2.Key" => "stack",
      "Tag.2.Value" => ""
    })
    assert expected == EC2.delete_tags(
      ["ami-1a2b3c4ed"],
      [tags: ["webserver": "", "stack": ""]])
  end
  test "delete_tags with dry_run" do
    expected = build_query(:delete_tags, %{
      "ResourceId.1" => "ami-1234567",
      "ResourceId.2" => "i-abc123def456",
      "DryRun" => true
    })
    assert expected == EC2.delete_tags(
      ["ami-1234567", "i-abc123def456"], [dry_run: true])
  end
  ########################################
  # Regions and Availability Zones Tests #
  ########################################
  test "describe_availability_zones with zone names" do
    expected = build_query(:describe_availability_zones, %{
      "ZoneName.1" => "us-east-1d",
      "ZoneName.2" => "us-east-1a"
    })
    assert expected == EC2.describe_availability_zones(
      [zone_names: ["us-east-1d", "us-east-1a"]])
  end
  test "describe_regions" do
    expected = build_query(:describe_regions, %{})
    assert expected == EC2.describe_regions
  end
  test "describe_regions with region names" do
    expected = build_query(:describe_regions, %{
      "RegionName.1" => "us-east-1",
      "RegionName.2" => "eu-west-1"
    })
    assert expected == EC2.describe_regions(
      [region_names: ["us-east-1", "eu-west-1"]])
  end
  ######################
  # Resource Ids Tests #
  ######################
  test "describe_id_format" do
    expected = build_query(:describe_id_format, %{})
    assert expected == EC2.describe_id_format
  end
  test "describe_id_format with instance resource" do
    expected = build_query(:describe_id_format, %{
      "Resource" => "instance"
    })
    assert expected == EC2.describe_id_format([resource: "instance"])
  end
  test "modify_id_format" do
    expected = build_query(:modify_id_format, %{
      "Resource" => "instance",
      "UseLongIds" => true
    })
    assert expected == EC2.modify_id_format("instance", true)
  end
  ############################
  # Account Attributes Tests #
  ############################
  test "describe_account_attributes" do
    expected = build_query(:describe_account_attributes, %{})
    assert expected == EC2.describe_account_attributes
  end
  test "describe_account_attributes with attribute name" do
    expected = build_query(:describe_account_attributes, %{
      "AttributeName.1" => "supported-platforms"
    })
    assert expected ==
      EC2.describe_account_attributes([attribute_names: ["supported-platforms"]])
  end
  #############
  # VPC Tests #
  #############
  test "describe_vpcs" do
    expected = build_query(:describe_vpcs, %{})
    assert expected == EC2.describe_vpcs
  end
  test "describe_vpcs with filters" do
    expected = build_query(:describe_vpcs, %{
      "Filter.1.Name" => "options-id",
      "Filter.1.Value.1" => "dopt-7a8b9c2d",
      "Filter.1.Value.2" => "dopt-2b2a3d3c",
      "Filter.2.Name" => "state",
      "Filter.2.Value.1" => "available"
    })
    assert expected == EC2.describe_vpcs(
      filters: ["options-id": ["dopt-7a8b9c2d", "dopt-2b2a3d3c"],
      "state": ["available"]])
  end
test "describe vpcs with vpc ids" do
expected = build_query(:describe_vpcs, %{
"VpcId.1" => "vpc-123456",
"VpcId.2" => "vpc-a1b2c3"
})
assert expected == EC2.describe_vpcs(
vpc_ids: ["vpc-123456", "vpc-a1b2c3"]
)
end
  test "create_vpc" do
    expected = build_query(:create_vpc, %{
      "CidrBlock" => "10.0.0.0/16"
    })
    assert expected == EC2.create_vpc("10.0.0.0/16")
  end
  test "create_vpc with amazon provided cidr block enabled and instance tenancy set" do
    expected = build_query(:create_vpc, %{
      "CidrBlock" => "10.0.0.0/16",
      "InstanceTenancy" => "dedicated",
      "AmazonProvidedIpv6CidrBlock" => true
    })
    assert expected == EC2.create_vpc("10.0.0.0/16",
      [instance_tenancy: "dedicated", amazon_provided_ipv6_cidr_block: true])
  end
  test "delete_vpc" do
    expected = build_query(:delete_vpc, %{
      "VpcId" => "vpc-1a2b3c4d"
    })
    assert expected == EC2.delete_vpc("vpc-1a2b3c4d")
  end
  test "delete_vpc with dry_run" do
    expected = build_query(:delete_vpc, %{
      "VpcId" => "vpc-1a2b3c4d",
      "DryRun" => true
    })
    assert expected == EC2.delete_vpc("vpc-1a2b3c4d", [dry_run: true])
  end
  test "describe_vpc_attribute" do
    expected = build_query(:describe_vpc_attribute, %{
      "VpcId" => "vpc-1a2b3c4d",
      "Attribute" => "enableDnsSupport"
    })
    assert expected == EC2.describe_vpc_attribute("vpc-1a2b3c4d", "enableDnsSupport")
  end
  # Boolean attributes serialize in the wrapped <Name>.Value form.
  test "modify_vpc_attribute with enable_dns_hostnames and enable_dns_support" do
    expected = build_query(:modify_vpc_attribute, %{
      "VpcId" => "vpc-1a2b3c4d",
      "EnableDnsHostnames.Value" => true,
      "EnableDnsSupport.Value" => true
    })
    assert expected == EC2.modify_vpc_attribute("vpc-1a2b3c4d",
      [enable_dns_hostnames: true, enable_dns_support: true]
    )
  end
  #################
  # Subnets Tests #
  #################
  test "describe_subnets" do
    expected = build_query(:describe_subnets, %{})
    assert expected == EC2.describe_subnets
  end
  test "describe_subnets filters" do
    expected = build_query(:describe_subnets, %{
      "Filter.1.Name" => "vpc-id",
      "Filter.1.Value.1" => "vpc-1a2b3c4d",
      "Filter.1.Value.2" => "vpc-6e7f8a92",
      "Filter.2.Name" => "state",
      "Filter.2.Value.1" => "available"
    })
    assert expected == EC2.describe_subnets([
      filters: [
        "vpc-id": ["vpc-1a2b3c4d", "vpc-6e7f8a92"],
        "state": ["available"]
      ]
    ])
  end
  test "describe_subnets with subnets" do
    expected = build_query(:describe_subnets, %{
      "SubnetId.1" => "subnet-9d4a7b6c",
      "SubnetId.2" => "subnet-6e7f829e"
    })
    assert expected == EC2.describe_subnets([
      subnet_ids: ["subnet-9d4a7b6c", "subnet-6e7f829e"]
    ])
  end
  test "create_subnet" do
    expected = build_query(:create_subnet, %{
      "VpcId" => "vpc-1a2b3c4d",
      "CidrBlock" => "10.0.1.0/24"
    })
    assert expected == EC2.create_subnet("vpc-1a2b3c4d", "10.0.1.0/24")
  end
  test "create_subnet with an IPv6 CIDR block" do
    expected = build_query(:create_subnet, %{
      "VpcId" => "vpc-1a2b3c4d",
      "CidrBlock" => "10.0.1.0/24",
      "Ipv6CidrBlock" => "2001:db8:1234:1a00::/64"
    })
    assert expected == EC2.create_subnet("vpc-1a2b3c4d", "10.0.1.0/24",
      [ipv6_cidr_block: "2001:db8:1234:1a00::/64"])
  end
  test "delete_subnet" do
    expected = build_query(:delete_subnet, %{
      "SubnetId" => "subnet-9d4a7b6c"
    })
    assert expected == EC2.delete_subnet("subnet-9d4a7b6c")
  end
  test "delete_subnet with dry_run" do
    expected = build_query(:delete_subnet, %{
      "SubnetId" => "subnet-9d4a7b6c",
      "DryRun" => true
    })
    assert expected == EC2.delete_subnet("subnet-9d4a7b6c", [dry_run: true])
  end
  test "modify_subnet_attribute with map_public_ip_on_launch" do
    expected = build_query(:modify_subnet_attribute, %{
      "SubnetId" => "subnet-9d4a7b6c",
      "MapPublicIpOnLaunch.Value" => true
    })
    assert expected == EC2.modify_subnet_attribute("subnet-9d4a7b6c",
      [map_public_ip_on_launch: true])
  end
test "modify_subnet_attriute with assign_ipv6_address_on_creation" do
expected = build_query(:modify_subnet_attribute, %{
"SubnetId" => "subnet-9d4a7b6c",
"AssignIpv6AddressOnCreation.Value" => true
})
assert expected == EC2.modify_subnet_attribute("subnet-9d4a7b6c",
[assign_ipv6_address_on_creation: true])
end
  ##################
  # Key Pair Tests #
  ##################
  test "describe_key_pairs" do
    expected = build_query(:describe_key_pairs, %{})
    assert expected == EC2.describe_key_pairs
  end
  test "describe_key_pairs with filters" do
    expected = build_query(:describe_key_pairs, %{
      "Filter.1.Name" => "key-name",
      "Filter.1.Value.1" => "*Dave*"
    })
    assert expected == EC2.describe_key_pairs([
      filters: ["key-name": ["*Dave*"]]
    ])
  end
  test "describe_key_pairs with key_names" do
    expected = build_query(:describe_key_pairs, %{
      "KeyName.1" => "test-key-pair",
      "KeyName.2" => "that-key-pair"
    })
    assert expected == EC2.describe_key_pairs([
      key_names: ["test-key-pair", "that-key-pair"]
    ])
  end
  test "create_key_pair with dry_run" do
    expected = build_query(:create_key_pair, %{
      "KeyName" => "test-key-pair",
      "DryRun" => true
    })
    assert expected == EC2.create_key_pair("test-key-pair",
      [dry_run: true])
  end
  test "delete_key_pair with dry_run" do
    expected = build_query(:delete_key_pair, %{
      "KeyName" => "test-key-pair",
      "DryRun" => true
    })
    assert expected == EC2.delete_key_pair("test-key-pair",
      [dry_run: true])
  end
  # The public key material is expected to already be base64(url) encoded;
  # it is passed through unchanged.
  test "import_key_pair no options" do
    expected = build_query(:import_key_pair, %{
      "KeyName" => "test-key-pair",
      "PublicKeyMaterial" => Base.url_encode64("test")
    })
    assert expected == EC2.import_key_pair(
      "test-key-pair",
      Base.url_encode64("test"))
  end
  test "import_key_pair with dry_run" do
    expected = build_query(:import_key_pair, %{
      "KeyName" => "test-key-pair",
      "PublicKeyMaterial" => Base.url_encode64("test"),
      "DryRun" => true
    })
    assert expected == EC2.import_key_pair(
      "test-key-pair", Base.url_encode64("test"),
      [dry_run: true])
  end
  #########################
  # Security Groups Tests #
  #########################
  test "describe_security_groups no options" do
    expected = build_query(:describe_security_groups, %{})
    assert expected == EC2.describe_security_groups
  end
  test "describe_security_groups with group_names" do
    expected = build_query(:describe_security_groups, %{
      "GroupName.1" => "Test",
      "GroupName.2" => "WebServer"
    })
    assert expected == EC2.describe_security_groups(
      [group_names: ["Test", "WebServer"]]
    )
  end
  test "describe_security_groups with filters" do
    expected = build_query(:describe_security_groups, %{
      "Filter.1.Name" => "ip-permission.protocol",
      "Filter.1.Value.1" => "tcp",
      "Filter.2.Name" => "ip-permission.from-port",
      "Filter.2.Value.1" => "22",
      "Filter.3.Name" => "ip-permission.to-port",
      "Filter.3.Value.1" => "22",
      "Filter.4.Name" => "ip-permission.group-name",
      "Filter.4.Value.1" => "app_server_group",
      "Filter.4.Value.2" => "database_group"
    })
    assert expected == EC2.describe_security_groups(
      [filters: ["ip-permission.protocol": ["tcp"],
       "ip-permission.from-port": ["22"],
       "ip-permission.to-port": ["22"],
       "ip-permission.group-name": ["app_server_group", "database_group"]
      ]
    ])
  end
  test "describe_security_groups with group_ids" do
    expected = build_query(:describe_security_groups, %{
      "GroupId.1" => "sg-9bf6ceff",
      "GroupId.2" => "sg-12345678"
    })
    assert expected == EC2.describe_security_groups(
      [group_ids: ["sg-9bf6ceff", "sg-12345678"]]
    )
  end
  test "create_security_group with no options" do
    expected = build_query(:create_security_group, %{
      "GroupName" => "Test",
      "GroupDescription" => "Test Description"
    })
    assert expected == EC2.create_security_group("Test", "Test Description")
  end
  test "create_security_group with vpc_id" do
    expected = build_query(:create_security_group, %{
      "GroupName" => "Test",
      "GroupDescription" => "Test Description",
      "VpcId" => "vpc-3325caf2"
    })
    assert expected == EC2.create_security_group("Test", "Test Description",
      [vpc_id: "vpc-3325caf2"])
  end
  # IpPermissions nest two levels deep: IpPermissions.N.IpRanges.M.CidrIp etc.
  test "authorize_security_group_ingress with ip_permissions and group_name" do
    expected = build_query(:authorize_security_group_ingress, %{
      "GroupName" => "websrv",
      "IpPermissions.1.IpProtocol" => "tcp",
      "IpPermissions.1.FromPort" => 22,
      "IpPermissions.1.ToPort" => 22,
      "IpPermissions.1.IpRanges.1.CidrIp" => "192.0.2.0/24",
      "IpPermissions.1.IpRanges.2.CidrIp" => "198.51.100.0/24"
    })
    assert expected == EC2.authorize_security_group_ingress([group_name: "websrv",
      ip_permissions: [
        [ip_protocol: "tcp", from_port: 22, to_port: 22, ip_ranges: [
          [cidr_ip: "192.0.2.0/24"], [cidr_ip: "198.51.100.0/24"]
        ]]
      ]])
  end
  test "authorize_security_group_egress with ip_permissions" do
    expected = build_query(:authorize_security_group_egress, %{
      "GroupId" => "sg-9a8d7f5c",
      "IpPermissions.1.IpProtocol" => "udp",
      "IpPermissions.1.FromPort" => 22,
      "IpPermissions.1.ToPort" => 22,
      "IpPermissions.1.Ipv6Ranges.1.CidrIpv6" => "2001:db8:1234:1a00::/64",
      "IpPermissions.1.UserIdGroupPairs.1.GroupId" => "sg-987654",
      "IpPermissions.1.UserIdGroupPairs.1.GroupName" => "test"
    })
    assert expected == EC2.authorize_security_group_egress("sg-9a8d7f5c", [
      ip_permissions: [
        [ip_protocol: "udp", from_port: 22, to_port: 22, ipv6_ranges: [[cidr_ipv6: "2001:db8:1234:1a00::/64"]],
        user_id_group_pairs: [
          [group_id: "sg-987654", group_name: "test"]
        ]
        ]
      ]
    ])
  end
  test "revoke_security_group_ingress with ip_permissions and group_name" do
    expected = build_query(:revoke_security_group_ingress, %{
      "GroupName" => "websrv",
      "IpPermissions.1.IpProtocol" => "tcp",
      "IpPermissions.1.FromPort" => 80,
      "IpPermissions.1.ToPort" => 80,
      "IpPermissions.1.Ipv6Ranges.1.CidrIpv6" => "2001:db8:1234:1a00::/64",
      "IpPermissions.1.Ipv6Ranges.2.CidrIpv6" => "2001:db9:1234:1a00::/64"
    })
    assert expected == EC2.revoke_security_group_ingress([group_name: "websrv",
      ip_permissions: [
        [ip_protocol: "tcp", from_port: 80, to_port: 80, ipv6_ranges: [
          [cidr_ipv6: "2001:db8:1234:1a00::/64"],
          [cidr_ipv6: "2001:db9:1234:1a00::/64"]
        ]]
      ]
    ])
  end
  test "revoke_security_group_egress with cidr_ip" do
    expected = build_query(:revoke_security_group_egress, %{
      "GroupId" => "websrv",
      "CidrIp" => "TestCidrIp"
    })
    assert expected == EC2.revoke_security_group_egress("websrv", [cidr_ip: "TestCidrIp"])
  end
end
| 29.837888 | 153 | 0.622695 |
792c9a16301e1b983ca90f8ce3e009c95d657d76 | 10,177 | ex | Elixir | lib/absinthe/plug.ex | mjason/absinthe_plug_cn | 230896e5a01b751076b12aa157ca84da0f650acd | [
"BSD-3-Clause"
] | null | null | null | lib/absinthe/plug.ex | mjason/absinthe_plug_cn | 230896e5a01b751076b12aa157ca84da0f650acd | [
"BSD-3-Clause"
] | null | null | null | lib/absinthe/plug.ex | mjason/absinthe_plug_cn | 230896e5a01b751076b12aa157ca84da0f650acd | [
"BSD-3-Clause"
] | null | null | null | defmodule Absinthe.Plug do
@moduledoc """
A plug for using [Absinthe](https://hex.pm/packages/absinthe) (GraphQL).
## Usage
In your router:
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json, Absinthe.Plug.Parser],
pass: ["*/*"],
json_decoder: Poison
plug Absinthe.Plug,
schema: MyApp.Schema
If you want only `Absinthe.Plug` to serve a particular route, configure your
router like:
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json, Absinthe.Plug.Parser],
pass: ["*/*"],
json_decoder: Poison
forward "/api", Absinthe.Plug,
schema: MyApp.Schema
See the documentation on `Absinthe.Plug.init/1` and the `Absinthe.Plug.opts`
type for information on the available options.
To add support for a GraphiQL interface, add a configuration for
`Absinthe.Plug.GraphiQL`:
forward "/graphiql",
Absinthe.Plug.GraphiQL,
        schema: MyApp.Schema
## Included GraphQL Types
This package includes additional types for use in Absinthe GraphQL schema and
type modules.
See the documentation on `Absinthe.Plug.Types` for more information.
## More Information
For more on configuring `Absinthe.Plug` and how GraphQL requests are made,
see [the guide](http://absinthe-graphql.org/guides/plug-phoenix/) at
<http://absinthe-graphql.org>.
"""
@behaviour Plug
import Plug.Conn
require Logger
alias __MODULE__.Request
@raw_options [:analyze_complexity, :max_complexity]
@type function_name :: atom
@typedoc """
- `:adapter` -- (Optional) Absinthe adapter to use (default: `Absinthe.Adapter.LanguageConventions`).
- `:context` -- (Optional) Initial value for the Absinthe context, available to resolvers. (default: `%{}`).
- `:no_query_message` -- (Optional) Message to return to the client if no query is provided (default: "No query document supplied").
- `:json_codec` -- (Optional) A `module` or `{module, Keyword.t}` dictating which JSON codec should be used (default: `Poison`). The codec module should implement `encode!/2` (e.g., `module.encode!(body, opts)`).
- `:pipeline` -- (Optional) `{module, atom}` reference to a 2-arity function that will be called to generate the processing pipeline. (default: `{Absinthe.Plug, :default_pipeline}`).
- `:document_providers` -- (Optional) A `{module, atom}` reference to a 1-arity function that will be called to determine the document providers that will be used to process the request. (default: `{Absinthe.Plug, :default_document_providers}`, which configures `Absinthe.Plug.DocumentProvider.Default` as the lone document provider). A simple list of document providers can also be given. See `Absinthe.Plug.DocumentProvider` for more information about document providers, their role in procesing requests, and how you can define and configure your own.
- `:schema` -- (Required, if not handled by Mix.Config) The Absinthe schema to use. If a module name is not provided, `Application.get_env(:absinthe, :schema)` will be attempt to find one.
"""
@type opts :: [
schema: module,
adapter: module,
context: map,
json_codec: module | {module, Keyword.t},
pipeline: {module, atom},
no_query_message: String.t,
document_providers: [Absinthe.Plug.DocumentProvider.t, ...] | Absinthe.Plug.DocumentProvider.t | {module, atom},
analyze_complexity: boolean,
max_complexity: non_neg_integer | :infinity,
]
@doc """
Serve an Absinthe GraphQL schema with the specified options.
## Options
See the documentation for the `Absinthe.Plug.opts` type for details on the available options.
"""
@spec init(opts :: opts) :: map
def init(opts) do
adapter = Keyword.get(opts, :adapter, Absinthe.Adapter.LanguageConventions)
context = Keyword.get(opts, :context, %{})
no_query_message = Keyword.get(opts, :no_query_message, "No query document supplied")
pipeline = Keyword.get(opts, :pipeline, {__MODULE__, :default_pipeline})
document_providers = Keyword.get(opts, :document_providers, {__MODULE__, :default_document_providers})
json_codec = case Keyword.get(opts, :json_codec, Poison) do
module when is_atom(module) -> %{module: module, opts: []}
other -> other
end
schema_mod = opts |> get_schema
raw_options = Keyword.take(opts, @raw_options)
%{
adapter: adapter,
context: context,
document_providers: document_providers,
json_codec: json_codec,
no_query_message: no_query_message,
pipeline: pipeline,
raw_options: raw_options,
schema_mod: schema_mod,
}
end
# Resolves the schema module from opts, falling back to the :absinthe
# application environment. Absinthe.Schema.types/1 is called purely as a
# validity probe: a module that is not a compiled Absinthe schema (or `nil`)
# raises UndefinedFunctionError, which is converted to a friendlier error.
defp get_schema(opts) do
  default = Application.get_env(:absinthe, :schema)
  schema = Keyword.get(opts, :schema, default)
  try do
    Absinthe.Schema.types(schema)
  rescue
    UndefinedFunctionError ->
      raise ArgumentError, "The supplied schema: #{inspect schema} is not a valid Absinthe Schema"
  end
  schema
end
@doc """
Parses, validates, resolves, and executes the given Graphql Document
"""
@spec call(Plug.Conn.t, map) :: Plug.Conn.t | no_return
def call(conn, %{json_codec: json_codec} = config) do
{conn, result} = conn |> execute(config)
case result do
{:input_error, msg} ->
conn
|> send_resp(400, msg)
{:ok, %{data: _} = result} ->
conn
|> json(200, result, json_codec)
{:ok, %{errors: _} = result} ->
conn
|> json(400, result, json_codec)
{:ok, result} when is_list(result) ->
conn
|> json(200, result, json_codec)
{:error, {:http_method, text}, _} ->
conn
|> send_resp(405, text)
{:error, error, _} when is_binary(error) ->
conn
|> send_resp(500, error)
end
end
@doc false
@spec execute(Plug.Conn.t, map) :: {Plug.Conn.t, any}
def execute(conn, config) do
  # Snapshot request metadata (any pre-existing :absinthe private data plus
  # the HTTP method) for later use by the document pipeline.
  conn_info = %{
    conn_private: (conn.private[:absinthe] || %{}) |> Map.put(:http_method, conn.method),
  }

  with {:ok, conn, request} <- Request.parse(conn, config),
       {:ok, request} <- ensure_processable(request, config) do
    {conn, run_request(request, conn_info, config)}
  else
    # Any parse/validation failure tuple is returned unchanged so call/2 can
    # map it onto an HTTP status.
    result ->
      {conn, result}
  end
end
@doc false
@spec ensure_processable(Request.t, map) :: {:ok, Request.t} | {:input_error, String.t}
# A request is processable when it carries documents and every query has a
# document provider; any {:input_error, _} is passed through unchanged.
def ensure_processable(request, config) do
  case ensure_documents(request, config) do
    {:ok, checked} -> ensure_document_provider(checked)
    error -> error
  end
end
@spec ensure_documents(Request.t, map) :: {:ok, Request.t} | {:input_error, String.t}
# No queries at all is an input error.
defp ensure_documents(%{queries: []}, config) do
  {:input_error, config.no_query_message}
end

# Succeeds only when every query checks out; stops at the first error found.
defp ensure_documents(%{queries: queries} = request, config) do
  first_error =
    Enum.find_value(queries, fn
      {:input_error, _} = error ->
        error

      query ->
        case ensure_document(query, config) do
          {:ok, _} -> nil
          {:input_error, _} = error -> error
        end
    end)

  case first_error do
    nil -> {:ok, request}
    error -> error
  end
end
@spec ensure_document(Request.t, map) :: {:ok, Request.t} | {:input_error, String.t}
# A query must carry a non-nil document to be executable.
defp ensure_document(%{document: nil}, config), do: {:input_error, config.no_query_message}
defp ensure_document(%{document: _} = query, _config), do: {:ok, query}
@spec ensure_document_provider(Request.t) :: {:ok, Request.t} | {:input_error, String.t}
# Every query must have been claimed by a document provider during parsing.
defp ensure_document_provider(%{queries: queries} = request) do
  case Enum.find(queries, &(not Map.has_key?(&1, :document_provider))) do
    nil -> {:ok, request}
    _unclaimed -> {:input_error, "No document provider found to handle this request"}
  end
end
# Executes a batched request: runs all queries through the batch runner, then
# re-attaches each query's extra keys (e.g. a client-supplied `id`) to its
# payload so clients can correlate the responses.
def run_request(%{batch: true, queries: queries} = request, conn, config) do
  Request.log(request)

  results =
    queries
    |> Absinthe.Plug.Batch.Runner.run(conn, config)
    |> Enum.zip(request.extra_keys)
    |> Enum.map(fn {result, extra_keys} ->
      Map.merge(extra_keys, %{
        payload: result
      })
    end)

  {:ok, results}
end

# Executes a single (non-batched) request; exactly one query is expected.
def run_request(%{batch: false, queries: [query]} = request, conn_info, config) do
  Request.log(request)
  run_query(query, conn_info, config)
end
# Builds the processing pipeline for a single query and runs it, returning
# {:ok, result} on success; any pipeline error tuple is passed through as-is.
def run_query(query, conn_info, config) do
  %{document: document, pipeline: pipeline} = Request.Query.add_pipeline(query, conn_info, config)

  with {:ok, %{result: result}, _} <- Absinthe.Pipeline.run(document, pipeline) do
    {:ok, result}
  end
end
#
# PIPELINE
#
@doc """
The default pipeline used to process GraphQL documents.
This consists of Absinthe's default pipeline (as returned by `Absinthe.Pipeline.for_document/1`),
with the `Absinthe.Plug.Validation.HTTPMethod` phase inserted to ensure that the correct
HTTP verb is being used for the GraphQL operation type.
"""
@spec default_pipeline(map, Keyword.t) :: Absinthe.Pipeline.t
def default_pipeline(config, pipeline_opts) do
config.schema_mod
|> Absinthe.Pipeline.for_document(pipeline_opts)
|> Absinthe.Pipeline.insert_after(Absinthe.Phase.Document.CurrentOperation,
{Absinthe.Plug.Validation.HTTPMethod, method: config.conn_private.http_method}
)
end
#
# DOCUMENT PROVIDERS
#
@doc """
The default list of document providers that are enabled.
This consists of a single document provider, `Absinthe.Plug.DocumentProvider.Default`, which
supports ad hoc GraphQL documents provided directly within the request.
For more information about document providers, see `Absinthe.Plug.DocumentProvider`.
"""
@spec default_document_providers(map) :: [Absinthe.Plug.DocumentProvider.t]
def default_document_providers(_) do
[Absinthe.Plug.DocumentProvider.Default]
end
#
# SERIALIZATION
#
@doc false
@spec json(Plug.Conn.t, 200 | 400 | 405 | 500, String.t, map) :: Plug.Conn.t | no_return
# Encodes `body` with the configured codec and sends it as application/json.
def json(conn, status, body, codec) do
  encoded = codec.module.encode!(body, codec.opts)

  conn
  |> put_resp_content_type("application/json")
  |> send_resp(status, encoded)
end
end
| 33.25817 | 556 | 0.675838 |
792caa85dcab79b4a8662158c5e09a21be8e07c5 | 2,422 | ex | Elixir | lib/commodity_api/generic/plug/public_ip.ex | akdilsiz/commodity-cloud | 08c366c9fc95fbb3565131672db4cc52f8b870c9 | [
"Apache-2.0"
] | 7 | 2019-04-11T21:12:49.000Z | 2021-04-14T12:56:42.000Z | lib/commodity_api/generic/plug/public_ip.ex | akdilsiz/commodity-cloud | 08c366c9fc95fbb3565131672db4cc52f8b870c9 | [
"Apache-2.0"
] | null | null | null | lib/commodity_api/generic/plug/public_ip.ex | akdilsiz/commodity-cloud | 08c366c9fc95fbb3565131672db4cc52f8b870c9 | [
"Apache-2.0"
] | 2 | 2019-06-06T18:05:33.000Z | 2019-07-16T08:49:45.000Z | ##
# https://www.cogini.com/blog/getting-the-client-public-ip-address-in-phoenix/
##
defmodule Commodity.Api.Generic.Plug.PublicIp do
@moduledoc """
Get public IP address of request from x-forwarded-for header
"""
use Commodity.Api, :plug
# Standard Plug init/1; options are passed through unused.
def init(opts), do: opts
# Skip work when a previous pass already assigned the client IP.
def call(%{assigns: %{ip: _}} = conn, _opts), do: conn

# Otherwise derive the IP from the x-forwarded-for header (possibly empty).
def call(conn, _opts), do: process(conn, get_req_header(conn, "x-forwarded-for"))
@doc """
Assigns the client IP to `conn.assigns.ip`.

With no `x-forwarded-for` values the peer address is used. Header values are
only honored when the `:trust_x_forwarded_for` application setting is enabled
(default false); otherwise the peer address is used as well.
"""
def process(conn, []) do
  conn
  |> assign(:ip, to_string(:inet.ntoa(get_peer_ip(conn))))
end

def process(conn, vals) do
  if Application.get_env(:commodity, :trust_x_forwarded_for, false) do
    ip_address = get_ip_address(conn, vals)
    # Rewrite standard remote_ip field with value from header
    # See https://hexdocs.pm/plug/Plug.Conn.html
    conn = %{conn | remote_ip: ip_address}

    conn
    |> assign(:ip, to_string(:inet.ntoa(ip_address)))
  else
    conn
    |> assign(:ip, to_string(:inet.ntoa(get_peer_ip(conn))))
  end
end
# Picks the first public IPv4 address found in the first x-forwarded-for
# header value (which may be a comma-separated proxy chain); falls back to
# the peer address when none qualifies.
defp get_ip_address(conn, vals)
defp get_ip_address(conn, []), do: get_peer_ip(conn)
defp get_ip_address(conn, [val | _]) do
  # Split into multiple values
  comps = val
  |> String.split(~r{\s*,\s*}, trim: true)
  |> Enum.filter(&(&1 != "unknown")) # Get rid of "unknown" values
  |> Enum.map(&(hd(String.split(&1, ":")))) # Split IP from port, if any (IPv4 only — this mangles IPv6 literals)
  |> Enum.filter(&(&1 != "")) # Filter out blanks
  |> Enum.map(&(parse_address(&1))) # Parse address into :inet.ip_address tuple
  |> Enum.filter(&(is_public_ip(&1))) # Eliminate internal IP addresses, e.g. 192.168.1.1

  # No surviving public candidate: trust the transport-level peer instead.
  case comps do
    [] -> get_peer_ip(conn)
    [comp | _] -> comp
  end
end
@spec get_peer_ip(Plug.Conn.t) :: :inet.ip_address
# The transport-level peer address recorded by Plug.
defp get_peer_ip(conn), do: conn.remote_ip
@spec parse_address(String.t) :: :inet.ip_address
# Strict dotted-quad parse; any non-IPv4 input (including IPv6) yields :einval.
defp parse_address(text) do
  case :inet.parse_ipv4strict_address(to_charlist(text)) do
    {:ok, parsed} -> parsed
    {:error, :einval} -> :einval
  end
end
# Whether the input is a valid, public IP address
# http://en.wikipedia.org/wiki/Private_network
@spec is_public_ip(:inet.ip_address | atom) :: boolean
defp is_public_ip({10, _, _, _}), do: false
defp is_public_ip({192, 168, _, _}), do: false
defp is_public_ip({172, second, _, _}) when second >= 16 and second <= 31, do: false
defp is_public_ip({127, 0, 0, _}), do: false
defp is_public_ip({_, _, _, _}), do: true
defp is_public_ip(:einval), do: false
end | 29.180723 | 96 | 0.644509 |
792d1b54caa055483e6da312f4b394b24089328e | 21,454 | ex | Elixir | lib/liquex/filter.ex | inspired-consulting/liquex | 2ba530727aa87a93dbe9b981b4a90e962b34f4a3 | [
"MIT"
] | 19 | 2020-02-29T01:37:11.000Z | 2022-03-15T06:45:20.000Z | lib/liquex/filter.ex | inspired-consulting/liquex | 2ba530727aa87a93dbe9b981b4a90e962b34f4a3 | [
"MIT"
] | 19 | 2020-09-02T19:35:08.000Z | 2022-03-31T21:42:16.000Z | lib/liquex/filter.ex | inspired-consulting/liquex | 2ba530727aa87a93dbe9b981b4a90e962b34f4a3 | [
"MIT"
] | 4 | 2020-10-20T08:22:43.000Z | 2022-01-19T17:21:32.000Z | defmodule Liquex.Filter do
@moduledoc """
Contains all the basic filters for Liquid
"""
@type filter_t :: {:filter, [...]}
@callback apply(any, filter_t, map) :: any
alias Liquex.Context
# Injects the Liquex.Filter behaviour plus an apply/3 that routes filter
# calls through Liquex.Filter.apply/4, trying the using module's own filter
# implementations first and falling back to the built-ins here.
defmacro __using__(_) do
  quote do
    @behaviour Liquex.Filter

    @spec apply(any, Liquex.Filter.filter_t(), map) :: any
    @impl Liquex.Filter
    def apply(value, filter, context),
      do: Liquex.Filter.apply(__MODULE__, value, filter, context)
  end
end
@spec filter_name(filter_t) :: String.t()
# Extracts the filter's name (the first element of the filter tuple's list).
def filter_name({:filter, [name | _rest]}), do: name
def apply(
      mod \\ __MODULE__,
      value,
      {:filter, [function, {:arguments, arguments}]},
      context
    ) do
  # Filter names arrive as strings; to_existing_atom avoids atom leaks and
  # rejects unknown filters (ArgumentError, converted below).
  func = String.to_existing_atom(function)

  # Evaluate each argument against the render context; trailing keyword-style
  # tuples are folded into one options list by merge_keywords/1.
  function_args =
    Enum.map(
      arguments,
      &Liquex.Argument.eval(&1, context)
    )
    |> merge_keywords()

  # Use the custom filter module only if it exports func with a matching
  # arity (value + args + context); otherwise fall back to the built-ins.
  mod =
    if mod != __MODULE__ and Kernel.function_exported?(mod, func, length(function_args) + 2) do
      mod
    else
      __MODULE__
    end

  Kernel.apply(mod, func, [value | function_args] ++ [context])
rescue
  # NOTE(review): this rescue also catches ArgumentErrors raised *inside* a
  # filter implementation, not only the to_existing_atom failure — confirm.
  # credo:disable-for-next-line
  ArgumentError -> raise Liquex.Error, "Invalid filter #{function}"
end
# Merges the tuples at the end of the argument list into a keyword list, but with string keys
#
#   value, size, {"crop", direction}, {"filter", filter}
#
# becomes
#
#   value, size, [{"crop", direction}, {"filter", filter}]
defp merge_keywords(arguments) do
  # Work from the tail: the trailing run of tuples are keyword-style options.
  {keywords, rest} =
    arguments
    |> Enum.reverse()
    |> Enum.split_while(&is_tuple/1)

  # Re-append the collected tuples as a single trailing list (or nothing),
  # then restore the original ordering.
  case keywords do
    [] -> rest
    _ -> [Enum.reverse(keywords) | rest]
  end
  |> Enum.reverse()
end
@doc """
Returns the absolute value of `value`.
## Examples
iex> Liquex.Filter.abs(-1, %{})
1
iex> Liquex.Filter.abs(1, %{})
1
iex> Liquex.Filter.abs("-1.1", %{})
1.1
"""
@spec abs(String.t() | number, any) :: number
def abs(value, _) when is_binary(value) do
{float, ""} = Float.parse(value)
abs(float)
end
def abs(value, _), do: abs(value)
@doc """
Appends `text` to the end of `value`
## Examples
iex> Liquex.Filter.append("myfile", ".html", %{})
"myfile.html"
"""
@spec append(String.t(), String.t(), map()) :: String.t()
def append(value, text, _), do: value <> text
@doc """
Sets a minimum value
## Examples
iex> Liquex.Filter.at_least(3, 5, %{})
5
iex> Liquex.Filter.at_least(5, 3, %{})
5
"""
@spec at_least(number, number, map()) :: number
def at_least(value, min, _) when value > min, do: value
def at_least(_value, min, _), do: min
@doc """
Sets a maximum value
## Examples
iex> Liquex.Filter.at_most(4, 5, %{})
4
iex> Liquex.Filter.at_most(4, 3, %{})
3
"""
@spec at_most(number, number, map()) :: number
def at_most(value, max, _) when value < max, do: value
def at_most(_value, max, _), do: max
@doc """
Capitalizes a string
## Examples
iex> Liquex.Filter.capitalize("title", %{})
"Title"
iex> Liquex.Filter.capitalize("my great title", %{})
"My great title"
"""
@spec capitalize(String.t(), map()) :: String.t()
def capitalize(value, _), do: String.capitalize(value)
@doc """
Rounds `value` up to the nearest whole number. Liquid tries to convert the input to a number before the filter is applied.
## Examples
iex> Liquex.Filter.ceil(1.2, %{})
2
iex> Liquex.Filter.ceil(2.0, %{})
2
iex> Liquex.Filter.ceil(183.357, %{})
184
iex> Liquex.Filter.ceil("3.5", %{})
4
"""
@spec ceil(number | String.t(), map()) :: number
def ceil(value, _) when is_binary(value) do
{num, ""} = Float.parse(value)
Float.ceil(num) |> trunc()
end
def ceil(value, _), do: Float.ceil(value) |> trunc()
@doc """
Removes any nil values from an array.
## Examples
iex> Liquex.Filter.compact([1, 2, nil, 3], %{})
[1,2,3]
iex> Liquex.Filter.compact([1, 2, 3], %{})
[1,2,3]
"""
@spec compact([any], map()) :: [any]
def compact(value, _) when is_list(value),
do: Enum.reject(value, &is_nil/1)
@doc """
Concatenates (joins together) multiple arrays. The resulting array contains all the items
## Examples
iex> Liquex.Filter.concat([1,2], [3,4], %{})
[1,2,3,4]
"""
def concat(value, other, _) when is_list(value) and is_list(other),
do: value ++ other
@doc """
Converts `value` timestamp into another date `format`.
The format for this syntax is the same as strftime. The input uses the same format as Ruby’s Time.parse.
## Examples
iex> Liquex.Filter.date(~D[2000-01-01], "%m/%d/%Y", %{})
"01/01/2000"
iex> Liquex.Filter.date("2000-01-01", "%m/%d/%Y", %{})
"01/01/2000"
iex> Liquex.Filter.date("January 1, 2000", "%m/%d/%Y", %{})
"01/01/2000"
iex> Liquex.Filter.date("1/2/2000", "%m/%d/%Y", %{})
"01/02/2000"
iex> Liquex.Filter.date("March 14, 2016", "%b %d, %y", %{})
"Mar 14, 16"
"""
def date(%Date{} = value, format, _), do: Timex.format!(value, format, :strftime)
def date(%DateTime{} = value, format, _), do: Timex.format!(value, format, :strftime)
def date(%NaiveDateTime{} = value, format, _), do: Timex.format!(value, format, :strftime)
def date("now", format, context), do: date(DateTime.utc_now(), format, context)
def date("today", format, context), do: date(Date.utc_today(), format, context)
def date(value, format, context) when is_binary(value) do
# Thanks to the nonspecific definition of the format in the spec, we parse
# some common date formats
case DateTimeParser.parse_datetime(value, assume_time: true) do
{:ok, parsed_date} ->
parsed_date
|> NaiveDateTime.to_date()
|> date(format, context)
_ ->
nil
end
end
@doc """
Allows you to specify a fallback in case a value doesn’t exist. default will show its value
if the left side is nil, false, or empty.
## Examples
iex> Liquex.Filter.default("1.99", "2.99", %{})
"1.99"
iex> Liquex.Filter.default("", "2.99", %{})
"2.99"
"""
def default(value, def_value, _) when value in [nil, "", false, []], do: def_value
def default(value, _, _), do: value
@doc """
Divides a number by another number.
## Examples
The result is rounded down to the nearest integer (that is, the floor) if the divisor is an integer.
iex> Liquex.Filter.divided_by(16, 4, %{})
4
iex> Liquex.Filter.divided_by(5, 3, %{})
1
iex> Liquex.Filter.divided_by(20, 7.0, %{})
2.857142857142857
"""
def divided_by(value, divisor, _) when is_integer(divisor), do: trunc(value / divisor)
def divided_by(value, divisor, _), do: value / divisor
@doc """
Makes each character in a string lowercase. It has no effect on strings
which are already all lowercase.
## Examples
iex> Liquex.Filter.downcase("Parker Moore", %{})
"parker moore"
iex> Liquex.Filter.downcase("apple", %{})
"apple"
"""
def downcase(nil, _), do: nil
def downcase(value, _), do: String.downcase(value)
@doc """
Escapes a string by replacing characters with escape sequences (so that the string can
be used in a URL, for example). It doesn’t change strings that don’t have anything to
escape.
## Examples
iex> Liquex.Filter.escape("Have you read 'James & the Giant Peach'?", %{})
"Have you read 'James & the Giant Peach'?"
iex> Liquex.Filter.escape("Tetsuro Takara", %{})
"Tetsuro Takara"
"""
def escape(value, _),
do: HtmlEntities.encode(value)
@doc """
Escapes a string by replacing characters with escape sequences (so that the string can
be used in a URL, for example). It doesn’t change strings that don’t have anything to
escape.
## Examples
iex> Liquex.Filter.escape_once("1 < 2 & 3", %{})
"1 < 2 & 3"
"""
def escape_once(value, _),
do: value |> HtmlEntities.decode() |> HtmlEntities.encode()
@doc """
Returns the first item of an array.
## Examples
iex> Liquex.Filter.first([1, 2, 3], %{})
1
iex> Liquex.Filter.first([], %{})
nil
"""
def first([], _), do: nil
def first([f | _], _), do: f
@doc """
Rounds the input down to the nearest whole number. Liquid tries to convert the input to a
number before the filter is applied.
## Examples
iex> Liquex.Filter.floor(1.2, %{})
1
iex> Liquex.Filter.floor(2.0, %{})
2
"""
def floor(value, _), do: Kernel.trunc(value)
@doc """
Combines the items in `values` into a single string using `joiner` as a separator.
## Examples
iex> Liquex.Filter.join(~w(John Paul George Ringo), " and ", %{})
"John and Paul and George and Ringo"
"""
def join(values, joiner, _), do: Enum.join(values, joiner)
@doc """
Returns the last item of `arr`.
## Examples
iex> Liquex.Filter.last([1, 2, 3], %{})
3
iex> Liquex.Filter.first([], %{})
nil
"""
@spec last(list, Liquex.Context.t()) :: any
def last(arr, context), do: arr |> Enum.reverse() |> first(context)
@doc """
Removes all whitespace (tabs, spaces, and newlines) from the left side of a string.
It does not affect spaces between words.
## Examples
iex> Liquex.Filter.lstrip(" So much room for activities! ", %{})
"So much room for activities! "
"""
@spec lstrip(String.t(), Context.t()) :: String.t()
def lstrip(value, _), do: value |> String.trim_leading()
@doc """
Creates an array (`arr`) of values by extracting the values of a named property from another object (`key`).
## Examples
iex> Liquex.Filter.map([%{"a" => 1}, %{"a" => 2, "b" => 1}], "a", %{})
[1, 2]
"""
@spec map([any], term, Context.t()) :: [any]
def map(arr, key, _), do: Enum.map(arr, &Liquex.Indifferent.get(&1, key, nil))
@doc """
Subtracts a number from another number.
## Examples
iex> Liquex.Filter.minus(4, 2, %{})
2
iex> Liquex.Filter.minus(183.357, 12, %{})
171.357
"""
@spec minus(number, number, Context.t()) :: number
def minus(left, right, _), do: left - right
@doc """
Returns the remainder of a division operation.
## Examples
iex> Liquex.Filter.modulo(3, 2, %{})
1
iex> Liquex.Filter.modulo(183.357, 12, %{})
3.357
"""
@spec modulo(number, number, Context.t()) :: number
def modulo(left, right, _) when is_float(left) or is_float(right),
do: :math.fmod(left, right) |> Float.round(5)
def modulo(left, right, _), do: rem(left, right)
@doc """
Replaces every newline (\n) in a string with an HTML line break (<br />).
## Examples
iex> Liquex.Filter.newline_to_br("\\nHello\\nthere\\n", %{})
"<br />\\nHello<br />\\nthere<br />\\n"
"""
@spec newline_to_br(String.t(), Context.t()) :: String.t()
def newline_to_br(value, _), do: String.replace(value, "\n", "<br />\n")
@doc """
Adds a number to another number.
## Examples
iex> Liquex.Filter.plus(4, 2, %{})
6
iex> Liquex.Filter.plus(183.357, 12, %{})
195.357
"""
def plus(left, right, _), do: left + right
@doc """
Adds the specified string to the beginning of another string.
## Examples
iex> Liquex.Filter.prepend("apples, oranges, and bananas", "Some fruit: ", %{})
"Some fruit: apples, oranges, and bananas"
iex> Liquex.Filter.prepend("/index.html", "example.com", %{})
"example.com/index.html"
"""
def prepend(value, prepender, _), do: prepender <> value
@doc """
Removes every occurrence of the specified substring from a string.
## Examples
iex> Liquex.Filter.remove("I strained to see the train through the rain", "rain", %{})
"I sted to see the t through the "
"""
def remove(value, original, context), do: replace(value, original, "", context)
@doc """
Removes every occurrence of the specified substring from a string.
## Examples
iex> Liquex.Filter.remove_first("I strained to see the train through the rain", "rain", %{})
"I sted to see the train through the rain"
"""
def remove_first(value, original, context), do: replace_first(value, original, "", context)
@doc """
Replaces every occurrence of the first argument in a string with the second argument.
## Examples
iex> Liquex.Filter.replace("Take my protein pills and put my helmet on", "my", "your", %{})
"Take your protein pills and put your helmet on"
"""
def replace(value, original, replacement, _),
do: String.replace(value, original, replacement)
@doc """
Replaces only the first occurrence of the first argument in a string with the second argument.
## Examples
iex> Liquex.Filter.replace_first("Take my protein pills and put my helmet on", "my", "your", %{})
"Take your protein pills and put my helmet on"
"""
def replace_first(value, original, replacement, _),
do: String.replace(value, original, replacement, global: false)
@doc """
Reverses the order of the items in an array. reverse cannot reverse a string.
## Examples
iex> Liquex.Filter.reverse(~w(apples oranges peaches plums), %{})
["plums", "peaches", "oranges", "apples"]
"""
def reverse(arr, _) when is_list(arr), do: Enum.reverse(arr)
@doc """
Rounds a number to the nearest integer or, if a number is passed as an argument, to that number of decimal places.
## Examples
iex> Liquex.Filter.round(1, %{})
1
iex> Liquex.Filter.round(1.2, %{})
1
iex> Liquex.Filter.round(2.7, %{})
3
iex> Liquex.Filter.round(183.357, 2, %{})
183.36
"""
def round(value, precision \\ 0, context)
def round(value, _, _) when is_integer(value), do: value
def round(value, 0, _), do: value |> Float.round() |> trunc()
def round(value, precision, _), do: Float.round(value, precision)
@doc """
Removes all whitespace (tabs, spaces, and newlines) from the right side of a string.
It does not affect spaces between words.
## Examples
iex> Liquex.Filter.rstrip(" So much room for activities! ", %{})
" So much room for activities!"
"""
def rstrip(value, _), do: value |> String.trim_trailing()
@doc """
Returns the number of characters in a string or the number of items in an array.
## Examples
iex> Liquex.Filter.size("Ground control to Major Tom.", %{})
28
iex> Liquex.Filter.size(~w(apples oranges peaches plums), %{})
4
"""
def size(value, _) when is_list(value), do: length(value)
def size(value, _) when is_binary(value), do: String.length(value)
@doc """
Returns a substring of 1 character beginning at the index specified by the
first argument. An optional second argument specifies the length of the
substring to be returned.
## Examples
iex> Liquex.Filter.slice("Liquid", 0, %{})
"L"
iex> Liquex.Filter.slice("Liquid", 2, %{})
"q"
iex> Liquex.Filter.slice("Liquid", 2, 5, %{})
"quid"
If the first argument is a negative number, the indices are counted from
the end of the string:
## Examples
iex> Liquex.Filter.slice("Liquid", -3, 2, %{})
"ui"
"""
def slice(value, start, length \\ 1, _),
do: String.slice(value, start, length)
@doc """
Sorts items in an array in case-sensitive order.
## Examples
iex> Liquex.Filter.sort(["zebra", "octopus", "giraffe", "Sally Snake"], %{})
["Sally Snake", "giraffe", "octopus", "zebra"]
"""
def sort(list, _), do: Liquex.Collection.sort(list)
def sort(list, field_name, _), do: Liquex.Collection.sort(list, field_name)
@doc """
Sorts items in an array in case-insensitive order.
## Examples
iex> Liquex.Filter.sort_natural(["zebra", "octopus", "giraffe", "Sally Snake"], %{})
["giraffe", "octopus", "Sally Snake", "zebra"]
"""
def sort_natural(list, _), do: Liquex.Collection.sort_case_insensitive(list)
def sort_natural(list, field_name, _),
do: Liquex.Collection.sort_case_insensitive(list, field_name)
@doc """
Divides a string into an array using the argument as a separator. split is
commonly used to convert comma-separated items from a string to an array.
## Examples
iex> Liquex.Filter.split("John, Paul, George, Ringo", ", ", %{})
["John", "Paul", "George", "Ringo"]
"""
def split(value, separator, _), do: String.split(value, separator)
@doc """
Removes all whitespace (tabs, spaces, and newlines) from both the left and
right side of a string. It does not affect spaces between words.
## Examples
iex> Liquex.Filter.strip(" So much room for activities! ", %{})
"So much room for activities!"
"""
def strip(value, _), do: String.trim(value)
@doc """
Removes any HTML tags from a string.
## Examples
iex> Liquex.Filter.strip_html("Have <em>you</em> read <strong>Ulysses</strong>?", %{})
"Have you read Ulysses?"
"""
def strip_html(value, _), do: HtmlSanitizeEx.strip_tags(value)
@doc """
Removes any newline characters (line breaks) from a string.
## Examples
iex> Liquex.Filter.strip_newlines("Hello\\nthere", %{})
"Hellothere"
"""
def strip_newlines(value, _) do
value
|> String.replace("\r", "")
|> String.replace("\n", "")
end
@doc """
Multiplies a number by another number.
## Examples
iex> Liquex.Filter.times(3, 4, %{})
12
iex> Liquex.Filter.times(24, 7, %{})
168
iex> Liquex.Filter.times(183.357, 12, %{})
2200.284
"""
def times(value, divisor, _), do: value * divisor
@doc """
Shortens a string down to the number of characters passed as an argument. If
the specified number of characters is less than the length of the string, an
ellipsis (…) is appended to the string and is included in the character
count.
## Examples
iex> Liquex.Filter.truncate("Ground control to Major Tom.", 20, %{})
"Ground control to..."
iex> Liquex.Filter.truncate("Ground control to Major Tom.", 25, ", and so on", %{})
"Ground control, and so on"
iex> Liquex.Filter.truncate("Ground control to Major Tom.", 20, "", %{})
"Ground control to Ma"
"""
def truncate(value, length, ellipsis \\ "...", _) do
if String.length(value) <= length do
value
else
String.slice(
value,
0,
length - String.length(ellipsis)
) <> ellipsis
end
end
@doc """
Shortens a string down to the number of characters passed as an argument. If
the specified number of characters is less than the length of the string, an
ellipsis (…) is appended to the string and is included in the character
count.
## Examples
iex> Liquex.Filter.truncatewords("Ground control to Major Tom.", 3, %{})
"Ground control to..."
iex> Liquex.Filter.truncatewords("Ground control to Major Tom.", 3, "--", %{})
"Ground control to--"
iex> Liquex.Filter.truncatewords("Ground control to Major Tom.", 3, "", %{})
"Ground control to"
"""
def truncatewords(value, length, ellipsis \\ "...", _) do
words = value |> String.split()
if length(words) <= length do
value
else
sentence =
words
|> Enum.take(length)
|> Enum.join(" ")
sentence <> ellipsis
end
end
@doc """
Removes any duplicate elements in an array.
## Examples
iex> Liquex.Filter.uniq(~w(ants bugs bees bugs ants), %{})
["ants", "bugs", "bees"]
"""
def uniq(list, _), do: Enum.uniq(list)
@doc """
Makes each character in a string uppercase. It has no effect on strings
which are already all uppercase.
## Examples
iex> Liquex.Filter.upcase("Parker Moore", %{})
"PARKER MOORE"
iex> Liquex.Filter.upcase("APPLE", %{})
"APPLE"
"""
def upcase(value, _), do: String.upcase(value)
@doc """
Decodes a string that has been encoded as a URL or by url_encode/2.
## Examples
iex> Liquex.Filter.url_decode("%27Stop%21%27+said+Fred", %{})
"'Stop!' said Fred"
"""
def url_decode(value, _), do: URI.decode_www_form(value)
@doc """
Decodes a string that has been encoded as a URL or by url_encode/2.
## Examples
iex> Liquex.Filter.url_encode("[email protected]", %{})
"john%40liquid.com"
iex> Liquex.Filter.url_encode("Tetsuro Takara", %{})
"Tetsuro+Takara"
"""
def url_encode(value, _), do: URI.encode_www_form(value)
@doc """
Creates an array including only the objects with a given property value, or
any truthy value by default.
## Examples
iex> Liquex.Filter.where([%{"b" => 2}, %{"b" => 1}], "b", 1, %{})
[%{"b" => 1}]
"""
def where(list, key, value, _), do: Liquex.Collection.where(list, key, value)
@doc """
Creates an array including only the objects with a given truthy property value
## Examples
iex> Liquex.Filter.where([%{"b" => true, "value" => 1}, %{"b" => 1, "value" => 2}, %{"b" => false, "value" => 3}], "b", %{})
[%{"b" => true, "value" => 1}, %{"b" => 1, "value" => 2}]
"""
def where(list, key, _), do: Liquex.Collection.where(list, key)
end
| 26.71731 | 130 | 0.608232 |
792d2ee87946723a8df7f91695e37a4192e1a32e | 1,661 | ex | Elixir | lib/freshcom_web.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | 9 | 2018-12-16T14:02:59.000Z | 2021-01-19T07:25:40.000Z | lib/freshcom_web.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | null | null | null | lib/freshcom_web.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | 4 | 2018-12-16T17:50:01.000Z | 2021-01-19T07:25:51.000Z | defmodule FreshcomWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use FreshcomWeb, :controller
use FreshcomWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: FreshcomWeb
import Plug.Conn
import FreshcomWeb.Router.Helpers
import FreshcomWeb.Gettext
import FreshcomWeb.Controller
alias Freshcom.Request
end
end
def view do
quote do
use Phoenix.View, root: "lib/freshcom_web/templates",
namespace: FreshcomWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
import FreshcomWeb.Router.Helpers
import FreshcomWeb.ErrorHelpers
import FreshcomWeb.Gettext
import FreshcomWeb.Normalization, only: [camelize: 1]
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import FreshcomWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 24.072464 | 69 | 0.693558 |
792d64886f1666823b636dc9d9df1901f539eec1 | 1,891 | exs | Elixir | test/tesla/middleware/retry_with_backoff_test.exs | procore/elixir-sdk | abff2935702a5e9f0290a072e90c2f219bca3cb8 | [
"MIT"
] | 6 | 2018-02-01T01:34:16.000Z | 2020-08-31T15:15:08.000Z | test/tesla/middleware/retry_with_backoff_test.exs | procore/elixir-sdk | abff2935702a5e9f0290a072e90c2f219bca3cb8 | [
"MIT"
] | 6 | 2018-02-03T03:01:48.000Z | 2021-06-08T18:39:40.000Z | test/tesla/middleware/retry_with_backoff_test.exs | procore/elixir-sdk | abff2935702a5e9f0290a072e90c2f219bca3cb8 | [
"MIT"
] | 3 | 2018-08-12T03:51:52.000Z | 2020-02-27T14:29:14.000Z | defmodule Tesla.Middleware.RetryWithBackoffTest do
use ExUnit.Case
defmodule RetryTestAdapter do
def start_link, do: Agent.start_link(fn -> 0 end, name: __MODULE__)
def call(env, _opts) do
Agent.get_and_update(__MODULE__, fn retries ->
response =
case env.url do
"/success" -> {:ok, Map.put(env, :status, 200)}
"/404" -> {:ok, Map.put(env, :status, 404)}
"/retry" when retries < 2 -> {:error, :econnrefused}
"/retry" -> {:ok, Map.put(env, :status, 200)}
"/retry_504_fail" -> {:ok, Map.put(env, :status, 504)}
"/retry_503_fail" -> {:ok, Map.put(env, :status, 503)}
end
{response, retries + 1}
end)
end
end
defmodule RetryTestClient do
  # Tesla client exercising the middleware under test: up to 3 retries
  # with a 10ms base delay.
  use Tesla

  adapter(Tesla.Adapter.Hackney, recv_timeout: 30_000)

  plug(Tesla.Middleware.HandleResponse)
  plug(Tesla.Middleware.RetryWithBackoff, delay: 10, max_retries: 3)

  # NOTE(review): this second `adapter/1` call appears to override the
  # Hackney adapter above so requests hit the fake adapter — confirm.
  adapter(RetryTestAdapter)
end
# Fresh Agent (attempt counter reset to 0) before every test.
setup do
  {:ok, _} = RetryTestAdapter.start_link()
  :ok
end
test "no retry when successful" do
assert %Procore.ResponseResult{reply: :ok, status_code: 200} = RetryTestClient.get("/success")
end
test "no retry when 4**" do
assert %Procore.ResponseResult{reply: :unrecognized_code, status_code: 404} =
RetryTestClient.get("/404")
end
test "will retry and pass eventually" do
assert %Procore.ResponseResult{reply: :ok, status_code: 200} = RetryTestClient.get("/retry")
end
test "will retry 504 and fail eventually" do
assert %Procore.ErrorResult{reply: :error, reason: :gateway_timeout} =
RetryTestClient.get("/retry_504_fail")
end
test "will retry 503 and fail eventually" do
assert %Procore.ErrorResult{reply: :error, reason: :gateway_unavailable} =
RetryTestClient.get("/retry_503_fail")
end
end
| 30.5 | 98 | 0.644632 |
792da38f207093dd59773be0f8c3cbcae09c167c | 2,118 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/machine_type_list_warning.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/machine_type_list_warning.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/compute/lib/google_api/compute/v1/model/machine_type_list_warning.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.MachineTypeListWarning do
  @moduledoc """
  [Output Only] Informational warning message.

  ## Attributes

  *   `code` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.
  *   `data` (*type:* `list(GoogleApi.Compute.V1.Model.MachineTypeListWarningData.t)`, *default:* `nil`) - [Output Only] Metadata about this warning in key: value format. For example:
      "data": [ { "key": "scope", "value": "zones/us-east1-d" }
  *   `message` (*type:* `String.t`, *default:* `nil`) - [Output Only] A human-readable description of the warning code.
  """

  # Generated model; `field/1,2` macros come from ModelBase and drive
  # JSON (de)serialization of the struct fields declared below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :code => String.t(),
          :data => list(GoogleApi.Compute.V1.Model.MachineTypeListWarningData.t()),
          :message => String.t()
        }

  field(:code)
  # `data` is a list of nested model structs, decoded element-wise.
  field(:data, as: GoogleApi.Compute.V1.Model.MachineTypeListWarningData, type: :list)
  field(:message)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.MachineTypeListWarning do
  # Delegate JSON decoding to the model's own `decode/2` (provided by
  # GoogleApi.Gax.ModelBase through `use`).
  defdelegate decode(value, options), to: GoogleApi.Compute.V1.Model.MachineTypeListWarning
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.MachineTypeListWarning do
  # Generic struct-to-JSON encoding shared by all generated models.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 39.222222 | 194 | 0.720963 |
792da51b39cb39d633ce8cfb46bce72affd6cb61 | 2,230 | exs | Elixir | mix.exs | Gamecode-HQ/sfmt-erlang | 62b1a1e25fce9b44f8b3a9ea9d458d751c57cf1b | [
"BSD-3-Clause"
] | null | null | null | mix.exs | Gamecode-HQ/sfmt-erlang | 62b1a1e25fce9b44f8b3a9ea9d458d751c57cf1b | [
"BSD-3-Clause"
] | null | null | null | mix.exs | Gamecode-HQ/sfmt-erlang | 62b1a1e25fce9b44f8b3a9ea9d458d751c57cf1b | [
"BSD-3-Clause"
] | null | null | null | defmodule Mix.Tasks.Compile.Sfmt do
@shortdoc "Compiles Sfmt"
# Builds the SFMT NIF by shelling out to `make all`, then refreshes the
# Mix build structure so the compiled artifacts are picked up.
# Raises Mix.Error (via nocompiler_error/1) when `make` is not on PATH.
def run(_args) do
  File.mkdir("priv")

  exec = "make"
  args = ["all"]

  case System.find_executable(exec) do
    nil ->
      nocompiler_error(exec)

    _path ->
      build(exec, args)
      Mix.Project.build_structure()
      :ok
  end
end
# Runs `exec args...`, echoes its combined stdout/stderr to our stdout,
# and raises (via build_error/1) when the command exits non-zero.
def build(exec, args) do
  {output, exit_status} = System.cmd(exec, args, stderr_to_stdout: true)
  IO.binwrite(output)
  if exit_status != 0, do: build_error(exec)
end
# Raise helpers wrapping the user-facing messages below in Mix.Error.
defp nocompiler_error(exec), do: raise(Mix.Error, message: nocompiler_message(exec))

defp build_error(exec), do: raise(Mix.Error, message: build_message(exec))

defp nocompiler_message(exec) do
  """
  Could not find the compiler program `#{exec}`.
  """
end

defp build_message(exec) do
  """
  Could not build the program with `#{exec}`.
  """
end
end
defmodule Mix.Tasks.Edoc do
  @shortdoc "Make docs using edoc on erlang.mk"

  # Shells out to the erlang.mk "docs" target and echoes its output.
  # The exit status is deliberately ignored; the task always returns :ok.
  def run(_args) do
    {output, _exit_status} = System.cmd("make", ["docs"], stderr_to_stdout: true)
    Mix.shell().info(output)
    :ok
  end
end
defmodule Sfmt.Mixfile do
  use Mix.Project

  # Project definition for the sfmt Hex package.
  # Fix: the original invoked the local helpers (`description`, `package`,
  # `deps`) and `Mix.compilers` without parentheses, which is deprecated
  # and warns on modern Elixir; explicit parentheses are used here.
  def project do
    [app: :sfmt,
     version: "0.13.0",
     description: description(),
     package: package(),
     # The custom :sfmt compiler (Mix.Tasks.Compile.Sfmt) builds the NIF
     # before the regular Elixir/Erlang compilers run.
     compilers: [:sfmt] ++ Mix.compilers(),
     aliases: [docs: ["edoc"]],
     deps: deps()]
  end

  # OTP application configuration: no supervision tree, runtime deps only.
  def application do
    [applications: [:kernel, :stdlib, :logger]]
  end

  # No Hex dependencies; the C sources are built via make.
  defp deps do
    []
  end

  # One-line package description shown on hex.pm.
  defp description do
    """
    SIMD-oriented Fast Mersenne Twister (SFMT) for Erlang.
    """
  end

  # Hex package metadata: shipped files, maintainers, license and links.
  defp package do
    [files: [
       "c_src",
       "doc/overview.edoc",
       "reference_texts",
       "rebar.config",
       "src",
       "test",
       "test_scripts",
       ".gitignore",
       ".travis.yml",
       "CHANGES.md",
       "CONTRIBUTING.md",
       "Doxyfile",
       "LICENSE",
       "Makefile",
       "Makefile.sfmt",
       "README.md",
       "erlang.mk",
       "mix.exs"
     ],
     maintainers: [
       "Kenji Rikitake"
     ],
     licenses: ["simplified BSD"],
     build_tools: ["make"],
     links: %{"GitHub" => "https://github.com/jj1bdx/sfmt-erlang/",
              "Docs" => "http://hexdocs.pm/sfmt"}
    ]
  end
end
| 19.910714 | 78 | 0.581614 |
792dab19d1a0c0f5b1c2d6bd4eca555f530c8fa3 | 173 | exs | Elixir | priv/repo/migrations/20211124075928_add_name_to_users.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 1 | 2022-01-30T19:51:39.000Z | 2022-01-30T19:51:39.000Z | priv/repo/migrations/20211124075928_add_name_to_users.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | priv/repo/migrations/20211124075928_add_name_to_users.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | defmodule Sanbase.Repo.Migrations.AddNameToUsers do
use Ecto.Migration
# Adds the optional `name` column to `users`; Ecto auto-reverses this
# on rollback because `change/0` is used.
def change do
  alter table(:users) do
    add(:name, :string, null: true)
  end
end
end
| 17.3 | 51 | 0.693642 |
792dbd018e3dabcbd652ec33edfc5265f737078d | 3,192 | ex | Elixir | lib/sippet/proxy/registry.ex | balena/elixir-sippet-proxy | 868f4b1b07f6f214fff084b182076306e580ac9c | [
"BSD-3-Clause"
] | 5 | 2017-06-18T14:19:47.000Z | 2021-08-18T17:08:51.000Z | lib/sippet/proxy/registry.ex | balena/elixir-sippet-proxy | 868f4b1b07f6f214fff084b182076306e580ac9c | [
"BSD-3-Clause"
] | 1 | 2017-11-02T21:44:24.000Z | 2017-11-03T12:19:54.000Z | lib/sippet/proxy/registry.ex | balena/elixir-sippet-proxy | 868f4b1b07f6f214fff084b182076306e580ac9c | [
"BSD-3-Clause"
] | 1 | 2017-11-28T23:10:39.000Z | 2017-11-28T23:10:39.000Z | defmodule Sippet.Proxy.Registry do
@moduledoc """
The Proxy registry, where client and server transaction keys are associated
to controller processes.
"""
@type client_key :: Sippet.Transactions.Client.Key.t
@type server_key :: Sippet.Transactions.Server.Key.t
@doc """
Starts the proxy registry as a supervisor process.
Manually it can be started as:
Sippet.Proxy.Registry.start_link()
In your supervisor tree, you would write:
supervisor(Sippet.Proxy.Registry, [])
The registry is partitioned according to the number of schedulers available.
"""
def start_link() do
args = [partitions: System.schedulers_online()]
Registry.start_link(:unique, __MODULE__, args)
end
@doc """
Takes a `{:via, Registry, {registry, key}}` tuple corresponding to this
registry for the given `key`.
XXX: transform into a `start_child`.
"""
@spec via_tuple(client_key | server_key) ::
{:via, Registry, {__MODULE__, client_key | server_key}}
def via_tuple(%Sippet.Transactions.Client.Key{} = client_key),
do: do_via_tuple(client_key)
def via_tuple(%Sippet.Transactions.Server.Key{} = server_key),
do: do_via_tuple(server_key)
defp do_via_tuple(key), do: {:via, Registry, {__MODULE__, key}}
@doc """
Looks up if a registered process exists to handle the given key.
An empty list if there is no match.
"""
@spec lookup(client_key | server_key) :: pid | nil
def lookup(%Sippet.Transactions.Client.Key{} = client_key),
do: do_lookup(client_key)
def lookup(%Sippet.Transactions.Server.Key{} = server_key),
do: do_lookup(server_key)
defp do_lookup(key) do
case Registry.lookup(__MODULE__, key) do
[{pid, _}] -> pid
[] -> nil
end
end
@doc """
Registers the current process under the given client or server transaction
key in the proxy registry.
By doing so, incoming requests or responses will be redirected to the current
process once they come. The registry is cleared once the process finishes.
"""
@spec register_alias(client_key | server_key) ::
{:ok, pid} | {:error, {:already_registered, pid}}
def register_alias(client_or_server_key)
def register_alias(%Sippet.Transactions.Client.Key{} = client_key),
do: do_register_alias(client_key)
def register_alias(%Sippet.Transactions.Server.Key{} = server_key),
do: do_register_alias(server_key)
defp do_register_alias(key),
do: Registry.register(__MODULE__, key, nil)
@doc """
Unregisters the process registered under the given client or server
transaction key.
"""
@spec unregister_alias(client_key | server_key) :: :ok
def unregister_alias(client_or_server_key)
def unregister_alias(%Sippet.Transactions.Client.Key{} = client_key),
do: do_unregister_alias(client_key)
def unregister_alias(%Sippet.Transactions.Server.Key{} = server_key),
do: do_unregister_alias(server_key)
defp do_unregister_alias(key),
do: Registry.unregister(__MODULE__, key)
@doc """
Returns all client or server keys associated with the given process.
"""
@spec aliases(pid) :: [client_key | server_key]
def aliases(pid), do: Registry.keys(__MODULE__, pid)
end
| 30.113208 | 79 | 0.715539 |
792dbffce1d24bb464aadcdd97bbbf7962e5eaa8 | 6,856 | exs | Elixir | integration_test/sql/transaction.exs | groyoh/ecto_sql | 49a732375b928f83f29f995e874e3a580a9caeb8 | [
"Apache-2.0"
] | 1 | 2019-05-02T14:02:56.000Z | 2019-05-02T14:02:56.000Z | integration_test/sql/transaction.exs | groyoh/ecto_sql | 49a732375b928f83f29f995e874e3a580a9caeb8 | [
"Apache-2.0"
] | null | null | null | integration_test/sql/transaction.exs | groyoh/ecto_sql | 49a732375b928f83f29f995e874e3a580a9caeb8 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Integration.TransactionTest do
# We can keep this test async as long as it
# is the only one access the transactions table
use Ecto.Integration.Case, async: true
import Ecto.Query
alias Ecto.Integration.PoolRepo # Used for writes
alias Ecto.Integration.TestRepo # Used for reads
@moduletag :capture_log
# Sentinel exception raised inside transactions to trigger a rollback on purpose.
defmodule UniqueError do
  defexception message: "unique error"
end
# Start every test from an empty "transactions" table.
setup do
  PoolRepo.delete_all "transactions"
  :ok
end
# Minimal schema backing the shared "transactions" table used by these tests.
defmodule Trans do
  use Ecto.Schema

  schema "transactions" do
    field :num, :integer
  end
end
test "transaction returns value" do
refute PoolRepo.in_transaction?
{:ok, val} = PoolRepo.transaction(fn ->
assert PoolRepo.in_transaction?
{:ok, val} =
PoolRepo.transaction(fn ->
assert PoolRepo.in_transaction?
42
end)
assert PoolRepo.in_transaction?
val
end)
refute PoolRepo.in_transaction?
assert val == 42
end
test "transaction re-raises" do
assert_raise UniqueError, fn ->
PoolRepo.transaction(fn ->
PoolRepo.transaction(fn ->
raise UniqueError
end)
end)
end
end
test "transaction commits" do
PoolRepo.transaction(fn ->
e = PoolRepo.insert!(%Trans{num: 1})
assert [^e] = PoolRepo.all(Trans)
assert [] = TestRepo.all(Trans)
end)
assert [%Trans{num: 1}] = PoolRepo.all(Trans)
end
test "transaction rolls back" do
try do
PoolRepo.transaction(fn ->
e = PoolRepo.insert!(%Trans{num: 2})
assert [^e] = PoolRepo.all(Trans)
assert [] = TestRepo.all(Trans)
raise UniqueError
end)
rescue
UniqueError -> :ok
end
assert [] = TestRepo.all(Trans)
end
test "transaction rolls back per repository" do
message = "cannot call rollback outside of transaction"
assert_raise RuntimeError, message, fn ->
PoolRepo.rollback(:done)
end
assert_raise RuntimeError, message, fn ->
TestRepo.transaction fn ->
PoolRepo.rollback(:done)
end
end
end
test "transaction rolls back with reason on aborted transaction" do
e1 = PoolRepo.insert!(%Trans{num: 13})
assert_raise Ecto.ConstraintError, fn ->
TestRepo.transaction fn ->
PoolRepo.insert!(%Trans{id: e1.id, num: 14})
end
end
end
test "nested transaction partial rollback" do
assert PoolRepo.transaction(fn ->
e1 = PoolRepo.insert!(%Trans{num: 3})
assert [^e1] = PoolRepo.all(Trans)
try do
PoolRepo.transaction(fn ->
e2 = PoolRepo.insert!(%Trans{num: 4})
assert [^e1, ^e2] = PoolRepo.all(from(t in Trans, order_by: t.num))
raise UniqueError
end)
rescue
UniqueError -> :ok
end
assert_raise DBConnection.ConnectionError, "transaction rolling back",
fn() -> PoolRepo.insert!(%Trans{num: 5}) end
end) == {:error, :rollback}
assert TestRepo.all(Trans) == []
end
test "manual rollback doesn't bubble up" do
x = PoolRepo.transaction(fn ->
e = PoolRepo.insert!(%Trans{num: 6})
assert [^e] = PoolRepo.all(Trans)
PoolRepo.rollback(:oops)
end)
assert x == {:error, :oops}
assert [] = TestRepo.all(Trans)
end
test "manual rollback bubbles up on nested transaction" do
assert PoolRepo.transaction(fn ->
e = PoolRepo.insert!(%Trans{num: 7})
assert [^e] = PoolRepo.all(Trans)
assert {:error, :oops} = PoolRepo.transaction(fn ->
PoolRepo.rollback(:oops)
end)
assert_raise DBConnection.ConnectionError, "transaction rolling back",
fn() -> PoolRepo.insert!(%Trans{num: 8}) end
end) == {:error, :rollback}
assert [] = TestRepo.all(Trans)
end
test "transactions are not shared in repo" do
pid = self()
new_pid = spawn_link fn ->
PoolRepo.transaction(fn ->
e = PoolRepo.insert!(%Trans{num: 9})
assert [^e] = PoolRepo.all(Trans)
send(pid, :in_transaction)
receive do
:commit -> :ok
after
5000 -> raise "timeout"
end
end)
send(pid, :committed)
end
receive do
:in_transaction -> :ok
after
5000 -> raise "timeout"
end
assert [] = PoolRepo.all(Trans)
send(new_pid, :commit)
receive do
:committed -> :ok
after
5000 -> raise "timeout"
end
assert [%Trans{num: 9}] = PoolRepo.all(Trans)
end
## Checkout
describe "with checkouts" do
test "transaction inside checkout" do
PoolRepo.checkout(fn ->
refute PoolRepo.in_transaction?
PoolRepo.transaction(fn ->
assert PoolRepo.in_transaction?
end)
refute PoolRepo.in_transaction?
end)
end
test "checkout inside transaction" do
PoolRepo.transaction(fn ->
assert PoolRepo.in_transaction?
PoolRepo.checkout(fn ->
assert PoolRepo.in_transaction?
end)
assert PoolRepo.in_transaction?
end)
end
test "checkout raises on transaction attempt" do
assert_raise DBConnection.ConnectionError, ~r"connection was checked out with status", fn ->
PoolRepo.checkout(fn -> PoolRepo.query!("BEGIN") end)
end
end
end
## Logging
# Stores a telemetry callback in the process dictionary that forwards each
# event's measurements/metadata to this test process as a message
# (presumably read by the integration-test repo's telemetry handler —
# TODO confirm against the test helpers).
defp register_telemetry() do
  Process.put(:telemetry, fn _, measurements, event -> send(self(), {measurements, event}) end)
end
test "log begin, commit and rollback" do
register_telemetry()
PoolRepo.transaction(fn ->
assert_received {measurements, %{params: [], result: :ok}}
assert is_integer(measurements.query_time) and measurements.query_time >= 0
assert is_integer(measurements.queue_time) and measurements.queue_time >= 0
refute_received %{}
register_telemetry()
end)
assert_received {measurements, %{params: [], result: :ok}}
assert is_integer(measurements.query_time) and measurements.query_time >= 0
refute Map.has_key?(measurements, :queue_time)
assert PoolRepo.transaction(fn ->
refute_received %{}
register_telemetry()
PoolRepo.rollback(:log_rollback)
end) == {:error, :log_rollback}
assert_received {measurements, %{params: [], result: :ok}}
assert is_integer(measurements.query_time) and measurements.query_time >= 0
refute Map.has_key?(measurements, :queue_time)
end
test "log queries inside transactions" do
PoolRepo.transaction(fn ->
register_telemetry()
assert [] = PoolRepo.all(Trans)
assert_received {measurements, %{params: [], result: :ok}}
assert is_integer(measurements.query_time) and measurements.query_time >= 0
assert is_integer(measurements.decode_time) and measurements.query_time >= 0
refute Map.has_key?(measurements, :queue_time)
end)
end
end
| 26.369231 | 98 | 0.637544 |
792dc6d88e82fe998106f665a63c5e3286e0e28a | 626 | ex | Elixir | apps/mcam_server/lib/mcam_server/mailing/mailer.ex | paulanthonywilson/mcam | df9c5aaae00b568749dff22613636f5cb92f905a | [
"MIT"
] | null | null | null | apps/mcam_server/lib/mcam_server/mailing/mailer.ex | paulanthonywilson/mcam | df9c5aaae00b568749dff22613636f5cb92f905a | [
"MIT"
] | 8 | 2020-11-16T09:59:12.000Z | 2020-11-16T10:13:07.000Z | apps/mcam_server/lib/mcam_server/mailing/mailer.ex | paulanthonywilson/mcam | df9c5aaae00b568749dff22613636f5cb92f905a | [
"MIT"
] | null | null | null | defmodule McamServer.Mailing.Mailer do
@moduledoc """
Sends the emails.
"""
use Bamboo.Mailer, otp_app: :mcam_server
@dialyzer {:nowarn_function, deliver_later: 1}
import Bamboo.Email, only: [new_email: 1]
def deliver(to, subject, body) do
require Logger
[from: from(), to: to, subject: subject, text_body: body]
|> new_email()
|> deliver_later()
{:ok, %{to: to, body: body}}
end
def from do
env = Application.fetch_env!(:mcam_server, __MODULE__)
case env[:from] do
nil ->
"noreply@#{Keyword.fetch!(env, :domain)}"
res ->
res
end
end
end
| 18.969697 | 61 | 0.616613 |
792dd5ae6a413832f9fa9002613ef5d98cb3d83a | 1,685 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/add_chart_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/add_chart_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/add_chart_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Sheets.V4.Model.AddChartRequest do
@moduledoc """
Adds a chart to a sheet in the spreadsheet.
## Attributes
- chart (EmbeddedChart): The chart that should be added to the spreadsheet, including the position where it should be placed. The chartId field is optional; if one is not set, an id will be randomly generated. (It is an error to specify the ID of a chart that already exists.) Defaults to: `null`.
"""
defstruct [
:"chart"
]
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.AddChartRequest do
import GoogleApi.Sheets.V4.Deserializer
def decode(value, options) do
value
|> deserialize(:"chart", :struct, GoogleApi.Sheets.V4.Model.EmbeddedChart, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.AddChartRequest do
def encode(value, options) do
GoogleApi.Sheets.V4.Deserializer.serialize_non_nil(value, options)
end
end
| 35.104167 | 299 | 0.754303 |
792dd8fbf9f17a3c11d09ed9a6c582b103c1e6d3 | 245 | ex | Elixir | lib/hexedio_web/views/layout_view.ex | netsudo/hexedio | 3e0e1ff7cfffcd704ba60394d0a38c4d8608e100 | [
"MIT"
] | null | null | null | lib/hexedio_web/views/layout_view.ex | netsudo/hexedio | 3e0e1ff7cfffcd704ba60394d0a38c4d8608e100 | [
"MIT"
] | 3 | 2020-07-16T05:37:16.000Z | 2022-03-16T08:58:28.000Z | lib/hexedio_web/views/layout_view.ex | netsudo/hexedio | 3e0e1ff7cfffcd704ba60394d0a38c4d8608e100 | [
"MIT"
] | null | null | null | defmodule HexedioWeb.LayoutView do
use HexedioWeb, :view
alias Hexedio.Auth.Guardian
alias Hexedio.Posts
def maybe_user(conn) do
Guardian.Plug.current_resource(conn)
end
def categories() do
Posts.list_categories
end
end
| 16.333333 | 40 | 0.75102 |
792df07ba8130731016224adfa75f7a6ab56c7a3 | 3,596 | ex | Elixir | lib/builder.ex | nitros12/formex_ecto | 320937c1a99242c62d646da57ca25b05713d81c8 | [
"MIT"
] | 1 | 2018-09-16T18:49:49.000Z | 2018-09-16T18:49:49.000Z | lib/builder.ex | nitros12/formex_ecto | 320937c1a99242c62d646da57ca25b05713d81c8 | [
"MIT"
] | null | null | null | lib/builder.ex | nitros12/formex_ecto | 320937c1a99242c62d646da57ca25b05713d81c8 | [
"MIT"
] | null | null | null | defmodule Formex.BuilderType.Ecto do
@moduledoc false
defstruct [:form]
end
defimpl Formex.BuilderProtocol, for: Formex.BuilderType.Ecto do
alias Formex.Form
alias Formex.Field
alias Formex.FormNested
alias Formex.FormCollection
import Formex.Ecto.Utils
require Ecto.Query
@repo Application.get_env(:formex, :repo)
@spec create_form(Map.t()) :: Map.t()
def create_form(args) do
form = args.form.type.build_form(args.form)
method = if form.struct.id, do: :put, else: :post
form =
form
|> Map.put(:struct, preload_assocs(form))
|> Map.put(:method, method)
|> Form.finish_creating()
# form = form
# |> Map.put(:data, Keyword.put(form.data, :original_struct, form.struct))
# |> Map.put(:struct, copy_preloads_to_struct(form))
Map.put(args, :form, form)
end
@spec create_struct_info(Map.t()) :: Map.t()
def create_struct_info(args) do
form = args.form
struct = struct(form.struct_module)
struct_info =
struct
|> Map.from_struct()
|> Enum.filter(&(elem(&1, 0) !== :__meta__))
|> Enum.map(fn {k, _v} ->
v =
case get_assoc_or_embed(form, k) do
%{cardinality: :many, related: module} ->
{:collection, module}
%{cardinality: :one, related: module} ->
{:nested, module}
_ ->
:any
end
{k, v}
end)
form = Map.put(form, :struct_info, struct_info)
Map.put(args, :form, form)
end
#
defp apply_query(query, custom_query) when is_function(custom_query) do
custom_query.(query)
end
defp apply_query(query, _) do
query
end
defp preload_assocs(form) do
# TODO - creating a new struct for nested when it is nil,
# or handle that empty form and do not displaying it
form.items
|> Enum.filter(fn item ->
case item do
%FormNested{} -> true
%FormCollection{} -> true
%Field{} -> item.type == :multiple_select
_ -> false
end
end)
|> Enum.reduce(form.struct, fn item, struct ->
if is_assoc(form, item.name) do
queryable = struct.__struct__.__schema__(:association, item.name).queryable
query = Ecto.Query.from(e in queryable, order_by: e.id)
query = apply_query(query, item.opts[:query])
struct
|> @repo.preload([
{item.name, query}
])
else
struct
end
end)
end
# written for 0.1.5 and never used. maybe it will be useful some day
# defp copy_preloads_to_struct(form) do
# struct = form.items
# |> Enum.filter(fn item ->
# case item do
# %FormNested{} -> true
# %FormCollection{} -> true
# _ -> false
# end
# end)
# |> Enum.reduce(form.struct, fn item, struct ->
# if is_assoc(form, item.name) do
# {key, val} = case item do
# %FormNested{} ->
# {item.name, copy_preloads_to_struct(item.form)}
# %FormCollection{} ->
# {item.name, Enum.map(item.forms, fn nested ->
# copy_preloads_to_struct(nested.form)
# end)}
# end
# Map.put(struct, key, val)
# else
# struct
# end
# end)
# end
@doc false
@spec get_assoc_or_embed(form :: Form.t(), name :: Atom.t()) :: any
defp get_assoc_or_embed(form, name) do
if is_assoc(form, name) do
form.struct_module.__schema__(:association, name)
else
form.struct_module.__schema__(:embed, name)
end
end
end
| 24.462585 | 83 | 0.581201 |
792e045035db953b52516e8f637f8814b50e0feb | 2,243 | exs | Elixir | config/prod.exs | sotte/python_channels | dcec42d4a787cbb52c8be13a576e5dcadc1fda00 | [
"MIT"
] | null | null | null | config/prod.exs | sotte/python_channels | dcec42d4a787cbb52c8be13a576e5dcadc1fda00 | [
"MIT"
] | null | null | null | config/prod.exs | sotte/python_channels | dcec42d4a787cbb52c8be13a576e5dcadc1fda00 | [
"MIT"
] | null | null | null | use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :python_channels, PythonChannels.Endpoint,
http: [port: {:system, "PORT"}],
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :python_channels, PythonChannels.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :python_channels, PythonChannels.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :python_channels, PythonChannels.Endpoint, server: true
#
# You will also need to set the application root to `.` in order
# for the new static assets to be served after a hot upgrade:
#
# config :python_channels, PythonChannels.Endpoint, root: "."
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 33.984848 | 68 | 0.721801 |
792e08dde6bbd6f763427370159d7734388e9703 | 1,131 | exs | Elixir | config/config.exs | jvoegele/wait_for_it | a3e722dec8d7c291f545533d70d3dab774e7cb4f | [
"Apache-2.0"
] | 10 | 2018-02-19T12:18:44.000Z | 2020-12-03T22:55:48.000Z | config/config.exs | jvoegele/wait_for_it | a3e722dec8d7c291f545533d70d3dab774e7cb4f | [
"Apache-2.0"
] | 11 | 2017-08-30T23:43:06.000Z | 2019-04-05T20:15:42.000Z | config/config.exs | jvoegele/wait_for_it | a3e722dec8d7c291f545533d70d3dab774e7cb4f | [
"Apache-2.0"
] | 2 | 2017-08-30T13:11:29.000Z | 2017-12-28T20:26:42.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :wait_for_it, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:wait_for_it, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.483871 | 73 | 0.753316 |
792e0bd62fe3ee09594927af11fac3ea4bc41903 | 1,102 | ex | Elixir | test/support/conn_case.ex | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | test/support/conn_case.ex | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | test/support/conn_case.ex | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | defmodule Namuraid.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias Namuraid.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Namuraid.Router.Helpers
# The default endpoint for testing
@endpoint Namuraid.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Namuraid.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Namuraid.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 24.488889 | 70 | 0.705989 |
792e14f974e4836051665fc409305d09f601b447 | 3,626 | ex | Elixir | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1beta2__label_annotation.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1beta2__label_annotation.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1beta2__label_annotation.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelAnnotation do
  @moduledoc """
  Label annotation.

  Auto-generated model struct; do not edit manually.

  ## Attributes

  *   `categoryEntities` (*type:* `list(GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_Entity.t)`, *default:* `nil`) - Common categories for the detected entity. For example, when the label is `Terrier`, the category is likely `dog`. And in some cases there might be more than one categories e.g., `Terrier` could also be a `pet`.
  *   `entity` (*type:* `GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_Entity.t`, *default:* `nil`) - Detected entity.
  *   `frames` (*type:* `list(GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelFrame.t)`, *default:* `nil`) - All video frames where a label was detected.
  *   `segments` (*type:* `list(GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelSegment.t)`, *default:* `nil`) - All video segments where a label was detected.
  *   `version` (*type:* `String.t`, *default:* `nil`) - Feature version.
  """

  # ModelBase supplies the struct definition plus JSON (de)serialization
  # derived from the field/1,2 declarations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :categoryEntities =>
            list(
              GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_Entity.t()
            )
            | nil,
          :entity =>
            GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_Entity.t()
            | nil,
          :frames =>
            list(
              GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelFrame.t()
            )
            | nil,
          :segments =>
            list(
              GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelSegment.t()
            )
            | nil,
          :version => String.t() | nil
        }

  # Field metadata: `as:` names the nested model used when decoding,
  # `type: :list` marks list-valued attributes.
  field(:categoryEntities,
    as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_Entity,
    type: :list
  )

  field(:entity,
    as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_Entity
  )

  field(:frames,
    as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelFrame,
    type: :list
  )

  field(:segments,
    as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelSegment,
    type: :list
  )

  field(:version)
end
defimpl Poison.Decoder,
  for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelAnnotation do
  alias GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelAnnotation

  # Delegate to the ModelBase-generated decode/2 on the model itself.
  def decode(value, options) do
    GoogleCloudVideointelligenceV1beta2_LabelAnnotation.decode(value, options)
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1beta2_LabelAnnotation do
  # Generic struct encoding is handled by the shared Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 38.989247 | 356 | 0.722008 |
792e4535e3073ea5d194e90c25d87b5d22cbea54 | 13,431 | ex | Elixir | lib/mix/tasks/dialyzer.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/dialyzer.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/dialyzer.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Dialyzer do
@shortdoc "Runs dialyzer with default or project-defined flags."
@moduledoc """
This task compiles the mix project, creates a PLT with dependencies if needed and runs `dialyzer`. Much of its behavior can be managed in configuration as described below.
If executed outside of a mix project, it will build the core PLT files and exit.
## Command line options
* `--no-compile` - do not compile even if needed
* `--no-check` - do not perform (quick) check to see if PLT needs update
* `--force-check` - force PLT check also if lock file is unchanged useful
when dealing with local deps.
* `--ignore-exit-status` - display warnings but do not halt the VM or
return an exit status code
* `--list-unused-filters` - list unused ignore filters useful for CI. do
not use with `mix do`.
* `--plt` - only build the required PLT(s) and exit
* `--format short` - format the warnings in a compact format
* `--format raw` - format the warnings in format returned before Dialyzer formatting
* `--format dialyxir` - format the warnings in a pretty printed format
* `--format dialyzer` - format the warnings in the original Dialyzer format
* `--quiet` - suppress all informational messages
Warning flags passed to this task are passed on to `:dialyzer` - e.g.
mix dialyzer --unmatched_returns
## Configuration
All configuration is included under a dialyzer key in the mix project keyword list.
### Flags
You can specify any `dialyzer` command line argument with the :flags keyword.
Dialyzer supports a number of warning flags used to enable or disable certain kinds of analysis features. Until version 0.4, `dialyxir` used by default the additional warning flags shown in the example below. However some of these create warnings that are often more confusing than helpful, particularly to new users of Dialyzer. As of 0.4, there are no longer any flags used by default. To get the old behavior, specify them in your Mix project file. For compatibility reasons you can use either the `-Wwarning` convention of the dialyzer CLI, or (preferred) the `WarnOpts` atoms supported by the [API](http://erlang.org/doc/man/dialyzer.html#gui-1). e.g.
```elixir
def project do
[
app: :my_app,
version: "0.0.1",
deps: deps,
dialyzer: [flags: ["-Wunmatched_returns", :error_handling, :underspecs]]
]
end
```
### PLT Configuration
The task will build a PLT with default core Erlang applications: `:erts :kernel :stdlib :crypto` and re-use this core file in multiple projects - another core file is created for Elixir.
OTP application dependencies are (transitively) added to your project's PLT by default. The applications added are the same as you would see displayed with the command `mix app.tree`. There is also a `:plt_add_deps` option you can set to control the dependencies added. The following options are supported:
* `:apps_direct` - Only Direct OTP runtime application dependencies - not the entire tree
* `:app_tree` - Transitive OTP runtime application dependencies e.g. `mix app.tree` (default)
```
def project do
[
app: :my_app,
version: "0.0.1",
deps: deps,
dialyzer: [plt_add_deps: :apps_direct, plt_add_apps: [:wx]]
]
end
```
You can also configure applications to include in the PLT more directly:
* `dialyzer: :plt_add_apps` - applications to include
*in addition* to the core applications and project dependencies.
* `dialyzer: :plt_ignore_apps` - applications to ignore from the list of core
applications and dependencies.
* `dialyzer: :plt_apps` - a list of applications to include that will replace the default,
include all the apps you need e.g.
### Other Configuration
* `dialyzer: :plt_file` - Deprecated - specify the PLT file name to create and use - default is to create one in the project's current build environment (e.g. _build/dev/) specific to the Erlang/Elixir version used. Note that use of this key in version 0.4 or later will produce a deprecation warning - you can silence the warning by providing a pair with key :no_warn e.g. `plt_file: {:no_warn,"filename"}`.
* `dialyzer: :plt_local_path` - specify the PLT directory name to create and use - default is the project's current build environment (e.g. `_build/dev/`).
* `dialyzer: :plt_core_path` - specify an alternative to `MIX_HOME` to use to store the Erlang and Elixir core files.
* `dialyzer: :ignore_warnings` - specify file path to filter well-known warnings.
"""
use Mix.Task
import System, only: [user_home!: 0]
import Dialyxir.Output, only: [info: 1, error: 1]
alias Dialyxir.Project
alias Dialyxir.Plt
alias Dialyxir.Dialyzer
defmodule Build do
@shortdoc "Build the required PLT(s) and exit."
@moduledoc """
This task compiles the mix project and creates a PLT with dependencies if needed.
It is equivalent to running `mix dialyzer --plt`
## Command line options
* `--no-compile` - do not compile even if needed.
"""
use Mix.Task
def run(args) do
Mix.Tasks.Dialyzer.run(["--plt" | args])
end
end
defmodule Clean do
@shortdoc "Delete PLT(s) and exit."
@moduledoc """
This task deletes PLT files and hash files.
## Command line options
* `--all` - delete also core PLTs.
"""
use Mix.Task
@command_options [all: :boolean]
def run(args) do
{opts, _, _dargs} = OptionParser.parse(args, strict: @command_options)
Mix.Tasks.Dialyzer.clean(opts)
end
end
@default_warnings [:unknown]
@old_options [
halt_exit_status: :boolean
]
@command_options Keyword.merge(@old_options,
force_check: :boolean,
ignore_exit_status: :boolean,
list_unused_filters: :boolean,
no_check: :boolean,
no_compile: :boolean,
plt: :boolean,
quiet: :boolean,
raw: :boolean,
format: :string
)
  def run(args) do
    # Split the CLI args into known options and pass-through dialyzer flags.
    {opts, _, dargs} = OptionParser.parse(args, strict: @command_options)
    original_shell = Mix.shell()

    # --quiet swaps in a silent shell for the duration of the task.
    if opts[:quiet], do: Mix.shell(Mix.Shell.Quiet)
    opts = Keyword.delete(opts, :quiet)
    check_dialyzer()
    compatibility_notice()

    if Mix.Project.get() do
      Project.check_config()
      unless opts[:no_compile], do: Mix.Task.run("compile")

      # The PLT check is skipped inside umbrella children and with --no-check
      # (see no_check?/1); the result is intentionally discarded.
      _ =
        unless no_check?(opts) do
          info("Finding suitable PLTs")
          force_check? = Keyword.get(opts, :force_check, false)
          check_plt(force_check?)
        end

      default = Dialyxir.Project.default_ignore_warnings()
      ignore_warnings = Dialyxir.Project.dialyzer_ignore_warnings()

      # Report which ignore-warnings file (if any) will be in effect.
      cond do
        !ignore_warnings && File.exists?(default) ->
          info("""
          No :ignore_warnings opt specified in mix.exs. Using default: #{default}.
          """)

        ignore_warnings && File.exists?(ignore_warnings) ->
          info("""
          ignore_warnings: #{ignore_warnings}
          """)

        ignore_warnings ->
          info("""
          :ignore_warnings opt specified in mix.exs: #{ignore_warnings}, but file does not exist.
          """)

        true ->
          info("""
          No :ignore_warnings opt specified in mix.exs and default does not exist.
          """)
      end

      warn_old_options(opts)
      # With --plt we only build PLTs and skip the analysis itself.
      unless opts[:plt], do: run_dialyzer(opts, dargs)
    else
      # Outside a mix project we can still build/check the core PLTs.
      info("No mix project found - checking core PLTs...")
      Project.plts_list([], false) |> Plt.check()
    end

    # Restore whatever shell was active before --quiet may have replaced it.
    Mix.shell(original_shell)
  end
  @doc """
  Deletes the project PLTs and the PLT hash file; with `all: true` the core
  PLTs are deleted as well. `fun` is invoked for each PLT and defaults to
  `delete_plt/4` (injectable for testing).
  """
  def clean(opts, fun \\ &delete_plt/4) do
    check_dialyzer()
    compatibility_notice()
    if opts[:all], do: Project.plts_list([], false) |> Plt.check(fun)

    # Project-specific PLTs only exist inside a mix project.
    if Mix.Project.get() do
      {apps, _hash} = dependency_hash()
      info("Deleting PLTs")
      Project.plts_list(apps, true, true) |> Plt.check(fun)
      info("About to delete PLT hash file: #{plt_hash_file()}")
      File.rm(plt_hash_file())
    end
  end
def delete_plt(plt, _, _, _) do
info("About to delete PLT file: #{plt}")
File.rm(plt)
end
  # Decides whether the PLT check should be skipped.
  #
  # Inside an umbrella child the parent project owns the PLT: when the PLT is
  # missing we build it in the parent first (and still skip the local check);
  # when it exists we simply skip. Otherwise the --no-check flag decides.
  defp no_check?(opts) do
    case {in_child?(), no_plt?()} do
      {true, true} ->
        info("In an Umbrella child and no PLT found - building that first.")
        build_parent_plt()
        true

      {true, false} ->
        info("In an Umbrella child, not checking PLT...")
        true

      _ ->
        opts[:no_check]
    end
  end
  # Rebuilds/updates the PLTs unless the stored dependency hash shows they are
  # already current; after a rebuild the fresh hash is persisted.
  defp check_plt(force_check?) do
    info("Checking PLT...")
    {apps, hash} = dependency_hash()

    if not force_check? and check_hash?(hash) do
      info("PLT is up to date!")
    else
      Project.plts_list(apps) |> Plt.check()
      File.write(plt_hash_file(), hash)
    end
  end
  # Assembles the argument list for Dialyxir.Dialyzer.dialyze/1, prints its
  # timing line and per-warning results, and halts the VM on a non-zero exit
  # status unless --ignore_exit_status was given.
  defp run_dialyzer(opts, dargs) do
    args = [
      {:check_plt, opts[:force_check] || false},
      {:init_plt, String.to_charlist(Project.plt_file())},
      {:files, Project.dialyzer_files()},
      {:warnings, dialyzer_warnings(dargs)},
      {:format, opts[:format]},
      {:raw, opts[:raw]},
      {:list_unused_filters, opts[:list_unused_filters]},
      {:ignore_exit_status, opts[:ignore_exit_status]}
    ]

    {status, exit_status, [time | result]} = Dialyzer.dialyze(args)
    info(time)
    # Successful runs are reported as info, failing ones as errors.
    report = if status == :ok, do: &info/1, else: &error/1
    Enum.each(result, report)

    unless exit_status == 0 || opts[:ignore_exit_status] do
      error("Halting VM with exit status #{exit_status}")
      System.halt(exit_status)
    end
  end
  # Combines the warning flags configured in mix.exs with the CLI pass-through
  # flags, normalizes every flag to an atom, and appends the default warnings
  # minus any the project explicitly removed.
  defp dialyzer_warnings(dargs) do
    raw_opts = Project.dialyzer_flags() ++ Enum.map(dargs, &elem(&1, 0))
    transform(raw_opts) ++ (@default_warnings -- Project.dialyzer_removed_defaults())
  end
defp transform(options) when is_list(options), do: Enum.map(options, &transform/1)
defp transform(option) when is_atom(option), do: option
defp transform(option) when is_binary(option) do
option
|> String.replace_leading("-W", "")
|> String.replace("--", "")
|> String.to_atom()
end
defp in_child? do
String.contains?(Mix.Project.config()[:lockfile], "..")
end
defp no_plt? do
not File.exists?(Project.deps_plt())
end
  # Shells out to the umbrella parent project and builds its PLT there,
  # streaming the subprocess output straight to stdout.
  defp build_parent_plt() do
    # The parent directory is derived from this child's lockfile path.
    parent = Mix.Project.config()[:lockfile] |> Path.expand() |> Path.dirname()
    opts = [into: IO.stream(:stdio, :line), stderr_to_stdout: true, cd: parent]
    # It would seem more natural to use Mix.in_project here to start in our parent project.
    # However part of the app.tree resolution includes loading all sub apps, and we will
    # hit an exception when we try to do that for *this* child, which is already loaded.
    {out, rc} = System.cmd("mix", ["dialyzer", "--plt"], opts)

    unless rc == 0 do
      info("Error building parent PLT, process returned code: #{rc}\n#{out}")
    end
  end
  # Ensures the Erlang :dialyzer application is actually loadable; some OS
  # package managers ship Erlang without it. Halts the VM with status 3
  # when it is missing.
  defp check_dialyzer do
    if not Code.ensure_loaded?(:dialyzer) do
      error("""
      DEPENDENCY MISSING
      ------------------------
      If you are reading this message, then Elixir and Erlang are installed but the
      Erlang Dialyzer is not available. Probably this is because you installed Erlang
      with your OS package manager and the Dialyzer package is separate.

      On Debian/Ubuntu:

        `apt-get install erlang-dialyzer`

      Fedora:

        `yum install erlang-dialyzer`

      Arch and Homebrew include Dialyzer in their base erlang packages. Please report a Github
      issue to add or correct distribution-specific information.
      """)

      :erlang.halt(3)
    end
  end
  # Emits an error for every CLI option that existed in old dialyxir versions
  # but is no longer valid (see @old_options). Always returns nil so callers
  # cannot rely on the comprehension's result.
  defp warn_old_options(opts) do
    for {opt, _} <- opts, @old_options[opt] do
      error("#{opt} is no longer a valid CLI argument.")
    end

    nil
  end
defp compatibility_notice do
old_plt = "#{user_home!()}/.dialyxir_core_*.plt"
if File.exists?(old_plt) &&
(!File.exists?(Project.erlang_plt()) || !File.exists?(Project.elixir_plt())) do
info("""
COMPATIBILITY NOTICE
------------------------
Previous usage of a pre-0.4 version of Dialyxir detected. Please be aware that the 0.4 release
makes a number of changes to previous defaults. Among other things, the PLT task is automatically
run when dialyzer is run, PLT paths have changed,
transitive dependencies are included by default in the PLT, and no additional warning flags
beyond the dialyzer defaults are included. All these properties can be changed in configuration.
(see `mix help dialyzer`).
If you no longer use the older Dialyxir in any projects and do not want to see this notice each time you upgrade your Erlang/Elixir distribution, you can delete your old pre-0.4 PLT files. (`rm ~/.dialyxir_core_*.plt`)
""")
end
end
@spec check_hash?(binary()) :: boolean()
defp check_hash?(hash) do
case File.read(plt_hash_file()) do
{:ok, stored_hash} -> hash == stored_hash
_ -> false
end
end
defp plt_hash_file, do: Project.plt_file() <> ".hash"
  # Resolves the list of apps that belong in the PLT and computes a SHA-1
  # fingerprint of (lockfile contents + app list) used to detect staleness.
  @spec dependency_hash :: {[atom()], binary()}
  def dependency_hash do
    apps = Project.cons_apps()
    # Prints the resolved app list (informational output).
    apps |> inspect() |> info()
    hash = :crypto.hash(:sha, lock_file() <> :erlang.term_to_binary(apps))
    {apps, hash}
  end
def lock_file() do
Mix.Project.config()[:lockfile] |> File.read!()
end
end
| 34.350384 | 659 | 0.656541 |
792e608d83a66cfc30ace6359af4bea001bedaf3 | 1,213 | ex | Elixir | products/lib/products_web/views/error_helpers.ex | DivvyPayHQ/federation_poc | 74839abf7d3eb8e3029468bbe4d335d7b240da97 | [
"MIT"
] | 2 | 2021-09-21T13:36:49.000Z | 2021-09-25T13:17:40.000Z | products/lib/products_web/views/error_helpers.ex | DivvyPayHQ/federation_poc | 74839abf7d3eb8e3029468bbe4d335d7b240da97 | [
"MIT"
] | null | null | null | products/lib/products_web/views/error_helpers.ex | DivvyPayHQ/federation_poc | 74839abf7d3eb8e3029468bbe4d335d7b240da97 | [
"MIT"
] | null | null | null | defmodule ProductsWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(ProductsWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(ProductsWeb.Gettext, "errors", msg, opts)
end
end
end
| 35.676471 | 77 | 0.671888 |
792e7fead3c647e343ada369a7184d41e3cf0c05 | 3,481 | ex | Elixir | web/views/entry_view.ex | mntns/artus | 958380f42612ec0bc9d059037cf7b59dfbe1cfa9 | [
"MIT"
] | null | null | null | web/views/entry_view.ex | mntns/artus | 958380f42612ec0bc9d059037cf7b59dfbe1cfa9 | [
"MIT"
] | null | null | null | web/views/entry_view.ex | mntns/artus | 958380f42612ec0bc9d059037cf7b59dfbe1cfa9 | [
"MIT"
] | null | null | null | defmodule Artus.EntryView do
use Artus.Web, :view
import Ecto.Query
alias Artus.Repo
alias Artus.Tag
alias Artus.Cache
# TODO: Branch in Entry? Part/Type in Entry?
  @doc "Renders a tag's text as HTML via the `Artus.NotMarkdown` converter."
  def render_tag(tag) do
    tag.tag |> Artus.NotMarkdown.to_html()
  end
  @doc "Looks up the display label for a field key in the field definitions."
  def get_label(key) do
    field_defs = Artus.DefinitionManager.field_defs

    case field_defs[key] do
      # Unknown key: fall back to the inspected key itself.
      nil -> inspect key
      # NOTE(review): this clause matches when the looked-up *value* equals
      # "type", not when `key == "type"` — looks suspicious; confirm intent.
      "type" -> "Type"
      x -> x["label"]
    end
  end
  @doc "Maps a language code to its display label from the language definitions."
  def get_language(language) do
    lang_defs = Artus.DefinitionManager.languages
    # NOTE(review): `hd/1` raises when no definition matches — presumably
    # callers only pass known language codes; confirm.
    l_filtered = lang_defs |> Enum.filter(fn(l) -> l["value"] == language end) |> hd
    l_filtered["label"]
  end
  @doc "Returns the display label for a numeric part value."
  def get_part(part_number) do
    # Part 3 has no entry in the option definitions; it is hard-coded.
    if part_number == 3 do
      "Reprint"
    else
      option_defs = Artus.DefinitionManager.options

      # `hd/1` raises when no option carries this value (unknown part number).
      o_filtered = option_defs["parts"]
      |> Enum.filter(fn(o) -> o["value"] == part_number end) |> hd

      o_filtered["label"]
    end
  end
  @doc "Returns the display label for an entry type value."
  def get_type(type) do
    # Type "r" has no entry in the option definitions; it is hard-coded.
    if type == "r" do
      "Review"
    else
      option_defs = Artus.DefinitionManager.options

      # `hd/1` raises when no option carries this value (unknown type).
      o_filtered = option_defs["types"] |> Enum.filter(fn(o) -> o["value"] == type end) |> hd
      o_filtered["label"]
    end
  end
@doc "Returns DOI referer link"
def doi_link(doi) do
# TODO: Check DOI with Regex
"https://dx.doi.org/" <> doi
end
def get_branch(branch_int) do
branches = Artus.DefinitionManager.branches
case branches["#{branch_int}"] do
x -> x
end
end
  @doc "True when any cache owned by `user_id` contains `entry_id`."
  def belongs_to_user?(user_id, entry_id) do
    # Collect the entry-id lists of every cache owned by this user ...
    query = from c in Cache,
      where: c.user == ^(user_id),
      select: c.entries

    # ... flatten them and test membership of the given entry id.
    query |> Repo.all |> List.flatten |> Enum.member?(entry_id)
  end
@doc """
Prepares entry for table rendering
It fetches the keys from the `DefinitionManager` and
builds a keyword list using the keys and the entry
"""
def prepare_entry_table(entry) do
entry_map = Map.from_struct(entry)
to_remove = [:abstract, :internal_comment]
Artus.DefinitionManager.fields()[entry.type]
|> Enum.map(fn [k, _] -> String.to_atom(k) end)
|> (&(&1 -- to_remove)).()
|> Enum.map(fn(k) -> {k, entry_map[k]} end)
|> Enum.filter(fn({k, v}) -> !is_nil(v) end)
end
  # Fixed labels for pseudo-fields that have no definition entry.
  def render_label(:part), do: "Part"
  def render_label(:type), do: "Type"
  def render_label(:editor), do: "Editor(s)"

  # Any other field: look the label up via the field definitions.
  def render_label(key) do
    key |> Atom.to_string |> get_label()
  end
def render_value(:editor, value) do
value |> Artus.SharedView.style_editors() |> raw()
end
def render_value(:author, value) do
value |> Artus.SharedView.style_authors() |> raw()
end
def render_value(:part, value) do
get_part(value)
end
def render_value(:doi, value) do
render Artus.EntryView, "field_doi.html", %{doi: value}
end
def render_value(:language, value) do
language_map = Artus.DefinitionManager.languages()
|> Enum.filter(fn(m) -> m["value"] == value end)
|> hd()
case language_map["label"] do
nil -> value
x -> x
end
end
def render_value(:links, value) do
# TODO: Refactor field_links.html
render Artus.EntryView, "field_links.html", %{links: value}
end
def render_value(key, value) do
value
end
def render_entry_status(entry) do
case entry.public do
true -> "<span class=\"badge badge-pill badge-success\">Public</span>"
false -> "<span class=\"badge badge-pill badge-warning\">Not public</span>"
end
end
end
| 25.977612 | 93 | 0.62913 |
792e84a28affe7252d3b42ac7208d9f3e05f0242 | 1,360 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/resume_proposal_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/resume_proposal_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/resume_proposal_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.ResumeProposalRequest do
@moduledoc """
Request message to resume (unpause) serving for an already-finalized
proposal.
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ResumeProposalRequest do
def decode(value, options) do
GoogleApi.AdExchangeBuyer.V2beta1.Model.ResumeProposalRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ResumeProposalRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 31.627907 | 93 | 0.772794 |
792e87f8c9b651d72d842cac2fdb1318dce986fa | 33,341 | ex | Elixir | apps/cms/lib/custom_html5_scrubber.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/cms/lib/custom_html5_scrubber.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/cms/lib/custom_html5_scrubber.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | defmodule CMS.CustomHTML5Scrubber do
@moduledoc """
Created to be used in place of HtmlSanitizeEx.html5
so we can add to the list of allowed attributes.
Custom additions are:
- Inclusion of 'mailto' in valid_schemes
- Provides missing quotes to "alt crossorigin usemap ismap width height"
"""
require HtmlSanitizeEx.Scrubber.Meta
alias HtmlSanitizeEx.Scrubber.Meta
# Removes any CDATA tags before the traverser/scrubber runs.
Meta.remove_cdata_sections_before_scrub()
Meta.strip_comments()
@valid_schemes ["http", "https", "mailto", "tel"]
Meta.allow_tag_with_uri_attributes("a", ["href"], @valid_schemes)
Meta.allow_tag_with_these_attributes("a", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"target",
"ping",
"rel",
"media",
"hreflang",
"type"
])
Meta.allow_tag_with_these_attributes("b", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("blockquote", [
"accesskey",
"cite",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("br", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("caption", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("code", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("del", [
"accesskey",
"cite",
"datetime",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("div", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("em", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("figure", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("figcaption", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h1", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h2", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h3", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h4", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h5", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h6", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("head", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("header", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("hgroup", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("hr", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("html", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"manifest"
])
Meta.allow_tag_with_these_attributes("i", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_uri_attributes("iframe", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("iframe", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"name",
"sandbox",
"seamless",
"width",
"height"
])
Meta.allow_tag_with_uri_attributes("img", ["src", "lowsrc", "srcset"], @valid_schemes)
Meta.allow_tag_with_these_attributes("img", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"alt",
"crossorigin",
"usemap",
"ismap",
"width",
"height"
])
Meta.allow_tag_with_uri_attributes("input", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("input", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"accept",
"alt",
"autocomplete",
"autofocus",
"checked",
"dirname",
"disabled",
"form",
"formaction",
"formenctype",
"formmethod",
"formnovalidate",
"formtarget",
"height",
"inputmode",
"list",
"max",
"maxlength",
"min",
"multiple",
"name",
"pattern",
"placeholder",
"readonly",
"required",
"size",
"step",
"type",
"value",
"width"
])
Meta.allow_tag_with_these_attributes("ins", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"cite",
"datetime"
])
Meta.allow_tag_with_these_attributes("kbd", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("keygen", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"autofocus",
"challenge",
"disabled",
"form",
"keytype",
"name"
])
Meta.allow_tag_with_these_attributes("label", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"form",
"for"
])
Meta.allow_tag_with_these_attributes("legend", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("li", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"value"
])
Meta.allow_tag_with_these_attributes("map", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"name"
])
Meta.allow_tag_with_these_attributes("mark", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("menu", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"type",
"label"
])
Meta.allow_tag_with_these_attributes("meta", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"name",
"http-equiv",
"content",
"charset"
])
Meta.allow_tag_with_these_attributes("meter", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"value",
"min",
"max",
"low",
"high",
"optimum"
])
Meta.allow_tag_with_these_attributes("nav", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("object", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"data",
"type",
"typemustmatch",
"name",
"usemap",
"form",
"width",
"height"
])
Meta.allow_tag_with_these_attributes("ol", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"reversed",
"start"
])
Meta.allow_tag_with_these_attributes("optgroup", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"disabled",
"label"
])
Meta.allow_tag_with_these_attributes("option", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"disabled",
"label",
"selected",
"value"
])
Meta.allow_tag_with_these_attributes("output", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"for",
"form",
"name"
])
Meta.allow_tag_with_these_attributes("p", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("param", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"name",
"value"
])
Meta.allow_tag_with_these_attributes("pre", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("progress", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"value",
"max"
])
Meta.allow_tag_with_these_attributes("q", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"cite"
])
Meta.allow_tag_with_these_attributes("rp", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("rt", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("ruby", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("s", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("samp", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("section", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("select", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"autofocus",
"disabled",
"form",
"multiple",
"name",
"required",
"size"
])
Meta.allow_tag_with_these_attributes("small", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_uri_attributes("source", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("source", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"type",
"media"
])
Meta.allow_tag_with_these_attributes("span", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("strong", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("sub", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("summary", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("sup", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("table", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("tbody", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("td", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"colspan",
"rowspan",
"headers"
])
Meta.allow_tag_with_these_attributes("textarea", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"autocomplete",
"autofocus",
"cols",
"dirname",
"disabled",
"form",
"inputmode",
"maxlength",
"name",
"placeholder",
"readonly",
"required",
"rows",
"wrap"
])
Meta.allow_tag_with_these_attributes("tfoot", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("th", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"colspan",
"rowspan",
"headers",
"scope",
"abbr"
])
Meta.allow_tag_with_these_attributes("thead", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("time", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"datetime",
"pubdate"
])
Meta.allow_tag_with_these_attributes("title", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("tr", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_uri_attributes("track", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("track", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"default",
"kind",
"label",
"srclang"
])
Meta.allow_tag_with_these_attributes("u", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("ul", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("var", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_uri_attributes("video", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("video", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"crossorigin",
"poster",
"preload",
"autoplay",
"mediagroup",
"loop",
"muted",
"controls",
"width",
"height"
])
Meta.allow_tag_with_these_attributes("wbr", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tags_with_style_attributes([
"a",
"blockquote",
"br",
"code",
"del",
"em",
"h1",
"h2",
"h3",
"h4",
"h5",
"h6",
"head",
"header",
"hgroup",
"hr",
"html",
"i",
"iframe",
"img",
"input",
"ins",
"kbd",
"keygen",
"label",
"legend",
"li",
"link",
"map",
"mark",
"menu",
"meta",
"meter",
"nav",
"noscript",
"object",
"ol",
"optgroup",
"option",
"output",
"p",
"param",
"pre",
"progress",
"q",
"rp",
"rt",
"ruby",
"s",
"samp",
"script",
"section",
"select",
"small",
"source",
"span",
"strong",
"sub",
"summary",
"sup",
"table",
"tbody",
"td",
"textarea",
"tfoot",
"th",
"thead",
"time",
"title",
"tr",
"track",
"u",
"ul",
"var",
"video",
"wbr"
])
  # Any tag or attribute combination not explicitly allowed above is stripped.
  Meta.strip_everything_not_covered()
@spec html5(String.t()) :: String.t()
def html5(html) do
html |> HtmlSanitizeEx.Scrubber.scrub(__MODULE__)
end
defp scrub_css(text) do
HtmlSanitizeEx.Scrubber.CSS.scrub(text)
end
end
| 15.631036 | 88 | 0.510363 |
792e898fe9e3935a9a296bc80fe82a65c51710f3 | 2,432 | ex | Elixir | lib/nsq/lookupd.ex | amokan/elixir_nsq | 26e9cdf8f6c99b6688e540181a501f53aa5e9e4b | [
"MIT"
] | null | null | null | lib/nsq/lookupd.ex | amokan/elixir_nsq | 26e9cdf8f6c99b6688e540181a501f53aa5e9e4b | [
"MIT"
] | null | null | null | lib/nsq/lookupd.ex | amokan/elixir_nsq | 26e9cdf8f6c99b6688e540181a501f53aa5e9e4b | [
"MIT"
] | null | null | null | defmodule NSQ.Lookupd do
alias NSQ.Connection, as: C
require Logger
@typedoc """
All lookupd responses should return a map with these values. If the response
is not of that form, it should be normalized into that form.
"""
@type response :: %{
data: binary,
headers: [any],
status_code: integer,
status_txt: binary,
}
@spec nsqds_with_topic([C.host_with_port], String.t) :: [C.host_with_port]
def nsqds_with_topic(lookupds, topic) do
responses = Enum.map(lookupds, &topics_from_lookupd(&1, topic))
nsqds = Enum.map responses, fn(response) ->
Enum.map response["producers"] || [], fn(producer) ->
if producer do
{producer["broadcast_address"], producer["tcp_port"]}
else
nil
end
end
end
nsqds |>
List.flatten |>
Enum.uniq |>
Enum.reject(fn(v) -> v == nil end)
end
@spec topics_from_lookupd(C.host_with_port, String.t) :: response
def topics_from_lookupd({host, port}, topic) do
lookupd_url = "http://#{host}:#{port}/lookup?topic=#{topic}"
headers = [{"Accept", "application/vnd.nsq; version=1.0"}]
try do
case HTTPotion.get(lookupd_url, headers: headers) do
%HTTPotion.Response{status_code: 200, body: body, headers: headers} ->
normalize_200_response(headers, body)
%HTTPotion.Response{status_code: 404} ->
%{} |> normalize_response
%HTTPotion.Response{status_code: status, body: body} ->
Logger.error "Unexpected status code from #{lookupd_url}: #{status}"
%{status_code: status, data: body}
|> normalize_response
end
rescue
e in HTTPotion.HTTPError ->
Logger.error "Error connecting to #{lookupd_url}: #{inspect e}"
%{status_code: nil, status_txt: nil, data: nil}
end
end
@spec normalize_200_response([any], binary) :: response
defp normalize_200_response(headers, body) do
body = if body == nil || body == "", do: "{}", else: body
if headers[:"X-Nsq-Content-Type"] == "nsq; version=1.0" do
Poison.decode!(body)
|> normalize_response
else
%{status_code: 200, status_txt: "OK", data: body}
|> normalize_response
end
end
@spec normalize_response(map) :: response
defp normalize_response(m) do
Map.merge(%{
status_code: nil,
status_txt: "",
data: "",
headers: []
}, m)
end
end
| 29.301205 | 78 | 0.622944 |
792e94a7effdc649be6e0563a12820ff9c0e3f3e | 349 | ex | Elixir | lib/keywords/application.ex | GunnarPDX/keyword_parser | b6013a5c156672761c71c5b927874c3d9b5567c3 | [
"MIT"
] | 1 | 2021-08-17T02:31:07.000Z | 2021-08-17T02:31:07.000Z | lib/keywords/application.ex | GunnarPDX/keyword_parser | b6013a5c156672761c71c5b927874c3d9b5567c3 | [
"MIT"
] | null | null | null | lib/keywords/application.ex | GunnarPDX/keyword_parser | b6013a5c156672761c71c5b927874c3d9b5567c3 | [
"MIT"
] | 1 | 2021-08-17T02:31:14.000Z | 2021-08-17T02:31:14.000Z | defmodule Keywords.Application do
@moduledoc false
use Application
@impl true
def start(_type, _args) do
children = [
{DynamicSupervisor, strategy: :one_for_one, name: Keywords.PatternSupervisor},
{Registry, keys: :unique, name: PatternRegistry}
]
Supervisor.start_link(children, strategy: :one_for_one)
end
end | 21.8125 | 84 | 0.713467 |
792eb312ee9ca7da26a00e09a6cca6ee537c5731 | 487 | ex | Elixir | lib/vutuv_web/views/error_view.ex | hendri-tobing/vutuv | 50a3095e236fe96739a79954157b74b4c4025921 | [
"MIT"
] | null | null | null | lib/vutuv_web/views/error_view.ex | hendri-tobing/vutuv | 50a3095e236fe96739a79954157b74b4c4025921 | [
"MIT"
] | null | null | null | lib/vutuv_web/views/error_view.ex | hendri-tobing/vutuv | 50a3095e236fe96739a79954157b74b4c4025921 | [
"MIT"
] | null | null | null | defmodule VutuvWeb.ErrorView do
use VutuvWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 28.647059 | 61 | 0.73306 |
792ec63c9d4fb72d049fdf05584c3933132c5fde | 5,438 | ex | Elixir | lib/central/account/schemas/user.ex | Teifion/central | feaeb35ec855b62a3ff8de6c364f6190ef0a196c | [
"MIT"
] | 1 | 2022-01-03T16:36:25.000Z | 2022-01-03T16:36:25.000Z | lib/central/account/schemas/user.ex | Teifion/central | feaeb35ec855b62a3ff8de6c364f6190ef0a196c | [
"MIT"
] | null | null | null | lib/central/account/schemas/user.ex | Teifion/central | feaeb35ec855b62a3ff8de6c364f6190ef0a196c | [
"MIT"
] | null | null | null | defmodule Central.Account.User do
@moduledoc false
use CentralWeb, :schema
@behaviour Bodyguard.Policy
alias Argon2
# import Central.Account.AuthLib, only: [allow?: 2]
@extra_fields []
schema "account_users" do
field :name, :string
field :email, :string
field :password, :string
field :icon, :string
field :colour, :string
field :data, :map, default: %{}
field :permissions, {:array, :string}, default: []
has_many :user_configs, Central.Config.UserConfig
has_many :reports_against, Central.Account.Report, foreign_key: :target_id
has_many :reports_made, Central.Account.Report, foreign_key: :reporter_id
has_many :reports_responded, Central.Account.Report, foreign_key: :responder_id
# Extra user.ex relations go here
belongs_to :admin_group, Central.Account.Group
many_to_many :groups, Central.Account.Group,
join_through: "account_group_memberships",
join_keys: [user_id: :id, group_id: :id]
timestamps()
end
@doc false
def changeset(user, attrs \\ %{}) do
attrs = attrs
|> remove_whitespace([:email])
if attrs["password"] == "" do
user
|> cast(
attrs,
[:name, :email, :icon, :colour, :permissions, :admin_group_id, :data] ++ @extra_fields
)
|> validate_required([:name, :email, :icon, :colour, :permissions])
|> unique_constraint(:email)
else
user
|> cast(
attrs,
[
:name,
:email,
:password,
:icon,
:colour,
:permissions,
:admin_group_id,
:data
] ++ @extra_fields
)
|> validate_required([:name, :email, :password, :icon, :colour, :permissions])
|> unique_constraint(:email)
|> put_password_hash()
end
end
def changeset(user, attrs, :script) do
attrs = attrs
|> remove_whitespace([:email])
user
|> cast(
attrs,
[
:name,
:email,
:password,
:icon,
:colour,
:permissions,
:admin_group_id,
:data
] ++ @extra_fields
)
|> validate_required([:name, :email, :icon, :colour, :permissions])
|> unique_constraint(:email)
end
def changeset(struct, params, nil), do: changeset(struct, params)
def changeset(struct, permissions, :permissions) do
cast(struct, %{permissions: permissions}, [:permissions])
end
def changeset(user, attrs, :self_create) do
attrs = attrs
|> remove_whitespace([:email])
user
|> cast(attrs, [:name, :email])
|> validate_required([:name, :email])
|> unique_constraint(:email)
|> change_password(attrs)
end
def changeset(user, attrs, :limited) do
attrs = attrs
|> remove_whitespace([:email])
user
|> cast(attrs, [:name, :email, :icon, :colour] ++ @extra_fields)
|> validate_required([:name, :email, :icon, :colour])
|> unique_constraint(:email)
end
def changeset(user, attrs, :limited_with_data) do
attrs = attrs
|> remove_whitespace([:email])
user
|> cast(attrs, [:name, :email, :icon, :colour, :data] ++ @extra_fields)
|> validate_required([:name, :email, :icon, :colour])
|> unique_constraint(:email)
end
def changeset(user, attrs, :user_form) do
attrs = attrs
|> remove_whitespace([:email])
cond do
attrs["password"] == nil or attrs["password"] == "" ->
user
|> cast(attrs, [:name, :email])
|> validate_required([:name, :email])
|> add_error(
:password_confirmation,
"Please enter your password to change your account details."
)
verify_password(attrs["password"], user.password) == false ->
user
|> cast(attrs, [:name, :email])
|> validate_required([:name, :email])
|> add_error(:password_confirmation, "Incorrect password")
true ->
user
|> cast(attrs, [:name, :email])
|> validate_required([:name, :email])
|> unique_constraint(:email)
end
end
def changeset(user, attrs, :password) do
cond do
attrs["existing"] == nil or attrs["existing"] == "" ->
user
|> change_password(attrs)
|> add_error(
:password_confirmation,
"Please enter your existing password to change your password."
)
verify_password(attrs["existing"], user.password) == false ->
user
|> change_password(attrs)
|> add_error(:existing, "Incorrect password")
true ->
user
|> change_password(attrs)
end
end
defp change_password(user, attrs) do
user
|> cast(attrs, [:password])
|> validate_length(:password, min: 6)
|> validate_confirmation(:password, message: "Does not match password")
|> put_password_hash()
end
defp put_password_hash(
%Ecto.Changeset{valid?: true, changes: %{password: password}} = changeset
) do
change(changeset, password: Argon2.hash_pwd_salt(password))
end
defp put_password_hash(changeset), do: changeset
@spec verify_password(String.t(), String.t()) :: boolean
def verify_password(plain_text_password, encrypted) do
Argon2.verify_pass(plain_text_password, encrypted)
end
@spec authorize(any, Plug.Conn.t(), atom) :: boolean
def authorize(_, conn, _), do: allow?(conn, "admin.user")
# def authorize(_, _, _), do: false
end
| 26.270531 | 94 | 0.607392 |
792ee135c4cc8e7ea111d2e07e4bbc6c05997626 | 879 | ex | Elixir | apps/jobex_web/test/support/conn_case.ex | andyl/jobex | a51d6ecc4e8e8d62ba7cdf3796305a5da369e4e2 | [
"MIT"
] | 10 | 2019-10-24T01:23:07.000Z | 2020-02-23T00:27:32.000Z | apps/jobex_web/test/support/conn_case.ex | andyl/crow | a51d6ecc4e8e8d62ba7cdf3796305a5da369e4e2 | [
"MIT"
] | 6 | 2021-03-09T18:41:09.000Z | 2021-05-27T13:02:42.000Z | apps/jobex_web/test/support/conn_case.ex | andyl/crow | a51d6ecc4e8e8d62ba7cdf3796305a5da369e4e2 | [
"MIT"
] | 1 | 2019-10-24T01:23:08.000Z | 2019-10-24T01:23:08.000Z | defmodule JobexWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias JobexWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint JobexWeb.Endpoint
end
end
setup _tags do
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 26.636364 | 59 | 0.726962 |
792eec96e3f72331d44d250c8453ac8baa3b4b83 | 924 | ex | Elixir | apps/neoscan/lib/neoscan/transactions/vout.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 75 | 2017-07-23T02:45:32.000Z | 2021-12-13T11:04:17.000Z | apps/neoscan/lib/neoscan/transactions/vout.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 252 | 2017-07-13T19:36:00.000Z | 2021-07-28T18:40:00.000Z | apps/neoscan/lib/neoscan/transactions/vout.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 87 | 2017-07-23T02:45:34.000Z | 2022-03-02T14:54:27.000Z | defmodule Neoscan.Vout do
@moduledoc false
use Ecto.Schema
alias Neoscan.Transaction
alias Neoscan.Address
alias Neoscan.Asset
@primary_key false
schema "vouts" do
belongs_to(
:transaction,
Transaction,
foreign_key: :transaction_id,
references: :id,
type: :integer
)
field(:transaction_hash, :binary, primary_key: true)
field(:n, :integer, primary_key: true)
belongs_to(
:address,
Address,
foreign_key: :address_hash,
references: :hash,
type: :binary
)
belongs_to(
:asset,
Asset,
foreign_key: :asset_hash,
references: :transaction_hash,
type: :binary
)
field(:value, :decimal)
field(:block_time, :utc_datetime)
field(:claimed, :boolean)
field(:spent, :boolean)
field(:start_block_index, :integer)
field(:end_block_index, :integer)
timestamps()
end
end
| 19.659574 | 56 | 0.635281 |
792f0490d38f9fce99db48c5429dcd47fd157649 | 599 | exs | Elixir | test/honeydew/worker_supervisor_test.exs | evadne/honeydew | c3c2f6095a28393cae13c0e686bdb6257d532ca1 | [
"MIT"
] | null | null | null | test/honeydew/worker_supervisor_test.exs | evadne/honeydew | c3c2f6095a28393cae13c0e686bdb6257d532ca1 | [
"MIT"
] | null | null | null | test/honeydew/worker_supervisor_test.exs | evadne/honeydew | c3c2f6095a28393cae13c0e686bdb6257d532ca1 | [
"MIT"
] | null | null | null | defmodule Honeydew.WorkerSupervisorTest do
use ExUnit.Case, async: true
setup do
pool = :erlang.unique_integer
Honeydew.create_groups(pool)
{:ok, supervisor} = Honeydew.WorkerSupervisor.start_link(pool, %{ma: {Stateful, [:state_here]}, num: 7, init_retry: 5, shutdown: 10_000}, nil)
# on_exit fn ->
# Supervisor.stop(supervisor)
# Honeydew.delete_groups(pool)
# end
[supervisor: supervisor]
end
test "starts the correct number of workers", %{supervisor: supervisor} do
assert supervisor |> Supervisor.which_children |> Enum.count == 7
end
end
| 26.043478 | 146 | 0.694491 |
792f1908749c182c4ed61f374df26155525cb9dd | 7,624 | ex | Elixir | lib/livebook/notebook.ex | qrede/livebook | 5f3230f7aa6b638825bd43c6a84302a694240ddf | [
"Apache-2.0"
] | 1 | 2021-05-21T22:14:23.000Z | 2021-05-21T22:14:23.000Z | lib/livebook/notebook.ex | cristina007-bot/livebook | 101f582196a161a06b62b38fe55c7f8c0a04b54d | [
"Apache-2.0"
] | null | null | null | lib/livebook/notebook.ex | cristina007-bot/livebook | 101f582196a161a06b62b38fe55c7f8c0a04b54d | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Notebook do
@moduledoc false
# Data structure representing a notebook.
#
# A notebook is just the representation and roughly
# maps to a file that the user can edit.
# A notebook *session* is a living process that holds a specific
# notebook instance and allows users to collaboratively apply
# changes to this notebook.
#
# A notebook is divided into a set of isolated *sections*.
defstruct [:name, :version, :sections, :metadata]
alias Livebook.Notebook.{Section, Cell}
@type metadata :: %{String.t() => term()}
@type t :: %__MODULE__{
name: String.t(),
version: String.t(),
sections: list(Section.t()),
metadata: metadata()
}
@version "1.0"
@doc """
Returns a blank notebook.
"""
@spec new() :: t()
def new() do
%__MODULE__{
name: "Untitled notebook",
version: @version,
sections: [],
metadata: %{}
}
end
@doc """
Finds notebook section by id.
"""
@spec fetch_section(t(), Section.id()) :: {:ok, Section.t()} | :error
def fetch_section(notebook, section_id) do
Enum.find_value(notebook.sections, :error, fn section ->
section.id == section_id && {:ok, section}
end)
end
@doc """
Finds notebook cell by `id` and the corresponding section.
"""
@spec fetch_cell_and_section(t(), Cell.id()) :: {:ok, Cell.t(), Section.t()} | :error
def fetch_cell_and_section(notebook, cell_id) do
for(
section <- notebook.sections,
cell <- section.cells,
cell.id == cell_id,
do: {cell, section}
)
|> case do
[{cell, section}] -> {:ok, cell, section}
[] -> :error
end
end
@doc """
Finds a cell being `offset` from the given cell (with regard to all sections).
"""
@spec fetch_cell_sibling(t(), Cell.id(), integer()) :: {:ok, Cell.t()} | :error
def fetch_cell_sibling(notebook, cell_id, offset) do
all_cells = for(section <- notebook.sections, cell <- section.cells, do: cell)
with idx when idx != nil <- Enum.find_index(all_cells, &(&1.id == cell_id)),
sibling_idx <- idx + offset,
true <- 0 <= sibling_idx and sibling_idx < length(all_cells) do
{:ok, Enum.at(all_cells, sibling_idx)}
else
_ -> :error
end
end
@doc """
Inserts `section` at the given `index`.
"""
@spec insert_section(t(), integer(), Section.t()) :: t()
def insert_section(notebook, index, section) do
sections = List.insert_at(notebook.sections, index, section)
%{notebook | sections: sections}
end
@doc """
Inserts `cell` at the given `index` within section identified by `section_id`.
"""
@spec insert_cell(t(), Section.id(), integer(), Cell.t()) :: t()
def insert_cell(notebook, section_id, index, cell) do
sections =
Enum.map(notebook.sections, fn section ->
if section.id == section_id do
%{section | cells: List.insert_at(section.cells, index, cell)}
else
section
end
end)
%{notebook | sections: sections}
end
@doc """
Deletes section with the given id.
"""
@spec delete_section(t(), Section.id()) :: t()
def delete_section(notebook, section_id) do
sections = Enum.reject(notebook.sections, &(&1.id == section_id))
%{notebook | sections: sections}
end
@doc """
Deletes cell with the given id.
"""
@spec delete_cell(t(), Cell.id()) :: t()
def delete_cell(notebook, cell_id) do
sections =
Enum.map(notebook.sections, fn section ->
%{section | cells: Enum.reject(section.cells, &(&1.id == cell_id))}
end)
%{notebook | sections: sections}
end
@doc """
Updates cell with the given function.
"""
@spec update_cell(t(), Cell.id(), (Cell.t() -> Cell.t())) :: t()
def update_cell(notebook, cell_id, fun) do
sections =
Enum.map(notebook.sections, fn section ->
cells =
Enum.map(section.cells, fn cell ->
if cell.id == cell_id, do: fun.(cell), else: cell
end)
%{section | cells: cells}
end)
%{notebook | sections: sections}
end
@doc """
Updates section with the given function.
"""
@spec update_section(t(), Section.id(), (Section.t() -> Section.t())) :: t()
def update_section(notebook, section_id, fun) do
sections =
Enum.map(notebook.sections, fn section ->
if section.id == section_id, do: fun.(section), else: section
end)
%{notebook | sections: sections}
end
  @doc """
  Moves cell by the given offset.
  The cell may move to another section if the offset indicates so.
  """
  @spec move_cell(t(), Cell.id(), integer()) :: t()
  def move_cell(notebook, cell_id, offset) do
    # We firstly create a flat list of cells interspersed with `:separator`
    # at section boundaries. Then we move the given cell by the given offset.
    # Finally we split the flat list back into cell lists
    # and put them in the corresponding sections.
    separated_cells =
      notebook.sections
      |> Enum.map_intersperse(:separator, & &1.cells)
      |> List.flatten()
    # Index of the cell to move; `:separator` entries never match.
    # NOTE(review): if `cell_id` is absent, `idx` is `nil` and the arithmetic
    # below raises — callers appear expected to pass a valid id.
    idx =
      Enum.find_index(separated_cells, fn
        :separator -> false
        cell -> cell.id == cell_id
      end)
    # Clamp the target to the flat list bounds. Separators count as positions,
    # which is what lets a cell cross over into a neighbouring section.
    new_idx = (idx + offset) |> clamp_index(separated_cells)
    {cell, separated_cells} = List.pop_at(separated_cells, idx)
    separated_cells = List.insert_at(separated_cells, new_idx, cell)
    cell_groups = group_cells(separated_cells)
    # Zip the regrouped cell lists back onto the original sections in order.
    sections =
      notebook.sections
      |> Enum.zip(cell_groups)
      |> Enum.map(fn {section, cells} -> %{section | cells: cells} end)
    %{notebook | sections: sections}
  end
defp group_cells(separated_cells) do
separated_cells
|> Enum.reverse()
|> do_group_cells([])
end
  # Recursive worker for `group_cells/1`. Consumes the *reversed* separated
  # list and accumulates groups front-first, so the result comes out in the
  # original section order.

  # Input exhausted: the accumulated groups are complete.
  defp do_group_cells([], groups), do: groups
  # First element is a separator and no groups exist yet: the last section had
  # no cells, so open an empty group for it plus a fresh one for the next.
  defp do_group_cells([:separator | separated_cells], []) do
    do_group_cells(separated_cells, [[], []])
  end
  # Separator between sections: start a new (empty) group.
  defp do_group_cells([:separator | separated_cells], groups) do
    do_group_cells(separated_cells, [[] | groups])
  end
  # First cell seen: open the initial group with it.
  defp do_group_cells([cell | separated_cells], []) do
    do_group_cells(separated_cells, [[cell]])
  end
  # Prepend the cell to the group currently being built (the front one).
  defp do_group_cells([cell | separated_cells], [group | groups]) do
    do_group_cells(separated_cells, [[cell | group] | groups])
  end
defp clamp_index(index, list) do
index |> max(0) |> min(length(list) - 1)
end
@doc """
Returns a list of `{cell, section}` pairs including all Elixir cells in order.
"""
@spec elixir_cells_with_section(t()) :: list({Cell.t(), Section.t()})
def elixir_cells_with_section(notebook) do
for section <- notebook.sections,
cell <- section.cells,
cell.type == :elixir,
do: {cell, section}
end
@doc """
Returns a list of Elixir cells (each with section) that the given cell depends on.
The cells are ordered starting from the most direct parent.
"""
@spec parent_cells_with_section(t(), Cell.id()) :: list({Cell.t(), Section.t()})
def parent_cells_with_section(notebook, cell_id) do
notebook
|> elixir_cells_with_section()
|> Enum.take_while(fn {cell, _} -> cell.id != cell_id end)
|> Enum.reverse()
end
@doc """
Returns a list of Elixir cells (each with section) that depend on the given cell.
The cells are ordered starting from the most direct child.
"""
@spec child_cells_with_section(t(), Cell.id()) :: list({Cell.t(), Section.t()})
def child_cells_with_section(notebook, cell_id) do
notebook
|> elixir_cells_with_section()
|> Enum.drop_while(fn {cell, _} -> cell.id != cell_id end)
|> Enum.drop(1)
end
end
| 28.342007 | 87 | 0.631689 |
792f2b202ba0149e063cc3362e6f3aa49234cf3a | 2,196 | ex | Elixir | clients/service_management/lib/google_api/service_management/v1/model/system_parameter_rule.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/service_management/lib/google_api/service_management/v1/model/system_parameter_rule.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/service_management/lib/google_api/service_management/v1/model/system_parameter_rule.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceManagement.V1.Model.SystemParameterRule do
  @moduledoc """
  Define a system parameter rule mapping system parameter definitions to
  methods.
  ## Attributes
  *   `parameters` (*type:* `list(GoogleApi.ServiceManagement.V1.Model.SystemParameter.t)`, *default:* `nil`) - Define parameters. Multiple names may be defined for a parameter.
      For a given method call, only one of them should be used. If multiple
      names are used the behavior is implementation-dependent.
      If none of the specified names are present the behavior is
      parameter-dependent.
  *   `selector` (*type:* `String.t`, *default:* `nil`) - Selects the methods to which this rule applies. Use '*' to indicate all
      methods in all APIs.
      Refer to selector for syntax details.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :parameters => list(GoogleApi.ServiceManagement.V1.Model.SystemParameter.t()),
          :selector => String.t()
        }
  # Generated field definitions (`field/2-3` is provided by
  # GoogleApi.Gax.ModelBase). Regenerate the client rather than editing these
  # by hand — see the "Do not edit this file manually" notice above.
  field(:parameters, as: GoogleApi.ServiceManagement.V1.Model.SystemParameter, type: :list)
  field(:selector)
end
# Delegates Poison decoding to the generated model module.
defimpl Poison.Decoder, for: GoogleApi.ServiceManagement.V1.Model.SystemParameterRule do
  def decode(value, options) do
    GoogleApi.ServiceManagement.V1.Model.SystemParameterRule.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model base implementation.
defimpl Poison.Encoder, for: GoogleApi.ServiceManagement.V1.Model.SystemParameterRule do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.862069 | 177 | 0.741348 |
792f3d2a69bbd4561d8605dbdf7ffc5372a65f51 | 4,944 | exs | Elixir | apps/asf_web/test/asf_web/controllers/user_settings_controller_test.exs | LazarRistic/asf | 2c557f06839a129b35174142c91f60696be2fa89 | [
"MIT"
] | null | null | null | apps/asf_web/test/asf_web/controllers/user_settings_controller_test.exs | LazarRistic/asf | 2c557f06839a129b35174142c91f60696be2fa89 | [
"MIT"
] | null | null | null | apps/asf_web/test/asf_web/controllers/user_settings_controller_test.exs | LazarRistic/asf | 2c557f06839a129b35174142c91f60696be2fa89 | [
"MIT"
] | null | null | null | defmodule AsfWeb.UserSettingsControllerTest do
  use AsfWeb.ConnCase, async: true
  alias Asf.Accounts
  import Asf.AccountsFixtures
  # Every test starts with a freshly registered, logged-in user available as
  # `conn`/`user` in the test context.
  setup :register_and_log_in_user
  describe "GET /users/settings" do
    test "renders settings page", %{conn: conn} do
      conn = get(conn, Routes.user_settings_path(conn, :edit))
      response = html_response(conn, 200)
      assert response =~ "<h1>Settings</h1>"
    end
    # The shared `setup` logs a user in, so a fresh conn is built here to
    # exercise the unauthenticated path.
    test "redirects if user is not logged in" do
      conn = build_conn()
      conn = get(conn, Routes.user_settings_path(conn, :edit))
      assert redirected_to(conn) == Routes.user_session_path(conn, :new)
    end
  end
  describe "PUT /users/settings (change password form)" do
    # A successful password change must also rotate the session token so that
    # other sessions are invalidated — asserted via the differing `:user_token`.
    test "updates the user password and resets tokens", %{conn: conn, user: user} do
      new_password_conn =
        put(conn, Routes.user_settings_path(conn, :update), %{
          "action" => "update_password",
          "current_password" => valid_user_password(),
          "user" => %{
            "password" => "new valid password",
            "password_confirmation" => "new valid password"
          }
        })
      assert redirected_to(new_password_conn) == Routes.user_settings_path(conn, :edit)
      assert get_session(new_password_conn, :user_token) != get_session(conn, :user_token)
      assert get_flash(new_password_conn, :info) =~ "Password updated successfully"
      assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
    end
    # Invalid input re-renders the settings page with the changeset errors and
    # must leave the session token untouched.
    test "does not update password on invalid data", %{conn: conn} do
      old_password_conn =
        put(conn, Routes.user_settings_path(conn, :update), %{
          "action" => "update_password",
          "current_password" => "invalid",
          "user" => %{
            "password" => "too short",
            "password_confirmation" => "does not match"
          }
        })
      response = html_response(old_password_conn, 200)
      assert response =~ "<h1>Settings</h1>"
      assert response =~ "should be at least 12 character(s)"
      assert response =~ "does not match password"
      assert response =~ "is not valid"
      assert get_session(old_password_conn, :user_token) == get_session(conn, :user_token)
    end
  end
  describe "PUT /users/settings (change email form)" do
    # Changing the email only sends a confirmation link; the stored email must
    # remain unchanged until the link is followed.
    @tag :capture_log
    test "updates the user email", %{conn: conn, user: user} do
      conn =
        put(conn, Routes.user_settings_path(conn, :update), %{
          "action" => "update_email",
          "current_password" => valid_user_password(),
          "user" => %{"email" => unique_user_email()}
        })
      assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
      assert get_flash(conn, :info) =~ "A link to confirm your email"
      assert Accounts.get_user_by_email(user.email)
    end
    test "does not update email on invalid data", %{conn: conn} do
      conn =
        put(conn, Routes.user_settings_path(conn, :update), %{
          "action" => "update_email",
          "current_password" => "invalid",
          "user" => %{"email" => "with spaces"}
        })
      response = html_response(conn, 200)
      assert response =~ "<h1>Settings</h1>"
      assert response =~ "must have the @ sign and no spaces"
      assert response =~ "is not valid"
    end
  end
  describe "GET /users/settings/confirm_email/:token" do
    # Generates a real update-email token by capturing the URL passed to the
    # delivery function, and exposes it (plus the new email) to each test.
    setup %{user: user} do
      email = unique_user_email()
      token =
        extract_user_token(fn url ->
          Accounts.deliver_update_email_instructions(%{user | email: email}, user.email, url)
        end)
      %{token: token, email: email}
    end
    # The token must be single-use: the second visit with the same token fails.
    test "updates the user email once", %{conn: conn, user: user, token: token, email: email} do
      conn = get(conn, Routes.user_settings_path(conn, :confirm_email, token))
      assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
      assert get_flash(conn, :info) =~ "Email changed successfully"
      refute Accounts.get_user_by_email(user.email)
      assert Accounts.get_user_by_email(email)
      conn = get(conn, Routes.user_settings_path(conn, :confirm_email, token))
      assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
      assert get_flash(conn, :error) =~ "Email change link is invalid or it has expired"
    end
    test "does not update email with invalid token", %{conn: conn, user: user} do
      conn = get(conn, Routes.user_settings_path(conn, :confirm_email, "oops"))
      assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
      assert get_flash(conn, :error) =~ "Email change link is invalid or it has expired"
      assert Accounts.get_user_by_email(user.email)
    end
    test "redirects if user is not logged in", %{token: token} do
      conn = build_conn()
      conn = get(conn, Routes.user_settings_path(conn, :confirm_email, token))
      assert redirected_to(conn) == Routes.user_session_path(conn, :new)
    end
  end
end
| 38.030769 | 96 | 0.649676 |
792f5932764d1eaa265a722565dfa08b4a0d53e0 | 1,097 | ex | Elixir | hangman/b2/lib/b2_web/router.ex | neal-bpm/TinyHangman | c2b9bcfe9b3c6d918e83271dc943f09706693b4e | [
"MIT"
] | null | null | null | hangman/b2/lib/b2_web/router.ex | neal-bpm/TinyHangman | c2b9bcfe9b3c6d918e83271dc943f09706693b4e | [
"MIT"
] | null | null | null | hangman/b2/lib/b2_web/router.ex | neal-bpm/TinyHangman | c2b9bcfe9b3c6d918e83271dc943f09706693b4e | [
"MIT"
] | null | null | null | defmodule B2Web.Router do
  use B2Web, :router
  # Plug stack for browser-facing routes: HTML negotiation, session, LiveView
  # flash, root layout, CSRF protection and secure headers (in this order).
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_live_flash
    plug :put_root_layout, {B2Web.LayoutView, :root}
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end
  # JSON pipeline; currently unused — see the commented-out "/api" scope below.
  pipeline :api do
    plug :accepts, ["json"]
  end
  # The game itself is a LiveView mounted at the root path.
  scope "/", B2Web do
    pipe_through :browser
    live "/", Live.Game
  end
  # Other scopes may use custom stacks.
  # scope "/api", B2Web do
  #   pipe_through :api
  # end
  # Enables LiveDashboard only for development
  #
  # If you want to use the LiveDashboard in production, you should put
  # it behind authentication and allow only admins to access it.
  # If your application does not have an admins-only section yet,
  # you can use Plug.BasicAuth to set up some basic authentication
  # as long as you are also using SSL (which you should anyway).
  if Mix.env() in [:dev, :test] do
    import Phoenix.LiveDashboard.Router
    scope "/" do
      pipe_through :browser
      live_dashboard "/dashboard", metrics: B2Web.Telemetry
    end
  end
end
| 24.377778 | 70 | 0.691887 |
792f5e45362731b1349c2a5bc3a32a833e01337e | 1,595 | ex | Elixir | lib/flamelex/gui/supervision_tree/memex_stagemanager.ex | JediLuke/flamelex | b38d1171b8f93375d8dc59f1710442860b6c8580 | [
"Apache-2.0"
] | 10 | 2021-03-02T20:05:13.000Z | 2022-03-14T21:10:39.000Z | lib/flamelex/gui/supervision_tree/memex_stagemanager.ex | JediLuke/flamelex | b38d1171b8f93375d8dc59f1710442860b6c8580 | [
"Apache-2.0"
] | 2 | 2021-12-14T18:29:44.000Z | 2021-12-23T20:38:27.000Z | lib/flamelex/gui/supervision_tree/memex_stagemanager.ex | JediLuke/flamelex | b38d1171b8f93375d8dc59f1710442860b6c8580 | [
"Apache-2.0"
] | 2 | 2021-12-05T20:41:26.000Z | 2021-12-26T01:46:42.000Z | defmodule Flamelex.GUI.StageManager.Memex do
  @moduledoc """
  This process holds state for when we use Vim commands.
  """
  # NOTE(review): the moduledoc above looks copied from elsewhere — the
  # callbacks below track which Memex tidbits are open and push them to the
  # StoryRiver GUI component; confirm and update the description.
  use GenServer
  use Flamelex.ProjectAliases
  require Logger
def start_link(args) do
GenServer.start_link(__MODULE__, args)
end
  def init(_args) do
    Logger.debug "#{__MODULE__} initializing..."
    # Registers under the module name here instead of passing `name:` to
    # `GenServer.start_link/3`; raises if another instance is already registered.
    Process.register(self(), __MODULE__)
    # State holds the list of currently open tidbits.
    {:ok, %{open: []}}
  end
  # NOTE(review): only matches when no tidbits are open; a `:memex_open` cast
  # arriving with a non-empty `:open` list has no clause and crashes the
  # process — confirm whether that is intended ("let it crash") or a gap.
  def handle_cast(:memex_open, %{open: []} = state) do
    Logger.debug "#{__MODULE__} recv'd: :memex_open"
    {:noreply, state}
  end
  # Picks a random wiki tidbit, appends it to the open list and notifies the
  # StoryRiver component so it can render the new tidbit.
  def handle_cast(:open_random_tidbit, state) do
    # t = Memex.random()
    # # GenServer.cast(:hypercard, {:new_tidbit, t})
    # GenServer.cast(Flamelex.GUI.Component.Memex.HyperCard, {:new_tidbit, t})
    Logger.debug "#{__MODULE__} recv'd msg: :open_random_tidbit"
    t = Memex.My.Wiki.list |> Enum.random()
    new_state = %{state|open: state.open ++ [t]}
    GenServer.cast(Flamelex.GUI.Component.Memex.StoryRiver, {:add_tidbit, t})
    {:noreply, new_state}
  end
def handle_cast({:open_tidbit, t}, state) do
Logger.debug "#{__MODULE__} recv'd msg: {:open_random, #{t.title}}"
new_state = %{state|open: state.open ++ [t]}
GenServer.cast(Flamelex.GUI.Component.Memex.StoryRiver, {:add_tidbit, t})
{:noreply, new_state}
end
  # Only matches the empty state: picks a random wiki tidbit, replies with it
  # and makes it the sole open tidbit.
  # NOTE(review): there is no clause for a non-empty `:open` list, so this call
  # crashes once a tidbit is already open — likely related to the TODO below.
  def handle_call(:get_open_tidbits, _from, %{open: []} = state) do
    Logger.warn "Dont wanna open empty Memex yet lol, just render a rando..."
    #TODO fix the bug vacarsu found here
    rando = Memex.My.Wiki.list |> Enum.random()
    {:reply, {:ok, [rando]}, %{open: [rando]}}
  end
end
| 31.27451 | 78 | 0.665204 |
792f684b276904aa5038389d670fd03c6d75f256 | 1,083 | ex | Elixir | lib/exp_web/live/tag_live/form_component.ex | karloescota/exp | 77267b80febf6d738b3ac6b6203795feef01e666 | [
"MIT"
] | null | null | null | lib/exp_web/live/tag_live/form_component.ex | karloescota/exp | 77267b80febf6d738b3ac6b6203795feef01e666 | [
"MIT"
] | null | null | null | lib/exp_web/live/tag_live/form_component.ex | karloescota/exp | 77267b80febf6d738b3ac6b6203795feef01e666 | [
"MIT"
] | null | null | null | defmodule ExpWeb.TagLive.FormComponent do
  use ExpWeb, :live_component
  # Context module providing tag changesets (`change_tag/1,2`) and
  # persistence (`create_tag/2`).
  alias Exp.Tags
@impl true
def update(%{tag: tag} = assigns, socket) do
changeset = Tags.change_tag(tag)
{:ok,
socket
|> assign(assigns)
|> assign(:changeset, changeset)}
end
@impl true
def handle_event("validate", %{"tag" => tag_params}, socket) do
changeset =
socket.assigns.tag
|> Tags.change_tag(tag_params)
|> Map.put(:action, :validate)
{:noreply, assign(socket, :changeset, changeset)}
end
def handle_event("save", %{"tag" => tag_params}, socket) do
save_expense(socket, socket.assigns.action, tag_params)
end
defp save_expense(socket, :new, tag_params) do
case Tags.create_tag(socket.assigns.current_user, tag_params) do
{:ok, _expense} ->
{:noreply,
socket
|> put_flash(:info, "Tag created successfully")
|> push_redirect(to: socket.assigns.return_to)}
{:error, %Ecto.Changeset{} = changeset} ->
{:noreply, assign(socket, changeset: changeset)}
end
end
end
| 25.186047 | 68 | 0.641736 |
792f8d8799e86626952669a16c8ab5b9cc81f2e4 | 518 | exs | Elixir | config/test.exs | ndac-todoroki/DiscordSplatoonBot | 6a082b0352684cb64d36fe9116e7f060691cba37 | [
"MIT"
] | 6 | 2017-08-10T13:57:06.000Z | 2019-01-17T08:48:40.000Z | config/test.exs | ndac-todoroki/DiscordSplatoonBot | 6a082b0352684cb64d36fe9116e7f060691cba37 | [
"MIT"
] | 14 | 2017-08-08T13:07:00.000Z | 2019-02-28T15:10:18.000Z | config/test.exs | ndac-todoroki/DiscordSplatoonBot | 6a082b0352684cb64d36fe9116e7f060691cba37 | [
"MIT"
] | null | null | null | use Mix.Config
# Print only warnings and errors during test
config :logger, level: :warn
# Goth is used inside of Kane, and needs to read Google credentials
# (the JSON key file is read at compile/config time, relative to this file).
config :goth,
  json: "google_json_creds/discord_splatoon_bot.json" |> Path.expand(__DIR__) |> File.read!()
# Discord Bot Settings
config :nostrum,
  # The token of your bot as a string
  # SECURITY(review): this is a real-looking Discord bot token committed in
  # plain text. It should be rotated immediately and loaded from the
  # environment (e.g. `System.get_env("DISCORD_BOT_TOKEN")`) instead.
  token: "MzQzNTg3OTExNTQwNjcwNDc0.D0wNSQ.bveLr_nVBkn2kk-3ua-wHEZLH44",
  # The number of shards you want to run your bot under, or :auto.
  num_shards: :auto,
  bot_id: 0
| 30.470588 | 93 | 0.747104 |
792f9af915e2f830388fb7623dc7ffe515f3b959 | 1,121 | exs | Elixir | demo/supandgen/config/config.exs | koenighotze/talks-elixir-intro | d0a5cbbd0fef5752dc3a137e828f9c836566b5a5 | [
"MIT"
] | 2 | 2015-09-15T05:40:07.000Z | 2016-06-25T13:26:00.000Z | demo/supandgen/config/config.exs | koenighotze/talks-elixir-intro | d0a5cbbd0fef5752dc3a137e828f9c836566b5a5 | [
"MIT"
] | null | null | null | demo/supandgen/config/config.exs | koenighotze/talks-elixir-intro | d0a5cbbd0fef5752dc3a137e828f9c836566b5a5 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# NOTE(review): `use Mix.Config` is the pre-Elixir-1.9 form; newer projects
# use `import Config` instead — confirm the project's minimum Elixir version
# before changing it.
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
#     config :supandgen, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:supandgen, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.16129 | 73 | 0.752007 |
792fa8fee729465d1797602ff5c026cc70d48beb | 1,558 | exs | Elixir | mix.exs | kowaliklukasz/phoenix-checkers | 30021838cc1b9fa2dac83e698d0844e7e281dc26 | [
"MIT"
] | null | null | null | mix.exs | kowaliklukasz/phoenix-checkers | 30021838cc1b9fa2dac83e698d0844e7e281dc26 | [
"MIT"
] | null | null | null | mix.exs | kowaliklukasz/phoenix-checkers | 30021838cc1b9fa2dac83e698d0844e7e281dc26 | [
"MIT"
] | null | null | null | defmodule PhoenixCheckers.MixProject do
  use Mix.Project

  # Top-level project configuration: app name/version, required Elixir,
  # per-environment compile paths and the Phoenix/gettext compilers.
  def project do
    [
      app: :phoenix_checkers,
      version: "0.1.0",
      elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end
  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  # `PhoenixCheckers.Application` is the application callback module started
  # with empty arguments.
  def application do
    [
      mod: {PhoenixCheckers.Application, []},
      extra_applications: [:logger, :runtime_tools, :crypto]
    ]
  end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.5.8"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_dashboard, "~> 0.4"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 0.4"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
]
end
  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to install project dependencies and perform other setup tasks, run:
  #
  #     $ mix setup
  #
  # See the documentation for `Mix` for more info on aliases.
  # `mix setup` fetches deps and installs the frontend packages under assets/.
  defp aliases do
    [
      setup: ["deps.get", "cmd npm install --prefix assets"]
    ]
  end
end
| 25.966667 | 84 | 0.598845 |
792fad225e7d72367ac28cc4cbeb119fb07b8547 | 309 | ex | Elixir | lib/fireball_web/resolvers/game_resolver.ex | Catharz/fireball | bb9f123c0d2d71b16eda01d3838565e6e762de20 | [
"MIT"
] | null | null | null | lib/fireball_web/resolvers/game_resolver.ex | Catharz/fireball | bb9f123c0d2d71b16eda01d3838565e6e762de20 | [
"MIT"
] | null | null | null | lib/fireball_web/resolvers/game_resolver.ex | Catharz/fireball | bb9f123c0d2d71b16eda01d3838565e6e762de20 | [
"MIT"
] | null | null | null | defmodule FireballWeb.Resolvers.GameResolver do
@spec active_games(%Game{}, Absinthe.Resolution.t()) :: {atom, [%Game{}]}
def active_games(_args, _info) do
now = Timex.now()
{:ok, start_time} = Timex.format(now, "{ISO:Extended}")
{:ok, [%Game{start_time: start_time, players: []}]}
end
end
| 30.9 | 75 | 0.660194 |
792fb7b8b5a64fb8809acdd772ed000325bfa8cb | 6,726 | ex | Elixir | lib/commanded/commands/dispatcher.ex | zven21/commanded | bd3be98a225629816127cf8ef6624632cd09766e | [
"MIT"
] | null | null | null | lib/commanded/commands/dispatcher.ex | zven21/commanded | bd3be98a225629816127cf8ef6624632cd09766e | [
"MIT"
] | null | null | null | lib/commanded/commands/dispatcher.ex | zven21/commanded | bd3be98a225629816127cf8ef6624632cd09766e | [
"MIT"
] | null | null | null | defmodule Commanded.Commands.Dispatcher do
  @moduledoc false
  # Internal command dispatch pipeline: builds a middleware `Pipeline` from a
  # `Payload`, executes the command against its aggregate in a supervised
  # task and emits start/stop telemetry around the whole dispatch.
  require Logger
  alias Commanded.Aggregates.Aggregate
  alias Commanded.Aggregates.ExecutionContext
  alias Commanded.Middleware.Pipeline
  alias Commanded.Telemetry
  defmodule Payload do
    @moduledoc false
    # Carrier struct for everything `dispatch/1` needs: the command itself,
    # correlation/causation bookkeeping, handler and aggregate modules,
    # timeout/lifespan settings and the middleware chain (defaults to `[]`).
    defstruct [
      :application,
      :command,
      :command_uuid,
      :causation_id,
      :correlation_id,
      :consistency,
      :handler_module,
      :handler_function,
      :handler_before_execute,
      :aggregate_module,
      :identity,
      :identity_prefix,
      :timeout,
      :lifespan,
      :metadata,
      :retry_attempts,
      :returning,
      middleware: []
    ]
  end
# Dispatch the given command to the handler module for the aggregate as
# identified.
@spec dispatch(payload :: struct) ::
:ok
| {:ok, aggregate_state :: struct}
| {:ok, aggregate_version :: non_neg_integer()}
| {:ok, events :: list(struct)}
| {:ok, Commanded.Commands.ExecutionResult.t()}
| {:error, error :: term}
def dispatch(%Payload{} = payload) do
pipeline = to_pipeline(payload)
telemetry_metadata = telemetry_metadata(pipeline, payload)
start_time = telemetry_start(telemetry_metadata)
pipeline = before_dispatch(pipeline, payload)
# Stop command execution if pipeline has been halted
unless Pipeline.halted?(pipeline) do
context = to_execution_context(pipeline, payload)
pipeline
|> execute(payload, context)
|> telemetry_stop(start_time, telemetry_metadata)
|> Pipeline.response()
else
pipeline
|> after_failure(payload)
|> telemetry_stop(start_time, telemetry_metadata)
|> Pipeline.response()
end
end
defp to_pipeline(%Payload{} = payload) do
struct(Pipeline, Map.from_struct(payload))
end
  # Opens (or looks up) the aggregate process and runs the command against it
  # in a supervised task, translating the task outcome into pipeline responses.
  defp execute(%Pipeline{} = pipeline, %Payload{} = payload, %ExecutionContext{} = context) do
    %Pipeline{application: application, assigns: %{aggregate_uuid: aggregate_uuid}} = pipeline
    %Payload{aggregate_module: aggregate_module, timeout: timeout} = payload
    # Assertive match: opening the aggregate is expected to succeed and echo
    # back the same uuid; anything else crashes the dispatch.
    {:ok, ^aggregate_uuid} =
      Commanded.Aggregates.Supervisor.open_aggregate(
        application,
        aggregate_module,
        aggregate_uuid
      )
    task_dispatcher_name = Module.concat([application, Commanded.Commands.TaskDispatcher])
    # `async_nolink` so an abnormally exiting aggregate task does not take the
    # caller down with it.
    task =
      Task.Supervisor.async_nolink(task_dispatcher_name, Aggregate, :execute, [
        application,
        aggregate_module,
        aggregate_uuid,
        context,
        timeout
      ])
    # Normalise the task outcome: a task that does not finish within `timeout`
    # is shut down and reported as `:aggregate_execution_timeout`; an abnormal
    # exit becomes `:aggregate_execution_failed`; a normal
    # `:aggregate_stopped` exit is passed through for the retry logic below.
    result =
      case Task.yield(task, timeout) || Task.shutdown(task) do
        {:ok, result} -> result
        {:exit, {:normal, :aggregate_stopped}} = result -> result
        {:exit, _reason} -> {:error, :aggregate_execution_failed}
        nil -> {:error, :aggregate_execution_timeout}
      end
    case result do
      # Success without a requested reply value.
      {:ok, aggregate_version, events} ->
        pipeline
        |> Pipeline.assign(:aggregate_version, aggregate_version)
        |> Pipeline.assign(:events, events)
        |> after_dispatch(payload)
        |> Pipeline.respond(:ok)
      # Success with an explicit reply.
      {:ok, aggregate_version, events, reply} ->
        pipeline
        |> Pipeline.assign(:aggregate_version, aggregate_version)
        |> Pipeline.assign(:events, events)
        |> after_dispatch(payload)
        |> Pipeline.respond({:ok, reply})
      {:exit, {:normal, :aggregate_stopped}} ->
        # Maybe retry command when aggregate process stopped by lifespan timeout
        case ExecutionContext.retry(context) do
          {:ok, context} ->
            execute(pipeline, payload, context)
          reply ->
            reply
        end
      {:error, error} ->
        pipeline
        |> Pipeline.respond({:error, error})
        |> after_failure(payload)
      # Error with an additional reason, surfaced via the `:error_reason` assign.
      {:error, error, reason} ->
        pipeline
        |> Pipeline.assign(:error_reason, reason)
        |> Pipeline.respond({:error, error})
        |> after_failure(payload)
    end
  end
  # Builds the aggregate `ExecutionContext` from the pipeline and payload.
  # Note the command's uuid doubles as the causation id of any resulting events.
  defp to_execution_context(%Pipeline{} = pipeline, %Payload{} = payload) do
    %Pipeline{command: command, command_uuid: command_uuid, metadata: metadata} = pipeline
    %Payload{
      correlation_id: correlation_id,
      handler_module: handler_module,
      handler_function: handler_function,
      handler_before_execute: handler_before_execute,
      lifespan: lifespan,
      retry_attempts: retry_attempts,
      returning: returning
    } = payload
    %ExecutionContext{
      command: command,
      causation_id: command_uuid,
      correlation_id: correlation_id,
      metadata: metadata,
      handler: handler_module,
      function: handler_function,
      before_execute: handler_before_execute,
      lifespan: lifespan,
      retry_attempts: retry_attempts,
      returning: returning
    }
  end
defp before_dispatch(%Pipeline{} = pipeline, %Payload{middleware: middleware}) do
Pipeline.chain(pipeline, :before_dispatch, middleware)
end
defp after_dispatch(%Pipeline{} = pipeline, %Payload{middleware: middleware}) do
Pipeline.chain(pipeline, :after_dispatch, middleware)
end
  # Runs the `:after_failure` middleware chain. Clause order matters: the
  # first two clauses extract the error (and optional reason) from the
  # pipeline response into assigns; the final clause handles any other shape.

  # Two-element error response: expose the error via the `:error` assign.
  defp after_failure(%Pipeline{response: {:error, error}} = pipeline, %Payload{} = payload) do
    %Payload{middleware: middleware} = payload
    pipeline
    |> Pipeline.assign(:error, error)
    |> Pipeline.chain(:after_failure, middleware)
  end
  # Three-element error response: also expose the reason via `:error_reason`.
  defp after_failure(
         %Pipeline{response: {:error, error, reason}} = pipeline,
         %Payload{} = payload
       ) do
    %Payload{middleware: middleware} = payload
    pipeline
    |> Pipeline.assign(:error, error)
    |> Pipeline.assign(:error_reason, reason)
    |> Pipeline.chain(:after_failure, middleware)
  end
  # Fallback (e.g. halted pipeline without an error response): run the chain
  # without assigning error details.
  defp after_failure(%Pipeline{} = pipeline, %Payload{} = payload) do
    %Payload{middleware: middleware} = payload
    Pipeline.chain(pipeline, :after_failure, middleware)
  end
defp telemetry_start(telemetry_metadata) do
Telemetry.start([:commanded, :application, :dispatch], telemetry_metadata)
end
  # Emits the dispatch stop telemetry event, attaching the pipeline's error
  # assign (when present) to the metadata, and returns the pipeline unchanged
  # so it can keep flowing through the dispatch pipe.
  defp telemetry_stop(%Pipeline{assigns: assigns} = pipeline, start_time, telemetry_metadata) do
    event_prefix = [:commanded, :application, :dispatch]
    case assigns do
      %{error: error} ->
        Telemetry.stop(event_prefix, start_time, Map.put(telemetry_metadata, :error, error))
      _ ->
        Telemetry.stop(event_prefix, start_time, telemetry_metadata)
    end
    pipeline
  end
defp telemetry_metadata(%Pipeline{} = pipeline, %Payload{} = payload) do
%Payload{application: application} = payload
context = to_execution_context(pipeline, payload)
%{
application: application,
error: nil,
execution_context: context
}
end
end
| 28.991379 | 96 | 0.659828 |
792feefd9cca1b9b6174da077c90497b2b43e811 | 1,583 | ex | Elixir | goal_light_ui/lib/goal_light_ui_web/endpoint.ex | TheEndIsNear/goal_light | 8456189832130a5bbfb641b275146de413ee6fa8 | [
"MIT"
] | null | null | null | goal_light_ui/lib/goal_light_ui_web/endpoint.ex | TheEndIsNear/goal_light | 8456189832130a5bbfb641b275146de413ee6fa8 | [
"MIT"
] | null | null | null | goal_light_ui/lib/goal_light_ui_web/endpoint.ex | TheEndIsNear/goal_light | 8456189832130a5bbfb641b275146de413ee6fa8 | [
"MIT"
] | null | null | null | defmodule GoalLightUiWeb.Endpoint do
  use Phoenix.Endpoint, otp_app: :goal_light_ui
  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_goal_light_ui_key",
    signing_salt: "qF214Yob"
  ]
  socket "/socket", GoalLightUiWeb.UserSocket,
    websocket: true,
    longpoll: false
  # LiveView socket; the session options above are exposed to connected
  # LiveViews through `connect_info`.
  socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :goal_light_ui,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)
  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end
  plug Phoenix.LiveDashboard.RequestLogger,
    param_key: "request_logger",
    cookie_key: "request_logger"
  # The plugs below run in the order declared, ending at the router.
  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()
  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  plug GoalLightUiWeb.Router
end
| 29.314815 | 97 | 0.722047 |
793032d87231b95288df47b8d3f8dd36a931a45a | 494 | exs | Elixir | config/test.exs | gautambaghel/fighter | 970a098f0d234892af351070b6b2b596b9a2d83c | [
"Apache-2.0"
] | null | null | null | config/test.exs | gautambaghel/fighter | 970a098f0d234892af351070b6b2b596b9a2d83c | [
"Apache-2.0"
] | null | null | null | config/test.exs | gautambaghel/fighter | 970a098f0d234892af351070b6b2b596b9a2d83c | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :fighter, FighterWeb.Endpoint,
  http: [port: 4001],
  server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
# (Plaintext localhost credentials here are the standard Phoenix test
# defaults; the SQL sandbox pool isolates each test in a transaction.)
config :fighter, Fighter.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "fighter_test",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox
| 24.7 | 56 | 0.734818 |
79303bc2d827a9bf5258d205655435e768694823 | 975 | ex | Elixir | clients/test_client/lib/google_api/test_client/v2/deserializer.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/test_client/lib/google_api/test_client/v2/deserializer.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/test_client/lib/google_api/test_client/v2/deserializer.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.TestClient.V2.Deserializer do
  @moduledoc """
  Helper functions for deserializing responses into models.
  This module is no longer used. Please use GoogleApi.Gax.ModelBase instead.
  """

  # NOTE(review): intentionally empty and documented as unused — presumably
  # kept so existing references still compile; confirm nothing requires it
  # before removing.
end
| 37.5 | 77 | 0.766154 |
79303f31d4e26ce695e61033fff9e1e15b9666a9 | 1,559 | ex | Elixir | clients/fusion_tables/lib/google_api/fusion_tables/v2/model/line.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/fusion_tables/lib/google_api/fusion_tables/v2/model/line.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/fusion_tables/lib/google_api/fusion_tables/v2/model/line.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.FusionTables.V2.Model.Line do
  @moduledoc """
  Represents a line geometry.
  ## Attributes
  * `coordinates` (*type:* `list(list(float()))`, *default:* `nil`) - The coordinates that define the line.
  * `type` (*type:* `String.t`, *default:* `LineString`) - Type: A line geometry.
  """
  # ModelBase supplies the `field/1,2` macros below, which define both the
  # struct keys and the JSON (de)serialization mapping for this model.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :coordinates => list(list(float())),
          :type => String.t()
        }
  # `type: :list` marks `coordinates` as a list-valued field for the codec.
  field(:coordinates, type: :list)
  field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.FusionTables.V2.Model.Line do
  # Delegate JSON decoding to the decode/2 generated on the model module
  # by GoogleApi.Gax.ModelBase.
  def decode(value, options) do
    GoogleApi.FusionTables.V2.Model.Line.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.FusionTables.V2.Model.Line do
  # Delegate JSON encoding to the shared ModelBase encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.18 | 109 | 0.713278 |
793084fc62b66a0d686ad30df29e28983f47a398 | 2,176 | exs | Elixir | config/prod.exs | benlime/dashwallet | 90754cf9cda72b289d5b802cd9fd7eb094f08acb | [
"MIT"
] | 2 | 2017-11-15T20:47:47.000Z | 2017-12-02T11:29:10.000Z | config/prod.exs | benlime/dashwallet | 90754cf9cda72b289d5b802cd9fd7eb094f08acb | [
"MIT"
] | null | null | null | config/prod.exs | benlime/dashwallet | 90754cf9cda72b289d5b802cd9fd7eb094f08acb | [
"MIT"
] | null | null | null | use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# DashwalletWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
# load_from_system_env: runtime settings (e.g. the HTTP port) are resolved in
# DashwalletWeb.Endpoint.init/2 rather than fixed at compile time.
# cache_static_manifest points at the digest manifest built by `mix phx.digest`.
config :dashwallet, DashwalletWeb.Endpoint,
  load_from_system_env: true,
  cache_static_manifest: "priv/static/cache_manifest.json",
  server: true
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :dashwallet, DashwalletWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :dashwallet, DashwalletWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :dashwallet, DashwalletWeb.Endpoint, server: true
#
# Finally, import config/prod.secret.exs, which holds credentials/secrets
# and should be versioned separately (kept out of the main repository).
import_config "prod.secret.exs"
| 33.476923 | 67 | 0.727022 |
79308b1a4d6cbdfa1bd3ced8e7c92b7af66b51e4 | 6,058 | ex | Elixir | lib/sanbase_web/graphql/resolvers/insight_resolver.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | lib/sanbase_web/graphql/resolvers/insight_resolver.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | 1 | 2021-07-24T16:26:03.000Z | 2021-07-24T16:26:03.000Z | lib/sanbase_web/graphql/resolvers/insight_resolver.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | defmodule SanbaseWeb.Graphql.Resolvers.InsightResolver do
require Logger
import Absinthe.Resolution.Helpers, except: [async: 1]
alias SanbaseWeb.Graphql.SanbaseDataloader
alias Sanbase.Auth.User
alias Sanbase.Vote
alias Sanbase.Insight.Post
alias Sanbase.Comments.EntityComment
alias Sanbase.Repo
alias SanbaseWeb.Graphql.Helpers.Utils
require Logger
def insights(%User{} = user, args, _resolution) do
opts = [
is_pulse: Map.get(args, :is_pulse),
is_paywall_required: Map.get(args, :is_paywall_required)
]
{:ok, Post.user_insights(user.id, opts)}
end
def public_insights(%User{} = user, args, _resolution) do
opts = [
is_pulse: Map.get(args, :is_pulse),
is_paywall_required: Map.get(args, :is_paywall_required)
]
{:ok, Post.user_public_insights(user.id, opts)}
end
def related_projects(%Post{} = post, _, _) do
Post.related_projects(post)
end
def post(_root, %{id: post_id}, _resolution) do
Post.by_id(post_id)
end
def all_insights(_root, %{tags: tags, page: page, page_size: page_size} = args, _context)
when is_list(tags) do
opts = [
is_pulse: Map.get(args, :is_pulse),
is_paywall_required: Map.get(args, :is_paywall_required)
]
posts = Post.public_insights_by_tags(tags, page, page_size, opts)
{:ok, posts}
end
def all_insights(_root, %{page: page, page_size: page_size} = args, _resolution) do
opts = [
is_pulse: Map.get(args, :is_pulse),
is_paywall_required: Map.get(args, :is_paywall_required)
]
posts = Post.public_insights(page, page_size, opts)
{:ok, posts}
end
def all_insights_for_user(_root, %{user_id: user_id} = args, _context) do
opts = [
is_pulse: Map.get(args, :is_pulse),
is_paywall_required: Map.get(args, :is_paywall_required)
]
posts = Post.user_public_insights(user_id, opts)
{:ok, posts}
end
def all_insights_user_voted_for(_root, %{user_id: user_id} = args, _context) do
opts = [
is_pulse: Map.get(args, :is_pulse),
is_paywall_required: Map.get(args, :is_paywall_required)
]
posts = Post.all_insights_user_voted_for(user_id, opts)
{:ok, posts}
end
def all_insights_by_tag(_root, %{tag: tag} = args, _context) do
opts = [
is_pulse: Map.get(args, :is_pulse),
is_paywall_required: Map.get(args, :is_paywall_required)
]
posts = Post.public_insights_by_tags([tag], opts)
{:ok, posts}
end
  # Create a new insight owned by the authenticated user.
  def create_post(_root, args, %{context: %{auth: %{current_user: user}}}) do
    Post.create(user, args)
  end
  # Update an insight; the Post context is expected to enforce that `user`
  # may edit the insight with this id.
  def update_post(_root, %{id: post_id} = args, %{
        context: %{auth: %{current_user: %User{} = user}}
      }) do
    Post.update(post_id, user, args)
  end
  # Delete an insight on behalf of the authenticated user.
  def delete_post(_root, %{id: post_id}, %{
        context: %{auth: %{current_user: %User{} = user}}
      }) do
    Post.delete(post_id, user)
  end
  # Publish a draft insight, making it publicly visible.
  def publish_insight(_root, %{id: post_id}, %{
        context: %{auth: %{current_user: %User{id: user_id}}}
      }) do
    Post.publish(post_id, user_id)
  end
def all_tags(_root, _args, _context) do
{:ok, Sanbase.Tag.all()}
end
@doc ~s"""
Returns a tuple `{total_votes, total_san_votes}` where:
- `total_votes` represents the number of votes where each vote's weight is 1
- `total_san_votes` represents the number of votes where each vote's weight is
equal to the san balance of the voter
"""
def votes(%Post{} = post, _args, _context) do
{total_votes, total_san_votes} =
post
|> Repo.preload(votes: [user: :eth_accounts])
|> Map.get(:votes)
|> Stream.map(&Map.get(&1, :user))
|> Stream.map(&User.san_balance!/1)
|> Enum.reduce({0, 0}, fn san_balance, {votes, san_token_votes} ->
{votes + 1, san_token_votes + san_balance}
end)
{:ok,
%{
total_votes: total_votes,
total_san_votes: total_san_votes |> Sanbase.Math.to_integer()
}}
end
def voted_at(%Post{} = post, _args, %{
context: %{auth: %{current_user: user}}
}) do
post
|> Repo.preload([:votes])
|> Map.get(:votes, [])
|> Enum.find(&(&1.user_id == user.id))
|> case do
nil -> {:ok, nil}
vote -> {:ok, vote.inserted_at}
end
end
def voted_at(%Post{}, _args, _context), do: {:ok, nil}
def vote(_root, args, %{context: %{auth: %{current_user: user}}}) do
insight_id = Map.get(args, :insight_id) || Map.fetch!(args, :post_id)
Vote.create(%{post_id: insight_id, user_id: user.id})
|> case do
{:ok, _vote} ->
Post.by_id(insight_id)
{:error, changeset} ->
{
:error,
message: "Can't vote for post with id #{insight_id}",
details: Utils.error_details(changeset)
}
end
end
  # Remove the current user's vote from an insight. Accepts either the new
  # :insight_id argument or the legacy :post_id. Returns the refreshed insight
  # on success; any failure (no existing vote — Vote.get_by_opts returned nil —
  # or a failed removal) falls through to the generic error tuple below.
  def unvote(_root, args, %{context: %{auth: %{current_user: user}}}) do
    insight_id = Map.get(args, :insight_id) || Map.fetch!(args, :post_id)
    with %Vote{} = vote <- Vote.get_by_opts(post_id: insight_id, user_id: user.id),
         {:ok, _vote} <- Vote.remove(vote) do
      Post.by_id(insight_id)
    else
      _error ->
        {:error, "Can't remove vote for post with id #{insight_id}"}
    end
  end
# Note: deprecated - should be removed if not used by frontend
def insight_comments(_root, %{insight_id: post_id} = args, _resolution) do
comments =
EntityComment.get_comments(:insight, post_id, args)
|> Enum.map(& &1.comment)
{:ok, comments}
end
  # Resolve the insight id a comment belongs to. Goes through Dataloader so
  # that resolving many comments in one GraphQL query is batched instead of
  # issuing one lookup per comment.
  def insight_id(%{id: id}, _args, %{context: %{loader: loader}}) do
    loader
    |> Dataloader.load(SanbaseDataloader, :comment_insight_id, id)
    |> on_load(fn loader ->
      {:ok, Dataloader.get(loader, SanbaseDataloader, :comment_insight_id, id)}
    end)
  end
  # Number of comments on an insight, batched through Dataloader. Insights
  # with no comments have no entry in the loader result, hence the `|| 0`.
  def comments_count(%{id: id}, _args, %{context: %{loader: loader}}) do
    loader
    |> Dataloader.load(SanbaseDataloader, :insights_comments_count, id)
    |> on_load(fn loader ->
      {:ok, Dataloader.get(loader, SanbaseDataloader, :insights_comments_count, id) || 0}
    end)
  end
end
| 27.917051 | 91 | 0.636679 |
7930c4d11b54ce1e61f945edb21ab8f652bff77d | 2,348 | ex | Elixir | lib/auto_api/states/remote_control_state.ex | highmobility/hm-auto-api-elixir | 026c3f50871c56877a4acd5f39a8887118a87bb5 | [
"MIT"
] | 4 | 2018-01-19T16:11:10.000Z | 2019-12-13T16:35:10.000Z | lib/auto_api/states/remote_control_state.ex | highmobility/auto-api-elixir | 026c3f50871c56877a4acd5f39a8887118a87bb5 | [
"MIT"
] | 5 | 2020-07-16T07:20:21.000Z | 2021-09-22T10:18:04.000Z | lib/auto_api/states/remote_control_state.ex | highmobility/hm-auto-api-elixir | 026c3f50871c56877a4acd5f39a8887118a87bb5 | [
"MIT"
] | 1 | 2021-02-17T18:36:13.000Z | 2021-02-17T18:36:13.000Z | # AutoAPI
# The MIT License
#
# Copyright (c) 2018- High-Mobility GmbH (https://high-mobility.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
defmodule AutoApi.RemoteControlState do
  @moduledoc """
  RemoteControl state
  """
  alias AutoApi.{State, UnitType}
  # Loads the capability spec and injects the struct definition plus the
  # parse_bin_properties/2 and parse_state_properties/1 helpers used below.
  use AutoApi.State, spec_file: "remote_control.json"
  # All control modes the remote control capability can report.
  @type modes ::
          :unavailable
          | :available
          | :started
          | :failed_to_start
          | :aborted
          | :ended
  @type t :: %__MODULE__{
          control_mode: State.property(modes),
          angle: State.property(UnitType.angle()),
          speed: State.property(UnitType.speed())
        }
  @doc """
  Build state based on binary value
  iex> bin = <<1, 0, 4, 1, 0, 1, 2>>
  iex> AutoApi.RemoteControlState.from_bin(bin)
  %AutoApi.RemoteControlState{control_mode: %AutoApi.Property{data: :started}}
  """
  @spec from_bin(binary) :: __MODULE__.t()
  def from_bin(bin) do
    # parse_bin_properties/2 is injected by `use AutoApi.State` above.
    parse_bin_properties(bin, %__MODULE__{})
  end
  @doc """
  Parse state to bin
  iex> state = %AutoApi.RemoteControlState{control_mode: %AutoApi.Property{data: :started}}
  iex> AutoApi.RemoteControlState.to_bin(state)
  <<1, 0, 4, 1, 0, 1, 2>>
  """
  @spec to_bin(__MODULE__.t()) :: binary
  def to_bin(%__MODULE__{} = state) do
    # parse_state_properties/1 is injected by `use AutoApi.State` above.
    parse_state_properties(state)
  end
end
| 33.542857 | 93 | 0.70017 |
7930fb235fd394b49833836b284501d7ffda7edf | 2,165 | ex | Elixir | lib/verk/supervisor.ex | rubikill/verk | 0afc45b7141f3cba52a64f3cf70319ce39b6b845 | [
"MIT"
] | 746 | 2015-12-13T09:55:47.000Z | 2022-03-29T21:57:55.000Z | lib/verk/supervisor.ex | rubikill/verk | 0afc45b7141f3cba52a64f3cf70319ce39b6b845 | [
"MIT"
] | 176 | 2015-12-16T08:05:19.000Z | 2021-12-21T03:38:55.000Z | lib/verk/supervisor.ex | rubikill/verk | 0afc45b7141f3cba52a64f3cf70319ce39b6b845 | [
"MIT"
] | 78 | 2015-12-16T09:09:52.000Z | 2021-11-05T15:06:03.000Z | defmodule Verk.Supervisor do
@moduledoc """
Supervisor definition for Verk application. It consists of:
* `Verk.ScheduleManager`
* GenStage producer named `Verk.EventProducer`
* GenStage consumer `Verk.QueueStats`
* Redis connectionn named `Verk.Redis`
* A `Verk.Queue.Supervisor` per queue
"""
use Supervisor
@doc """
It starts the main supervisor
"""
def start_link(_ \\ []) do
Supervisor.start_link(__MODULE__, [], name: __MODULE__)
end
  @doc false
  def init(_) do
    # Compute this node's id once and expose it in the application env so
    # other Verk processes can read it without recomputing.
    local_verk_node_id = verk_node_id()
    Application.put_env(:verk, :local_node_id, local_verk_node_id)
    # NOTE(review): supervise/2 belongs to the legacy Supervisor.Spec API —
    # confirm the Elixir version targeted by this project still provides it.
    supervise(children(), strategy: :one_for_one)
  end
  # Builds the child spec list for the supervision tree using the legacy
  # worker/3 and supervisor/3 helpers (Supervisor.Spec API).
  defp children do
    redis_url = Confex.get_env(:verk, :redis_url)
    shutdown_timeout = Confex.get_env(:verk, :shutdown_timeout, 30_000)
    # No default here (nil is falsy), while verk_node_id/0 defaults the same
    # key to false — both behave the same when the key is unset.
    generate_node_id = Confex.get_env(:verk, :generate_node_id)
    redis = worker(Redix, [redis_url, [name: Verk.Redis]], id: Verk.Redis)
    event_producer = worker(Verk.EventProducer, [], id: Verk.EventProducer)
    queue_stats = worker(Verk.QueueStats, [], id: Verk.QueueStats)
    schedule_manager = worker(Verk.ScheduleManager, [], id: Verk.ScheduleManager)
    manager_sup = supervisor(Verk.Manager.Supervisor, [], id: Verk.Manager.Supervisor)
    # The drainer gets the same value as both its start argument and its
    # shutdown grace period so queues can finish draining before termination.
    drainer =
      worker(
        Verk.QueuesDrainer,
        [shutdown_timeout],
        id: Verk.QueuesDrainer,
        shutdown: shutdown_timeout
      )
    children = [
      redis,
      event_producer,
      queue_stats,
      schedule_manager,
      manager_sup,
      drainer
    ]
    if generate_node_id do
      # Inserted at index 1, i.e. right after the Redis connection —
      # presumably so the node manager can register itself before the
      # remaining children start (TODO confirm the ordering requirement).
      node_manager = worker(Verk.Node.Manager, [], id: Verk.Node.Manager)
      List.insert_at(children, 1, node_manager)
    else
      children
    end
  end
defp verk_node_id do
case Application.fetch_env(:verk, :local_node_id) do
{:ok, local_verk_node_id} ->
local_verk_node_id
:error ->
if Confex.get_env(:verk, :generate_node_id, false) do
<<part1::32, part2::32>> = :crypto.strong_rand_bytes(8)
"#{part1}#{part2}"
else
Confex.get_env(:verk, :node_id, "1")
end
end
end
end
| 26.728395 | 86 | 0.657275 |
7930fbb62bcd2f9ac26a01d012fdd5541dcb7918 | 725 | ex | Elixir | web/gettext.ex | JohnB/phoenix_test_app | 445a19a136865a074a8c59cf97ffeee902383c90 | [
"MIT"
] | null | null | null | web/gettext.ex | JohnB/phoenix_test_app | 445a19a136865a074a8c59cf97ffeee902383c90 | [
"MIT"
] | null | null | null | web/gettext.ex | JohnB/phoenix_test_app | 445a19a136865a074a8c59cf97ffeee902383c90 | [
"MIT"
] | null | null | null | defmodule PhoenixTestApp.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import PhoenixTestApp.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :phoenix_test_app
end
| 29 | 72 | 0.688276 |
793116ae6ef09ed0db72f54cab084e7073de712e | 753 | ex | Elixir | instrumentation/opentelemetry_oban/test/support/data_case.ex | RudolfMan/opentelemetry-erlang-contrib | 44fd2a6871742380dd6adc112f9776cda501ff1f | [
"Apache-2.0"
] | 24 | 2021-05-07T18:37:11.000Z | 2022-03-13T06:21:00.000Z | instrumentation/opentelemetry_oban/test/support/data_case.ex | RudolfMan/opentelemetry-erlang-contrib | 44fd2a6871742380dd6adc112f9776cda501ff1f | [
"Apache-2.0"
] | 42 | 2021-05-10T20:19:22.000Z | 2022-03-31T17:48:13.000Z | instrumentation/opentelemetry_oban/test/support/data_case.ex | RudolfMan/opentelemetry-erlang-contrib | 44fd2a6871742380dd6adc112f9776cda501ff1f | [
"Apache-2.0"
] | 19 | 2021-08-30T01:33:54.000Z | 2022-03-20T22:01:15.000Z | defmodule DataCase do
@moduledoc """
This module defines the setup for tests requiring access to the data layer.
You may define functions here to be used as helpers in your tests.
Finally, if the test case interacts with the database, it cannot be async.
For this reason, every test runs inside a transaction which is reset at the
beginning of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  # Code injected into every test module that does `use DataCase`:
  # Oban's testing helpers plus Ecto and this module's own helpers.
  using do
    quote do
      use Oban.Testing, repo: TestRepo
      import Ecto
      import DataCase
    end
  end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(TestRepo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(TestRepo, {:shared, self()})
end
:ok
end
end
| 22.818182 | 77 | 0.699867 |
79311aa6dfbfdf0cc825acd012116f93d9fca6be | 676 | exs | Elixir | 05_simple_math/mix.exs | mkchandler/elixir-school | cc08bd723db00cc35cd2d6b07abe519e5d102ca0 | [
"MIT"
] | 1 | 2015-12-15T04:46:41.000Z | 2015-12-15T04:46:41.000Z | 05_simple_math/mix.exs | mkchandler/elixir-school | cc08bd723db00cc35cd2d6b07abe519e5d102ca0 | [
"MIT"
] | null | null | null | 05_simple_math/mix.exs | mkchandler/elixir-school | cc08bd723db00cc35cd2d6b07abe519e5d102ca0 | [
"MIT"
] | null | null | null | defmodule SimpleMath.Mixfile do
  use Mix.Project
  # Project definition: package name, version and build settings.
  def project do
    [app: :simple_math,
     version: "0.1.0",
     elixir: "~> 1.3",
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     deps: deps()]
  end
  # Configuration for the OTP application
  #
  # Type "mix help compile.app" for more information
  def application do
    # Only :logger is required at runtime.
    [applications: [:logger]]
  end
  # Dependencies can be Hex packages:
  #
  #   {:mydep, "~> 0.3.0"}
  #
  # Or git/path repositories:
  #
  #   {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
  #
  # Type "mix help deps" for more examples and options
  defp deps do
    # No external dependencies for this exercise.
    []
  end
end
| 20.484848 | 77 | 0.610947 |
79314cd3a71945c192a8d9dc0bbfb14163968bc4 | 6,880 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/image_settings.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/image_settings.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/image_settings.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Model.ImageSettings do
  @moduledoc """
  Branding properties for images associated with the channel.
  ## Attributes
  * `backgroundImageUrl` (*type:* `GoogleApi.YouTube.V3.Model.LocalizedProperty.t`, *default:* `nil`) - The URL for the background image shown on the video watch page. The image should be 1200px by 615px, with a maximum file size of 128k.
  * `bannerExternalUrl` (*type:* `String.t`, *default:* `nil`) - This is used only in update requests; if it's set, we use this URL to generate all of the above banner URLs.
  * `bannerImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Desktop size (1060x175).
  * `bannerMobileExtraHdImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Mobile size high resolution (1440x395).
  * `bannerMobileHdImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Mobile size high resolution (1280x360).
  * `bannerMobileImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Mobile size (640x175).
  * `bannerMobileLowImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Mobile size low resolution (320x88).
  * `bannerMobileMediumHdImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Mobile size medium/high resolution (960x263).
  * `bannerTabletExtraHdImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Tablet size extra high resolution (2560x424).
  * `bannerTabletHdImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Tablet size high resolution (2276x377).
  * `bannerTabletImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Tablet size (1707x283).
  * `bannerTabletLowImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. Tablet size low resolution (1138x188).
  * `bannerTvHighImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. TV size high resolution (1920x1080).
  * `bannerTvImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. TV size extra high resolution (2120x1192).
  * `bannerTvLowImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. TV size low resolution (854x480).
  * `bannerTvMediumImageUrl` (*type:* `String.t`, *default:* `nil`) - Banner image. TV size medium resolution (1280x720).
  * `largeBrandedBannerImageImapScript` (*type:* `GoogleApi.YouTube.V3.Model.LocalizedProperty.t`, *default:* `nil`) - The image map script for the large banner image.
  * `largeBrandedBannerImageUrl` (*type:* `GoogleApi.YouTube.V3.Model.LocalizedProperty.t`, *default:* `nil`) - The URL for the 854px by 70px image that appears below the video player in the expanded video view of the video watch page.
  * `smallBrandedBannerImageImapScript` (*type:* `GoogleApi.YouTube.V3.Model.LocalizedProperty.t`, *default:* `nil`) - The image map script for the small banner image.
  * `smallBrandedBannerImageUrl` (*type:* `GoogleApi.YouTube.V3.Model.LocalizedProperty.t`, *default:* `nil`) - The URL for the 640px by 70px banner image that appears below the video player in the default view of the video watch page.
  * `trackingImageUrl` (*type:* `String.t`, *default:* `nil`) - The URL for a 1px by 1px tracking pixel that can be used to collect statistics for views of the channel or video pages.
  * `watchIconImageUrl` (*type:* `String.t`, *default:* `nil`) - The URL for the image that appears above the top-left corner of the video player. This is a 25-pixel-high image with a flexible width that cannot exceed 170 pixels.
  """
  # Generated code (see the file header) — regenerate instead of hand-editing.
  # ModelBase supplies the field/1,2 macros that define both the struct keys
  # and the JSON (de)serialization mapping.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :backgroundImageUrl => GoogleApi.YouTube.V3.Model.LocalizedProperty.t(),
          :bannerExternalUrl => String.t(),
          :bannerImageUrl => String.t(),
          :bannerMobileExtraHdImageUrl => String.t(),
          :bannerMobileHdImageUrl => String.t(),
          :bannerMobileImageUrl => String.t(),
          :bannerMobileLowImageUrl => String.t(),
          :bannerMobileMediumHdImageUrl => String.t(),
          :bannerTabletExtraHdImageUrl => String.t(),
          :bannerTabletHdImageUrl => String.t(),
          :bannerTabletImageUrl => String.t(),
          :bannerTabletLowImageUrl => String.t(),
          :bannerTvHighImageUrl => String.t(),
          :bannerTvImageUrl => String.t(),
          :bannerTvLowImageUrl => String.t(),
          :bannerTvMediumImageUrl => String.t(),
          :largeBrandedBannerImageImapScript => GoogleApi.YouTube.V3.Model.LocalizedProperty.t(),
          :largeBrandedBannerImageUrl => GoogleApi.YouTube.V3.Model.LocalizedProperty.t(),
          :smallBrandedBannerImageImapScript => GoogleApi.YouTube.V3.Model.LocalizedProperty.t(),
          :smallBrandedBannerImageUrl => GoogleApi.YouTube.V3.Model.LocalizedProperty.t(),
          :trackingImageUrl => String.t(),
          :watchIconImageUrl => String.t()
        }
  field(:backgroundImageUrl, as: GoogleApi.YouTube.V3.Model.LocalizedProperty)
  field(:bannerExternalUrl)
  field(:bannerImageUrl)
  field(:bannerMobileExtraHdImageUrl)
  field(:bannerMobileHdImageUrl)
  field(:bannerMobileImageUrl)
  field(:bannerMobileLowImageUrl)
  field(:bannerMobileMediumHdImageUrl)
  field(:bannerTabletExtraHdImageUrl)
  field(:bannerTabletHdImageUrl)
  field(:bannerTabletImageUrl)
  field(:bannerTabletLowImageUrl)
  field(:bannerTvHighImageUrl)
  field(:bannerTvImageUrl)
  field(:bannerTvLowImageUrl)
  field(:bannerTvMediumImageUrl)
  field(:largeBrandedBannerImageImapScript, as: GoogleApi.YouTube.V3.Model.LocalizedProperty)
  field(:largeBrandedBannerImageUrl, as: GoogleApi.YouTube.V3.Model.LocalizedProperty)
  field(:smallBrandedBannerImageImapScript, as: GoogleApi.YouTube.V3.Model.LocalizedProperty)
  field(:smallBrandedBannerImageUrl, as: GoogleApi.YouTube.V3.Model.LocalizedProperty)
  field(:trackingImageUrl)
  field(:watchIconImageUrl)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ImageSettings do
  # Delegate JSON decoding to the decode/2 generated on the model module.
  def decode(value, options) do
    GoogleApi.YouTube.V3.Model.ImageSettings.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.ImageSettings do
  # Delegate JSON encoding to the shared ModelBase encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 62.545455 | 240 | 0.718605 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.