hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9ee4775c31fc2c899239d524523b12a3e30081cc | 293 | ex | Elixir | lib/requestbox_web/resolvers/sessions.ex | kevinastone/phoenixbin | 8b7326b5de1fe9961c1a2d7971a3d4abe7178829 | [
"MIT"
] | 18 | 2015-11-18T09:52:34.000Z | 2021-04-27T19:38:08.000Z | lib/requestbox_web/resolvers/sessions.ex | kevinastone/phoenixbin | 8b7326b5de1fe9961c1a2d7971a3d4abe7178829 | [
"MIT"
] | 3 | 2017-01-11T18:55:39.000Z | 2021-06-15T05:46:34.000Z | lib/requestbox_web/resolvers/sessions.ex | kevinastone/phoenixbin | 8b7326b5de1fe9961c1a2d7971a3d4abe7178829 | [
"MIT"
] | 7 | 2016-08-17T10:24:20.000Z | 2020-07-10T13:00:36.000Z | defmodule RequestboxWeb.Resolvers.Sessions do
use Requestbox.Web, :controller
alias Requestbox.Session
def find_session(%{id: id}, _) do
case Session.find_session(id) do
nil -> {:error, "Request Session not found for #{id}"}
session -> {:ok, session}
end
end
end
| 22.538462 | 60 | 0.675768 |
9ee4b5ec8c1fdf64508102508a2be34ffeff60d4 | 346 | ex | Elixir | lib/battleships/gamelist_supervisor.ex | alexander-lazarov/battleships | e35fcb1fc48281881f2976cd7ec8cf7a6cb1f707 | [
"MIT"
] | null | null | null | lib/battleships/gamelist_supervisor.ex | alexander-lazarov/battleships | e35fcb1fc48281881f2976cd7ec8cf7a6cb1f707 | [
"MIT"
] | 2 | 2020-07-17T13:59:03.000Z | 2021-05-09T20:35:32.000Z | lib/battleships/gamelist_supervisor.ex | alexander-lazarov/battleships | e35fcb1fc48281881f2976cd7ec8cf7a6cb1f707 | [
"MIT"
] | null | null | null | defmodule Battheships.GamelistSupervisor do
use Supervisor
def init(_init_arg) do
children = [
Battleships.GamelistServer,
Battleships.GamesSupervisor
]
Supervisor.init(children, strategy: :one_for_all)
end
  # Starts the supervisor and registers it under the module name, so at most
  # one instance runs per node. `init_arg` is forwarded to `init/1`, which
  # ignores it.
  def start_link(init_arg) do
    Supervisor.start_link(__MODULE__, init_arg, name: __MODULE__)
  end
end
| 20.352941 | 65 | 0.736994 |
9ee4d01d23624df487c0b24486ace5676a8496af | 1,616 | exs | Elixir | mix.exs | hotpyn/context-demo | 304c767be93f8dec75ef8ec97d13e5e90352a47d | [
"MIT"
] | null | null | null | mix.exs | hotpyn/context-demo | 304c767be93f8dec75ef8ec97d13e5e90352a47d | [
"MIT"
] | null | null | null | mix.exs | hotpyn/context-demo | 304c767be93f8dec75ef8ec97d13e5e90352a47d | [
"MIT"
] | null | null | null | defmodule Context.Mixfile do
use Mix.Project
  # Mix project definition: app name/version, minimum Elixir version,
  # environment-specific compile paths (see elixirc_paths/1 below), the
  # Phoenix/gettext compilers, and the alias/dependency lists defined below.
  def project do
    [
      app: :context,
      version: "0.0.1",
      elixir: "~> 1.4",
      elixirc_paths: elixirc_paths(Mix.env),
      compilers: [:phoenix, :gettext] ++ Mix.compilers,
      start_permanent: Mix.env == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end
  # Configuration for the OTP application.
  #
  # `mod` names the application callback module started on boot;
  # `extra_applications` lists apps to start that are not inferred from deps.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {Context.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end
  # Specifies which paths to compile per environment.
  # The test environment additionally compiles test/support helpers.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]
  # Specifies your project dependencies (Phoenix 1.3-era stack with
  # Ecto/Postgrex and cowboy 1.x).
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.3.4"},
      {:phoenix_pubsub, "~> 1.0"},
      {:phoenix_ecto, "~> 3.2"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_html, "~> 2.10"},
      {:phoenix_live_reload, "~> 1.0", only: :dev},
      {:gettext, "~> 0.11"},
      {:cowboy, "~> 1.0"},
      {:plug_cowboy, "~> 1.0"}
    ]
  end
  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      "test": ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 26.064516 | 79 | 0.581064 |
9ee4ea79fa53b2021549440de720b5374af8e550 | 226 | exs | Elixir | apps/data/config/dev.exs | elixirschool/extracurricular | eb8b725fa49ca91b1c6b7e610a8522bc81a80de1 | [
"MIT"
] | 48 | 2017-08-21T02:08:16.000Z | 2022-01-05T14:02:56.000Z | apps/data/config/dev.exs | elixirschool/extracurricular | eb8b725fa49ca91b1c6b7e610a8522bc81a80de1 | [
"MIT"
] | 68 | 2017-08-21T02:17:32.000Z | 2017-11-09T15:56:27.000Z | apps/data/config/dev.exs | elixirschool/extracurricular | eb8b725fa49ca91b1c6b7e610a8522bc81a80de1 | [
"MIT"
] | 26 | 2017-08-21T04:28:22.000Z | 2018-12-09T14:20:29.000Z | use Mix.Config
# Configure your database.
# Development-only settings: local Postgres with default superuser
# credentials and a pool of 10 connections. Do not reuse these values
# outside the dev environment.
config :data, Data.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "extracurricular_dev",
  hostname: "localhost",
  pool_size: 10
| 20.545455 | 34 | 0.730088 |
9ee50a287712d1ace2f1423dca1aff6155d2de20 | 3,426 | ex | Elixir | clients/service_control/lib/google_api/service_control/v1/model/report_response.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/service_control/lib/google_api/service_control/v1/model/report_response.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/service_control/lib/google_api/service_control/v1/model/report_response.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ServiceControl.V1.Model.ReportResponse do
  @moduledoc """
  Response message for the Report method.

  ## Attributes

  - reportErrors ([ReportError]): Partial failures, one for each `Operation` in the request that failed processing. There are three possible combinations of the RPC status: 1. The combination of a successful RPC status and an empty `report_errors` list indicates a complete success where all `Operations` in the request are processed successfully. 2. The combination of a successful RPC status and a non-empty `report_errors` list indicates a partial success where some `Operations` in the request succeeded. Each `Operation` that failed processing has a corresponding item in this list. 3. A failed RPC status indicates a general non-deterministic failure. When this happens, it's impossible to know which of the 'Operations' in the request succeeded or failed. Defaults to: `null`.
  - reportInfos ([ReportInfo]): Quota usage for each quota release `Operation` request. Fully or partially failed quota release request may or may not be present in `report_quota_info`. For example, a failed quota release request will have the current quota usage info when precise quota library returns the info. A deadline exceeded quota request will not have quota usage info. If there is no quota release request, report_quota_info will be empty. Defaults to: `null`.
  - serviceConfigId (String.t): The actual config id used to process the request. Defaults to: `null`.
  - serviceRolloutId (String.t): Unimplemented. The current service rollout id used to process the request. Defaults to: `null`.
  """

  # Presumably injects the struct, `field/2` macro, and `decode/2` used by the
  # Poison protocol implementations — ModelBase is not visible here; confirm.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :reportErrors => list(GoogleApi.ServiceControl.V1.Model.ReportError.t()),
          :reportInfos => list(GoogleApi.ServiceControl.V1.Model.ReportInfo.t()),
          :serviceConfigId => any(),
          :serviceRolloutId => any()
        }

  # Nested model lists are decoded element-wise into their model structs.
  field(:reportErrors, as: GoogleApi.ServiceControl.V1.Model.ReportError, type: :list)
  field(:reportInfos, as: GoogleApi.ServiceControl.V1.Model.ReportInfo, type: :list)
  field(:serviceConfigId)
  field(:serviceRolloutId)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceControl.V1.Model.ReportResponse do
  alias GoogleApi.ServiceControl.V1.Model.ReportResponse

  # Delegates Poison decoding to the model module's decode/2.
  def decode(value, options), do: ReportResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ServiceControl.V1.Model.ReportResponse do
  alias GoogleApi.Gax.ModelBase

  # Delegates Poison encoding to the shared Gax model encoder.
  def encode(value, options), do: ModelBase.encode(value, options)
end
| 60.105263 | 880 | 0.754524 |
9ee50aca6ba4d0d747c29af3a2149e8150fe0262 | 733 | ex | Elixir | lib/live_sup_web/controllers/project_controller.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | null | null | null | lib/live_sup_web/controllers/project_controller.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | 3 | 2022-02-23T15:51:48.000Z | 2022-03-14T22:52:43.000Z | lib/live_sup_web/controllers/project_controller.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | null | null | null | defmodule LiveSupWeb.ProjectController do
use LiveSupWeb, :controller
alias LiveSup.Core.Projects
alias LiveSup.Policies.ProjectPolicy
def index(conn, _params) do
current_user = conn.assigns.current_user
projects = current_user |> Projects.by_user()
render(conn, "index.html",
current_user: current_user,
projects: projects
)
end
  # Shows a single project (loaded together with its dashboards) if the
  # current user is authorized to read it via ProjectPolicy.
  #
  # NOTE(review): when the fetch or the Bodyguard check fails, the `with`
  # falls through and the error tuple is returned as-is — presumably an
  # `action_fallback` controller handles that; verify one is configured.
  def show(conn, %{"id" => project_id}) do
    current_user = conn.assigns.current_user
    with {:ok, project} <- project_id |> Projects.get_with_dashboards(),
         :ok <- Bodyguard.permit(ProjectPolicy, :read, current_user, project) do
      render(conn, "show.html",
        current_user: current_user,
        project: project
      )
    end
  end
end
| 26.178571 | 80 | 0.683492 |
9ee515f9dfb4e58ac50ea57b19931c63df8616e3 | 620 | ex | Elixir | Microsoft.Azure.Management.Preview.Subscription/lib/microsoft/azure/management/preview/subscription/model/ad_principal.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Preview.Subscription/lib/microsoft/azure/management/preview/subscription/model/ad_principal.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Preview.Subscription/lib/microsoft/azure/management/preview/subscription/model/ad_principal.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Preview.Subscription.Model.AdPrincipal do
@moduledoc """
Active Directory Principal for subscription creation delegated permission
"""
@derive [Poison.Encoder]
defstruct [
:"objectId"
]
@type t :: %__MODULE__{
:"objectId" => String.t
}
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Preview.Subscription.Model.AdPrincipal do
def decode(value, _options) do
value
end
end
| 23.846154 | 97 | 0.740323 |
9ee537b4578618f330f250a594e25de21b53c436 | 2,238 | exs | Elixir | mix.exs | appcues/elixometer | 1f95425d8e1df02edd0b267c57c6d579d45c4c8d | [
"Apache-2.0"
] | null | null | null | mix.exs | appcues/elixometer | 1f95425d8e1df02edd0b267c57c6d579d45c4c8d | [
"Apache-2.0"
] | null | null | null | mix.exs | appcues/elixometer | 1f95425d8e1df02edd0b267c57c6d579d45c4c8d | [
"Apache-2.0"
] | null | null | null | defmodule Elixometer.Mixfile do
use Mix.Project
@description """
Elixometer is a light wrapper around exometer that defines and
subscribes metrics automatically to the configured reporter.
"""
@project_url "https://github.com/pinterest/elixometer"
  # Mix project definition: package metadata, coverage/dialyzer/docs tooling,
  # and environment-specific compile paths (see elixirc_paths/1 below).
  def project do
    [
      app: :elixometer,
      version: "1.3.0-dev",
      elixir: ">= 1.3.0",
      description: @description,
      source_url: @project_url,
      homepage_url: @project_url,
      package: package(),
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.html": :test,
        "coveralls.post": :test
      ],
      elixirc_paths: elixirc_paths(Mix.env()),
      # Dialyzer
      dialyzer: [plt_add_deps: :transitive],
      # Docs
      name: "Elixometer",
      docs: [
        main: "Elixometer",
        source_url: @project_url
      ]
    ]
  end
  # OTP application spec: starts Elixometer.App with lager/exometer_core/pobox,
  # and seeds the app env with defaults for the current Mix environment.
  #
  # NOTE(review): `erl_opts` is a compiler option normally set in project/0,
  # not an application() key — it is likely ignored here; confirm intent.
  def application do
    [
      mod: {Elixometer.App, []},
      applications: [:lager, :exometer_core, :pobox],
      erl_opts: [parse_transform: "lager_transform"],
      env: default_config(Mix.env())
    ]
  end
def default_config(:test) do
[update_frequency: 20]
end
def default_config(_) do
[update_frequency: 1_000]
end
  # Project dependencies; pinned overrides exist to keep Erlang deps building
  # (see the inline comments carried over from upstream).
  defp deps do
    [
      # lager 3.2.1 is needed for erl19 because of
      # https://github.com/basho/lager/pull/321
      {:lager, ">= 3.2.1", override: true},
      # Force rebar so that setup can build, does not build with rebar3 base compiler
      {:setup, "1.8.4", override: true, manager: :rebar},
      {:exometer_core, "~> 1.4"},
      {:credo, "~> 0.8", only: [:dev, :test]},
      {:dialyxir, "~> 0.5", only: :dev, runtime: false},
      {:ex_doc, "~> 0.18", only: :dev},
      {:excoveralls, "~> 0.8", only: [:dev, :test]},
      {:pobox, "~>1.0.2"}
    ]
  end
  # Hex package metadata: shipped files, maintainers, license, and links.
  defp package do
    [
      files: ["config", "lib", "mix.exs", "mix.lock", "README.md", "LICENSE"],
      maintainers: ["Jon Parise", "Steve Cohen"],
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => @project_url}
    ]
  end
defp elixirc_paths(:test) do
["lib", "test/support"]
end
defp elixirc_paths(_) do
["lib"]
end
end
| 24.326087 | 85 | 0.578195 |
9ee5478edb48de8d879bb46a97573d8cbb534101 | 12,553 | ex | Elixir | lib/set/parser.ex | josevalim/unicode_set | da3f4dcf6553e5c7e364461a76ad6c4de1e20c93 | [
"Apache-2.0"
] | 1 | 2022-02-27T22:57:18.000Z | 2022-02-27T22:57:18.000Z | lib/set/parser.ex | josevalim/unicode_set | da3f4dcf6553e5c7e364461a76ad6c4de1e20c93 | [
"Apache-2.0"
] | null | null | null | lib/set/parser.ex | josevalim/unicode_set | da3f4dcf6553e5c7e364461a76ad6c4de1e20c93 | [
"Apache-2.0"
] | null | null | null | defmodule Unicode.Set.Parser do
  @moduledoc false

  import NimbleParsec
  import Unicode.Set.Property

  @doc false
  # Guard used by hex_to_codepoint/1 to decide whether a single escaped
  # character should pass through literally.
  # NOTE(review): the range accepts ALL of a-z/A-Z, not just the hex digits
  # a-f/A-F — confirm whether that breadth is intentional (see
  # hex_to_codepoint/1 for the consequence).
  defguard is_hex_digit(c) when c in ?0..?9 or c in ?a..?z or c in ?A..?Z

  @doc false
  # Top-level entry: a unicode set is either a property expression, the
  # empty-set literal "[-]", or a bracketed basic set.
  def unicode_set do
    choice([
      property(),
      empty_set(),
      basic_set()
    ])
  end

  @doc false
  # A bracketed set: "[" , optional leading "-" or "^" (both replaced by the
  # :not marker), one or more sequences, "]". The collected terms are folded
  # into an AST by reduce_set_operations/1.
  def basic_set do
    ignore(ascii_char([?[]))
    |> optional(ascii_char([?-, ?^]) |> replace(:not))
    |> times(sequence(), min: 1)
    |> ignore(ascii_char([?]]))
    |> reduce(:reduce_set_operations)
    |> label("set")
  end

  @doc false
  # Matches the literal empty-set syntax "[-]".
  def empty_set do
    string("[-]")
    |> label("empty set")
  end

  @doc false
  # One term inside a set: either a (possibly repeated) nested set or a
  # character/string range, with trailing whitespace discarded.
  def sequence do
    choice([
      maybe_repeated_set(),
      range()
    ])
    |> ignore(optional(whitespace()))
    |> label("sequence")
  end

  @doc false
  # A nested set followed by zero or more "&"/"-" operator + set pairs;
  # :one_set is resolved at combinator-compile time via parsec/1.
  def maybe_repeated_set do
    parsec(:one_set)
    |> repeat(set_operator() |> parsec(:one_set))
  end

  @debug_functions []

  # Compile-time debug hook. Expands to IO.inspect calls only when compiling
  # in :dev AND the calling function's name is listed in @debug_functions;
  # otherwise expands to a no-op tuple that merely references both arguments
  # (so no unused-variable warnings are emitted at the call sites).
  defmacrop tracer(step, a) do
    {caller, _} = __CALLER__.function

    if Mix.env() in [:dev] and caller in @debug_functions do
      quote do
        IO.inspect("#{unquote(caller)}", label: "Step #{unquote(step)}")
        IO.inspect(unquote(a), label: "argument")
      end
    else
      quote do
        _ = {unquote(step), unquote(a)}
      end
    end
  end
  @doc false
  # Folds the flat parser output into an AST of :union / :intersection /
  # :difference / :in / :not_in nodes. Clause order is significant: operator
  # clauses are tried before the range-merging and :not-normalizing clauses.
  # Single remaining term: done.
  def reduce_set_operations([set_a]) do
    tracer(0, [set_a])
    set_a
  end

  # Exactly one binary operation left.
  def reduce_set_operations([set_a, operator, set_b])
      when operator in [:difference, :intersection] do
    tracer(1, [set_a, operator, set_b])
    {operator, [set_a, set_b]}
  end

  # Left-associatively fold a chain of &/- operations.
  def reduce_set_operations([set_a, operator, set_b | repeated_sets])
      when operator in [:difference, :intersection] do
    tracer(2, [set_a, operator, set_b | repeated_sets])
    reduce_set_operations([{operator, [set_a, set_b]} | repeated_sets])
  end

  # Merge two adjacent :in range lists into one sorted list.
  def reduce_set_operations([{:in, ranges1}, {:in, ranges2} | rest]) do
    tracer(2, [{:in, ranges1}, {:in, ranges2} | rest])
    reduce_set_operations([{:in, Enum.sort(ranges1 ++ ranges2)} | rest])
  end

  # Same merge, under a leading :not marker.
  def reduce_set_operations([:not, {:in, ranges1}, {:in, ranges2} | rest]) do
    tracer(3, [:not, {:in, ranges1}, {:in, ranges2} | rest])
    reduce_set_operations([:not, {:in, Enum.sort(ranges1 ++ ranges2)} | rest])
  end

  # :not applied to an :in term becomes :not_in.
  def reduce_set_operations([:not, {:in, ranges} | rest]) do
    tracer(4, [:not, {:in, ranges} | rest])
    reduce_set_operations([{:not_in, ranges} | rest])
  end

  # Double negation cancels out.
  def reduce_set_operations([:not, {:not_in, ranges}]) do
    tracer(5, [:not, {:not_in, ranges}])
    reduce_set_operations([{:in, ranges}])
  end

  # :not over anything else: wrap the remainder in :not_in.
  def reduce_set_operations([:not | ranges]) do
    tracer(6, [:not | ranges])
    reduce_set_operations([{:not_in, ranges}])
  end

  # Adjacent terms with no explicit operator are unioned.
  def reduce_set_operations([set_a | rest]) do
    tracer(7, [set_a | rest])
    {:union, [set_a, reduce_set_operations(rest)]}
  end
  @doc false
  # "&" (intersection) or "-" (difference), with surrounding whitespace
  # discarded; replaced by the corresponding operator atom.
  def set_operator do
    ignore(optional(whitespace()))
    |> choice([
      ascii_char([?&]) |> replace(:intersection),
      ascii_char([?-]) |> replace(:difference)
    ])
    |> ignore(optional(whitespace()))
  end

  @doc false
  # A character range (a or a-z) or a string range ({ab} or {ab}-{cd}),
  # normalized by reduce_range/1 and then validated by check_valid_range/5.
  def range do
    choice([
      character_range(),
      string_range()
    ])
    |> reduce(:reduce_range)
    |> post_traverse(:check_valid_range)
    |> label("range")
  end

  @doc false
  # One character, optionally followed by "-" and a second character,
  # with optional whitespace around the dash.
  def character_range do
    char()
    |> ignore(optional(whitespace()))
    |> optional(
      ignore(ascii_char([?-]))
      |> ignore(optional(whitespace()))
      |> concat(char())
    )
  end

  @doc false
  # Of the form {abc} or {abc-def}
  # Each braced string is wrapped so reduce_range/1 can tell string endpoints
  # (lists) apart from single codepoints (integers).
  def string_range do
    string()
    |> wrap
    |> ignore(optional(whitespace()))
    |> optional(
      ignore(ascii_char([?-]))
      |> ignore(optional(whitespace()))
      |> concat(string() |> wrap)
    )
  end
@doc false
def reduce_range([[bracketed]]) when is_list(bracketed),
do: {:in, Enum.map(bracketed, &{&1, &1})}
def reduce_range([[from]]) when is_integer(from), do: {:in, [{from, from}]}
def reduce_range([[from], [to]]) when is_integer(from) and is_integer(to),
do: {:in, [{from, to}]}
def reduce_range([from]), do: {:in, [{from, from}]}
def reduce_range([from, to]), do: {:in, [{from, to}]}
  @doc false
  # post_traverse callback for range/0. Integer (codepoint) ranges and
  # identical string endpoints pass through unchanged; a proper string range
  # is rejected if either endpoint is a single character.
  def check_valid_range(rest, [in: [{from, to}]] = args, context, _, _)
      when is_integer(from) and is_integer(to) do
    {rest, args, context}
  end

  # Same endpoint twice (e.g. {ab}-{ab}) is always acceptable.
  def check_valid_range(rest, [in: [{from, from}]] = args, context, _, _) do
    {rest, args, context}
  end

  # Remaining case: the endpoints are charlists from a string range
  # (the integer cases were consumed by the first clause).
  def check_valid_range(rest, [in: [{from, to}]] = args, context, _, _) do
    if length(from) == 1 or length(to) == 1 do
      {:error,
       "String ranges must be longer than one character. Found " <>
         format_string_range(from, to)}
    else
      {rest, args, context}
    end
  end
  @doc false
  # A Unicode property reference in either perl (\p{...}) or posix ([:...:])
  # syntax, resolved into ranges by reduce_property/5.
  def property do
    choice([
      perl_property(),
      posix_property()
    ])
    |> post_traverse(:reduce_property)
    |> label("property")
  end

  @doc false
  # [:name:] or [:^name:] — "^" is replaced with the :not marker; the
  # expression body may not contain ":".
  def posix_property do
    ignore(string("[:"))
    |> optional(ascii_char([?^]) |> replace(:not))
    |> property_expression([{:not, ?:}])
    |> ignore(string(":]"))
    |> label("posix property")
  end

  @doc false
  # \p{name} or \P{name} — uppercase P is the negated form; the expression
  # body may not contain "}".
  def perl_property do
    ignore(ascii_char([?\\]))
    |> choice([ascii_char([?P]) |> replace(:not), ignore(ascii_char([?p]))])
    |> ignore(ascii_char([?{]))
    |> property_expression([{:not, ?}}])
    |> ignore(ascii_char([?}]))
    |> label("perl property")
  end

  @doc false
  # "=" (membership) or the Unicode "not equal" sign U+2260 (negated
  # membership) between a property name and its value.
  def operator do
    choice([
      utf8_char([0x2260]) |> replace(:not_in),
      ascii_char([?=]) |> replace(:in)
    ])
  end

  @doc false
  # A property expression bounded by `fence` (the closing delimiter class):
  # either the "is"-prefixed block form followed by a value, or a property
  # name optionally followed by an operator and a value.
  def property_expression(combinator \\ empty(), fence) do
    combinator
    |> choice([
      is_block()
      |> ignore(optional(whitespace()))
      |> concat(value(fence)),
      property_name()
      |> optional(operator() |> ignore(optional(whitespace())) |> concat(value(fence)))
    ])
  end
  @doc false
  # post_traverse callback for property/0: resolves a parsed property
  # reference into {:in, ranges} / {:not_in, ranges} via fetch_property!
  # (imported from Unicode.Set.Property). The parser pushes args in reverse,
  # so patterns read [value, operator, property, negation]. Clause order is
  # significant. A %{parsed: _} result carries pre-parsed ranges; any other
  # result is used as the ranges directly.
  # Block lookup, positive form.
  def reduce_property(rest, [value, "block" = property], context, _line, _offset) do
    tracer(0, [value, :in, property])
    case fetch_property!(property, value) do
      %{parsed: parsed} -> {rest, [{:in, parsed}], context}
      ranges -> {rest, [{:in, ranges}], context}
    end
  end

  # Block lookup under a :not marker.
  def reduce_property(rest, [value, "block" = property, :not], context, _line, _offset) do
    tracer(1, [value, :in, property, :not])
    case fetch_property!(property, value) do
      %{parsed: parsed} -> {rest, [{:not_in, parsed}], context}
      ranges -> {rest, [{:not_in, ranges}], context}
    end
  end

  # name=value negated by a leading :not.
  def reduce_property(rest, [value, :in, property, :not], context, _line, _offset) do
    tracer(2, [value, :in, property, :not])
    case fetch_property!(property, value) do
      %{parsed: parsed} -> {rest, [{:not_in, parsed}], context}
      ranges -> {rest, [{:not_in, ranges}], context}
    end
  end

  # name≠value negated by a leading :not — double negation yields :in.
  def reduce_property(rest, [value, :not_in, property, :not], context, _line, _offset) do
    tracer(3, [value, :not_in, property, :not])
    case fetch_property!(property, value) do
      %{parsed: parsed} -> {rest, parsed, context}
      ranges -> {rest, [{:in, ranges}], context}
    end
  end

  # Plain name=value / name≠value: the operator is used as the tag.
  def reduce_property(rest, [value, operator, property], context, _line, _offset)
      when operator in [:in, :not_in] do
    tracer(4, [value, operator, property])
    case fetch_property!(property, value) do
      %{parsed: parsed} -> {rest, [{operator, parsed}], context}
      ranges -> {rest, [{operator, ranges}], context}
    end
  end

  # Bare negated name: looked up as a script-or-category, inverting any
  # tag already present in the parsed result.
  def reduce_property(rest, [value, :not], context, _line, _offset) do
    tracer(5, [value, :not])
    case fetch_property!(:script_or_category, value) do
      %{parsed: [{:not_in, parsed}]} -> {rest, [{:in, parsed}], context}
      %{parsed: [{:in, parsed}]} -> {rest, [{:not_in, parsed}], context}
      %{parsed: parsed} -> {rest, [{:not_in, parsed}], context}
      ranges -> {rest, [{:not_in, ranges}], context}
    end
  end

  # Bare name: looked up as a script-or-category, preserving any tag
  # already present in the parsed result.
  def reduce_property(rest, [value], context, _line, _offset) do
    tracer(6, [value])
    case fetch_property!(:script_or_category, value) do
      %{parsed: [{:not_in, parsed}]} -> {rest, [{:not_in, parsed}], context}
      %{parsed: [{:in, parsed}]} -> {rest, [{:in, parsed}], context}
      %{parsed: parsed} -> {rest, parsed, context}
      ranges -> {rest, [{:in, ranges}], context}
    end
  end
  @doc false
  # Case-insensitive "is" prefix (any of is/Is/iS/IS), replaced by the
  # literal property name "block".
  def is_block do
    choice([
      string("is") |> replace("block"),
      string("Is") |> replace("block"),
      string("iS") |> replace("block"),
      string("IS") |> replace("block")
    ])
    |> label("property name")
  end

  @doc false
  @alphanumeric [?a..?z, ?A..?Z, ?0..?9]
  # A property name: a leading alphanumeric, then alphanumerics, "_" or
  # spaces; normalized to a canonical key by to_lower_string/1.
  def property_name do
    ascii_char(@alphanumeric)
    |> repeat(ascii_char(@alphanumeric ++ [?_, ?\s]))
    |> ignore(optional(whitespace()))
    |> reduce(:to_lower_string)
    |> label("property name")
  end

  @doc false
  # A property value bounded by `gate` (the permitted character class):
  # one or more characters, each either a backslash escape (handled by
  # quoted/0) or a plain gated character; normalized like a name.
  def value(gate) do
    times(
      choice([
        ignore(ascii_char([?\\])) |> concat(quoted()),
        ascii_char(gate)
      ]),
      min: 1
    )
    |> reduce(:to_lower_string)
  end
@doc false
def to_lower_string(args) do
args
|> List.to_string()
|> String.replace(" ", "_")
|> String.downcase()
end
  @doc false
  @whitespace_chars [0x20, 0x9..0xD, 0x85, 0x200E, 0x200F, 0x2028, 0x2029]
  # A single whitespace codepoint from the pattern-whitespace list above.
  def whitespace_char do
    ascii_char(@whitespace_chars)
  end

  @doc false
  # One or more whitespace characters.
  def whitespace do
    times(whitespace_char(), min: 1)
  end

  @doc false
  # A braced string "{...}": one or more characters, with whitespace between
  # them and before the closing brace discarded; braces themselves dropped.
  def string do
    ignore(ascii_char([?{]))
    |> times(ignore(optional(whitespace())) |> concat(char()), min: 1)
    |> ignore(optional(whitespace()))
    |> ignore(ascii_char([?}]))
  end

  @doc false
  # ++ @whitespace_chars
  @syntax_chars [?&, ?-, ?[, ?], ?\\, ?{, ?}]
  @not_syntax_chars Enum.map(@syntax_chars, fn c -> {:not, c} end)
  # A single set character: either a backslash escape (delegated to quoted/0)
  # or any codepoint that is not one of the syntax characters above.
  def char do
    choice([
      ignore(ascii_char([?\\])) |> concat(quoted()),
      utf8_char(@not_syntax_chars)
    ])
  end
  @doc false
  # The body of a backslash escape (the "\" is consumed by the caller):
  # x0hhhhh / x10hhhh long forms, \N{name}, \u (4 hex digits or {..}),
  # \x (2 hex digits or {..}), or any single codepoint passed through.
  # The collected digits are folded by hex_to_codepoint/1.
  def quoted do
    choice([
      ignore(ascii_char([?x]))
      |> choice([
        ascii_char([?0]) |> times(hex(), 5),
        ascii_char([?1]) |> ascii_char([?0]) |> times(hex(), 4)
      ]),
      string("N{") |> concat(property_name()) |> ascii_char([?}]),
      ignore(ascii_char([?u])) |> choice([times(hex(), 4), bracketed_hex()]),
      ignore(ascii_char([?x])) |> choice([times(hex(), 2), bracketed_hex()]),
      utf8_char([0x0..0x10FFFF])
    ])
    |> reduce(:hex_to_codepoint)
    |> label("quoted character")
  end

  @doc false
  # "{h+ h+ ...}": one or more whitespace-separated hex codepoints inside
  # braces, wrapped into a single list.
  def bracketed_hex do
    ignore(ascii_char([?{]))
    |> ignore(optional(whitespace()))
    |> concat(hex_codepoint())
    |> repeat(ignore(optional(whitespace())) |> concat(hex_codepoint()))
    |> ignore(optional(whitespace()))
    |> ignore(ascii_char([?}]))
    |> wrap
    |> label("bracketed hex")
  end

  @doc false
  # One codepoint as hex digits: 1-5 digits, or the 6-digit "10hhhh" form
  # for the supplementary planes; wrapped into a list.
  def hex_codepoint do
    choice([
      times(hex(), min: 1, max: 5),
      ascii_char([?1]) |> ascii_char([?0]) |> times(hex(), 4)
    ])
    |> wrap
    |> label("hex codepoint")
  end

  @doc false
  # A single hexadecimal digit.
  def hex do
    ascii_char([?a..?f, ?A..?F, ?0..?9])
    |> label("hex character")
  end
  @doc false
  # It's just an escaped char: \t, \n, \r and any non-"hex" letter pass
  # through as their literal codepoint.
  # NOTE(review): is_hex_digit/1 accepts all of a-z/A-Z, so an escaped
  # letter outside t/n/r (e.g. "\q") skips the passthrough clause and hits
  # the base-16 conversion below, which raises — confirm this is intended.
  def hex_to_codepoint([?t]), do: ?\t
  def hex_to_codepoint([?n]), do: ?\n
  def hex_to_codepoint([?r]), do: ?\r
  def hex_to_codepoint([c]) when not is_hex_digit(c), do: c
  # Actual hex-encoded codepoints: a list of digit groups (from
  # bracketed_hex/0) is converted element-wise ...
  def hex_to_codepoint([arg | _rest] = args) when is_list(arg) do
    Enum.map(args, &hex_to_codepoint/1)
  end

  # ... and a flat digit list is parsed as one base-16 integer.
  def hex_to_codepoint(args) do
    args
    |> List.to_string()
    |> String.to_integer(16)
  end
  @doc false
  # Applied to a regex
  # Quantifier suffix: * (0..inf), + (1..inf), ? (0..1), or an explicit
  # {m,n} form; each is replaced by a {:repeat, min: _, max: _} instruction.
  def repetition do
    ignore(optional(whitespace()))
    |> choice([
      ascii_char([?*]) |> replace({:repeat, min: 0, max: :infinity}),
      ascii_char([?+]) |> replace({:repeat, min: 1, max: :infinity}),
      ascii_char([??]) |> replace({:repeat, min: 0, max: 1}),
      iterations()
    ])
  end

  @doc false
  # The explicit "{m , n}" quantifier: two positive integers separated by a
  # comma, whitespace tolerated throughout; folded by iteration/1.
  def iterations do
    ignore(ascii_char([?{]))
    |> ignore(optional(whitespace()))
    |> integer(min: 1)
    |> ignore(optional(whitespace()))
    |> ignore(ascii_char([?,]))
    |> ignore(optional(whitespace()))
    |> integer(min: 1)
    |> ignore(optional(whitespace()))
    |> ignore(ascii_char([?}]))
    |> reduce(:iteration)
  end
@doc false
def iteration([from, to]) do
{:repeat, min: from, max: to}
end
  @doc false
  # End-of-input anchor "$" (leading whitespace discarded), replaced by :end.
  def anchor do
    ignore(optional(whitespace())) |> ascii_char([?$]) |> replace(:end)
  end

  # Helpers
  # -------
  # Renders two charlist endpoints as "{abc}-{def}" for error messages.
  defp format_string_range(from, to) do
    "{#{List.to_string(from)}}-{#{List.to_string(to)}}"
  end
end
| 26.152083 | 90 | 0.592687 |
9ee54cec6ef07b847f5483c86ce76940ef39a296 | 1,786 | ex | Elixir | clients/games/lib/google_api/games/v1/model/category.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/games/lib/google_api/games/v1/model/category.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/category.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Games.V1.Model.Category do
@moduledoc """
This is a JSON template for data related to individual game categories.
## Attributes
* `category` (*type:* `String.t`, *default:* `nil`) - The category name.
* `experiencePoints` (*type:* `String.t`, *default:* `nil`) - Experience points earned in this category.
* `kind` (*type:* `String.t`, *default:* `games#category`) - Uniquely identifies the type of this resource. Value is always the fixed string games#category.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:category => String.t(),
:experiencePoints => String.t(),
:kind => String.t()
}
field(:category)
field(:experiencePoints)
field(:kind)
end
defimpl Poison.Decoder, for: GoogleApi.Games.V1.Model.Category do
def decode(value, options) do
GoogleApi.Games.V1.Model.Category.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Games.V1.Model.Category do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.698113 | 160 | 0.712766 |
9ee584a0552ac13a0d5b8d9a4fc3d6b1a74b7126 | 961 | ex | Elixir | clients/calendar/lib/google_api/calendar/v3/request_builder.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/calendar/lib/google_api/calendar/v3/request_builder.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/calendar/lib/google_api/calendar/v3/request_builder.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Calendar.V3.RequestBuilder do
@moduledoc """
Helper functions for building Tesla requests.
This module is no longer used. Please use GoogleApi.Gax.Request instead.
"""
end
| 36.961538 | 77 | 0.763788 |
9ee5856094ea3147c1283b7e39857fff2c39a7ef | 14,590 | exs | Elixir | test/validation_checks_test.exs | kor-and-m/exop | 774d14a5c680b98481b333d75d8e2918aa2a9341 | [
"MIT"
] | null | null | null | test/validation_checks_test.exs | kor-and-m/exop | 774d14a5c680b98481b333d75d8e2918aa2a9341 | [
"MIT"
] | null | null | null | test/validation_checks_test.exs | kor-and-m/exop | 774d14a5c680b98481b333d75d8e2918aa2a9341 | [
"MIT"
] | null | null | null | defmodule ValidationChecksTest do
use ExUnit.Case, async: false
doctest Exop.ValidationChecks
import Exop.ValidationChecks
defmodule TestStruct do
defstruct [:qwerty]
end
defmodule TestStruct2 do
defstruct [:qwerty]
end
test "get_check_item/2: returns value by key either from Keyword or Map" do
assert get_check_item(%{a: 1, b: 2}, :a) == 1
assert get_check_item([a: 1, b: 2], :b) == 2
end
test "get_check_item/2: returns nil if key was not found" do
assert is_nil(get_check_item(%{a: 1, b: 2}, :c))
assert is_nil(get_check_item([a: 1, b: 2], :c))
end
test "check_item_present?/2: checks whether a param has been provided" do
assert check_item_present?(%{a: 1, b: 2}, :a) == true
assert check_item_present?([a: 1, b: 2], :b) == true
assert check_item_present?([a: 1, b: nil], :b) == true
assert check_item_present?(%{a: 1, b: 2}, :c) == false
assert check_item_present?([a: 1, b: 2], :c) == false
end
test "check_required/3: returns true if required = false" do
assert check_required(%{}, :some_item, false) == true
end
test "check_required/3: returns true if item is in params and required = true" do
assert check_required([a: 1, b: 2], :a, true) == true
end
test "check_required/3: returns %{item_name => error_msg} if item is not in params and required = true" do
%{c: reason} = check_required([a: 1, b: 2], :c, true)
assert is_binary(reason)
end
test "check_required/3: returns true if item is in params and equal to false and required = true" do
assert check_required([a: false, b: 2], :a, true)
end
test "check_type/3: returns true if item is not in params" do
assert check_type(%{}, :a, :integer) == true
end
test "check_type/3: returns true if item is of unhandled type" do
assert check_type(%{a: 1}, :a, :unhandled) == true
end
test "check_type/3: returns true if item is one of handled type" do
assert check_type(%{a: 1}, :a, :integer) == true
end
test "check_type/3: returns %{item_name => error_msg} if item is not of needed type" do
%{a: reason} = check_type(%{a: "1"}, :a, :integer)
assert is_binary(reason)
end
test "check_type/3: returns false if item is nil but type is not atom" do
assert check_type(%{a: nil}, :a, :string) == %{:a => "has wrong type"}
end
test "check_type/3: checks module" do
defmodule TestModule do
end
assert check_type(%{a: TestModule}, :a, :module) == true
assert check_type(%{a: TestModule_2}, :a, :module) == %{a: "has wrong type"}
assert check_type(%{a: :atom}, :a, :module) == %{a: "has wrong type"}
assert check_type(%{a: 1}, :a, :module) == %{a: "has wrong type"}
end
test "check_type/3: checks keyword" do
assert check_type(%{a: [b: 1, c: "2"]}, :a, :keyword) == true
assert check_type(%{a: [{:b, 1}, {:c, "2"}]}, :a, :keyword) == true
assert check_type(%{a: []}, :a, :keyword) == true
assert check_type(%{a: :atom}, :a, :keyword) == %{a: "has wrong type"}
assert check_type(%{a: %{b: 1, c: "2"}}, :a, :keyword) == %{a: "has wrong type"}
end
test "check_type/3: checks structs" do
assert check_type(%{a: %TestStruct{qwerty: :asdfgh}}, :a, :struct) == true
assert check_type(%{a: %TestStruct{}}, :a, :struct) == true
assert check_type(%{a: %{b: 1, c: "2"}}, :a, :struct) == %{a: "has wrong type"}
assert check_type(%{a: %{}}, :a, :struct) == %{a: "has wrong type"}
assert check_type(%{a: [{:b, 1}, {:c, "2"}]}, :a, :struct) == %{a: "has wrong type"}
assert check_type(%{a: :atom}, :a, :struct) == %{a: "has wrong type"}
end
test "check_type/3: checks uuids" do
# uuid 1
assert check_type(%{a: "9689317e-39ac-11e9-b210-d663bd873d93"}, :a, :uuid) == true
# uuid 4
assert check_type(%{a: "7b79b77b-bc4c-4de1-a81f-1a07fc3289c2"}, :a, :uuid) == true
assert check_type(%{a: ""}, :a, :uuid) == %{a: "has wrong type"}
assert check_type(%{a: "qwerty"}, :a, :uuid) == %{a: "has wrong type"}
assert check_type(%{a: "qwerty-asdf"}, :a, :uuid) == %{a: "has wrong type"}
assert check_type(%{a: :b}, :a, :uuid) == %{a: "has wrong type"}
assert check_type(%{a: 1}, :a, :uuid) == %{a: "has wrong type"}
end
test "check_numericality/3: returns %{item_name => error_msg} if item is in params and is not a number" do
%{a: reason} = check_numericality(%{a: "1"}, :a, %{less_than: 3})
assert is_binary(reason)
end
test "check_numericality/3: returns true if item is not in params" do
assert check_numericality(%{a: 1}, :b, %{less_than: 3}) == true
end
test "check_numericality/3: fails" do
[%{a: _}] = check_numericality(%{a: 1}, :a, %{equal_to: 3})
[%{a: _}] = check_numericality(%{a: 1}, :a, %{eq: 3})
[%{a: _}] = check_numericality(%{a: 1}, :a, %{greater_than: 3})
[%{a: _}] = check_numericality(%{a: 1}, :a, %{gt: 3})
[%{a: _}] = check_numericality(%{a: 1}, :a, %{greater_than_or_equal_to: 3})
[%{a: _}] = check_numericality(%{a: 1}, :a, %{gte: 3})
[%{a: _}] = check_numericality(%{a: 5}, :a, %{less_than: 3})
[%{a: _}] = check_numericality(%{a: 5}, :a, %{lt: 3})
[%{a: _}] = check_numericality(%{a: 5}, :a, %{less_than_or_equal_to: 3})
[%{a: _}] = check_numericality(%{a: 5}, :a, %{lte: 3})
end
test "check_numericality/3: successes" do
assert check_numericality(%{a: 3}, :a, %{equal_to: 3}) == true
assert check_numericality(%{a: 3}, :a, %{eq: 3}) == true
assert check_numericality(%{a: 5}, :a, %{greater_than: 3}) == true
assert check_numericality(%{a: 5}, :a, %{gt: 3}) == true
assert check_numericality(%{a: 3}, :a, %{greater_than_or_equal_to: 3}) == true
assert check_numericality(%{a: 3}, :a, %{gte: 3}) == true
assert check_numericality(%{a: 5}, :a, %{greater_than_or_equal_to: 3}) == true
assert check_numericality(%{a: 5}, :a, %{gte: 3}) == true
assert check_numericality(%{a: 2}, :a, %{less_than: 3}) == true
assert check_numericality(%{a: 2}, :a, %{lt: 3}) == true
assert check_numericality(%{a: 3}, :a, %{less_than_or_equal_to: 3}) == true
assert check_numericality(%{a: 3}, :a, %{lte: 3}) == true
assert check_numericality(%{a: 2}, :a, %{less_than_or_equal_to: 3}) == true
assert check_numericality(%{a: 2}, :a, %{lte: 3}) == true
end
test "check_in/3: returns true if check values is not a list" do
assert check_in(%{a: 1}, :a, 2) == true
end
test "check_in/3: returns true if item is in check values list" do
assert check_in(%{a: 1}, :a, [1, 2, 3]) == true
end
test "check_in/3: returns %{item_name => error_msg} if item is not in check values list" do
%{a: _} = check_in(%{a: 4}, :a, [1, 2, 3])
end
test "check_not_in/3: returns true if check values is not a list" do
assert check_not_in(%{a: 1}, :a, 2) == true
end
test "check_not_in/3: returns true if item is not in check values list" do
assert check_not_in(%{a: 4}, :a, [1, 2, 3]) == true
end
test "check_not_in/3: returns %{item_name => error_msg} if item is in check values list" do
%{a: _} = check_not_in(%{a: 3}, :a, [1, 2, 3])
end
test "check_format/3: returns true unless item is not a string" do
assert check_format(%{a: 1}, :a, ~r/a/) == true
end
test "check_format/3: returns true if item is in valid format" do
assert check_format(%{a: "bar"}, :a, ~r/bar/) == true
end
test "check_format/3: returns %{item_name => error_msg} unless item is in valid format" do
%{a: _} = check_format(%{a: "foo"}, :a, ~r/bar/)
end
test "check_regex/3: returns true unless item is not a string" do
assert check_regex(%{a: 1}, :a, ~r/a/) == true
end
test "check_regex/3: returns true if item is in valid format" do
assert check_regex(%{a: "bar"}, :a, ~r/bar/) == true
end
test "check_regex/3: returns %{item_name => error_msg} unless item is in valid format" do
%{a: _} = check_regex(%{a: "foo"}, :a, ~r/bar/)
end
test "check_length/3: treat nil item's length as 0" do
assert check_length(%{}, :a, %{min: 0}) == [true]
end
test "check_length/3: successes" do
assert check_length(%{a: "123"}, :a, %{min: 0}) == [true]
assert check_length(%{a: "123"}, :a, %{max: 4}) == [true]
assert check_length(%{a: "123"}, :a, %{is: 3}) == [true]
assert check_length(%{a: "123"}, :a, %{in: 2..4}) == [true]
assert check_length(%{a: 3}, :a, %{min: 0}) == [true]
assert check_length(%{a: 3}, :a, %{max: 4}) == [true]
assert check_length(%{a: 3}, :a, %{is: 3}) == [true]
assert check_length(%{a: 3}, :a, %{in: 2..4}) == [true]
assert check_length(%{a: ~w(1 2 3)}, :a, %{min: 0}) == [true]
assert check_length(%{a: ~w(1 2 3)}, :a, %{max: 4}) == [true]
assert check_length(%{a: ~w(1 2 3)}, :a, %{is: 3}) == [true]
assert check_length(%{a: ~w(1 2 3)}, :a, %{in: 2..4}) == [true]
end
test "check_length/3: fails" do
[%{a: _}] = check_length(%{a: "123"}, :a, %{min: 4})
[%{a: _}] = check_length(%{a: "123"}, :a, %{max: 2})
[%{a: _}] = check_length(%{a: "123"}, :a, %{is: 4})
[%{a: _}] = check_length(%{a: "123"}, :a, %{in: 4..6})
[%{a: _}] = check_length(%{a: 3}, :a, %{min: 4})
[%{a: _}] = check_length(%{a: 3}, :a, %{max: 2})
[%{a: _}] = check_length(%{a: 3}, :a, %{is: 4})
[%{a: _}] = check_length(%{a: 3}, :a, %{in: 4..6})
[%{a: _}] = check_length(%{a: ~w(1 2 3)}, :a, %{min: 4})
[%{a: _}] = check_length(%{a: ~w(1 2 3)}, :a, %{max: 2})
[%{a: _}] = check_length(%{a: ~w(1 2 3)}, :a, %{is: 4})
[%{a: _}] = check_length(%{a: ~w(1 2 3)}, :a, %{in: 4..6})
end
test "check_struct/3: successes" do
struct = %TestStruct{qwerty: "123"}
assert check_struct(%{a: struct}, :a, %TestStruct{}) == true
assert check_struct(%{a: struct}, :a, TestStruct) == true
end
test "check_struct/3: fails" do
assert check_struct(%{a: %TestStruct2{}}, :a, %TestStruct{}) == %{a: "is not expected struct"}
assert check_struct(%{a: %TestStruct2{qwerty: "123"}}, :a, %TestStruct{}) == %{a: "is not expected struct"}
assert check_struct(%{a: %TestStruct2{}}, :a, %TestStruct{qwerty: "123"}) == %{a: "is not expected struct"}
assert check_struct(%{a: %TestStruct2{qwerty: "123"}}, :a, %TestStruct{qwerty: "123"}) == %{a: "is not expected struct"}
assert check_struct(%{a: %TestStruct2{}}, :a, TestStruct) == %{a: "is not expected struct"}
assert check_struct(%{a: %TestStruct2{qwerty: "123"}}, :a, TestStruct) == %{a: "is not expected struct"}
end
test "check_equals/3: success" do
assert check_equals(%{a: 1.0}, :a, 1.0) == true
assert check_equals(%{a: :a}, :a, :a) == true
assert check_equals(%{a: [b: 2, c: 3]}, :a, [b: 2, c: 3]) == true
assert check_equals(%{a: [b: 2, c: 3]}, :a, [{:b, 2}, {:c, 3}]) == true
assert check_equals(%{a: %{b: 2, c: 3}}, :a, %{b: 2, c: 3}) == true
end
test "check_equals/3: fails" do
assert check_equals(%{a: 1.0}, :a, 1) == %{a: "must be equal to 1"}
assert check_equals(%{a: 1.0}, :a, 1.1) == %{a: "must be equal to 1.1"}
assert check_equals(%{a: :a}, :a, :b) == %{a: "must be equal to :b"}
assert check_equals(%{a: [b: 2, c: 3]}, :a, [b: 2, c: 1]) == %{a: "must be equal to [b: 2, c: 1]"}
assert check_equals(%{a: [b: 2, c: 3]}, :a, [{:b, 2}]) == %{a: "must be equal to [b: 2]"}
assert check_equals(%{a: %{b: 2, c: 3}}, :a, %{b: 2, d: 3}) == %{a: "must be equal to %{b: 2, d: 3}"}
end
test "check_exactly/3: success" do
assert check_exactly(%{a: 1.0}, :a, 1.0) == true
assert check_exactly(%{a: :a}, :a, :a) == true
assert check_exactly(%{a: [b: 2, c: 3]}, :a, [b: 2, c: 3]) == true
assert check_exactly(%{a: [b: 2, c: 3]}, :a, [{:b, 2}, {:c, 3}]) == true
assert check_exactly(%{a: %{b: 2, c: 3}}, :a, %{b: 2, c: 3}) == true
end
test "check_exactly/3: fails" do
assert check_exactly(%{a: 1.0}, :a, 1) == %{a: "must be equal to 1"}
assert check_exactly(%{a: 1.0}, :a, 1.1) == %{a: "must be equal to 1.1"}
assert check_exactly(%{a: :a}, :a, :b) == %{a: "must be equal to :b"}
assert check_exactly(%{a: [b: 2, c: 3]}, :a, [b: 2, c: 1]) == %{a: "must be equal to [b: 2, c: 1]"}
assert check_exactly(%{a: [b: 2, c: 3]}, :a, [{:b, 2}]) == %{a: "must be equal to [b: 2]"}
assert check_exactly(%{a: %{b: 2, c: 3}}, :a, %{b: 2, d: 3}) == %{a: "must be equal to %{b: 2, d: 3}"}
end
def validation(params, :a, param_value), do: validation(params, param_value)
def validation(_params, :b, _param_value), do: false
def validation(_params, param_value) do
param_value > 99
end
def validation_verbose(params, :a, param_value), do: validation_verbose(params, param_value)
def validation_verbose(_params, :b, _param_value), do: false
def validation_verbose(_params, param_value) do
if param_value > 99 do
true
else
{:error, "Custom error message"}
end
end
test "check_func/3: success" do
assert check_func(%{a: 100}, :a, &__MODULE__.validation/2) == true
assert check_func(%{a: 100}, :a, &__MODULE__.validation_verbose/2) == true
end
test "check_func/3: fails" do
assert check_func(%{a: 98}, :a, &__MODULE__.validation/2) == %{a: "isn't valid"}
assert check_func(%{a: 98}, :a, &__MODULE__.validation_verbose/2) == %{a: "Custom error message"}
end
test "check_func/3: validation func can expect 3 args: params, param_name and param_value " do
assert check_func(%{a: 100}, :a, &__MODULE__.validation/3) == true
assert check_func(%{a: 100}, :a, &__MODULE__.validation_verbose/3) == true
assert check_func(%{b: 100}, :b, &__MODULE__.validation_verbose/3) == %{b: "isn't valid"}
assert check_func(%{a: 98}, :a, &__MODULE__.validation/3) == %{a: "isn't valid"}
assert check_func(%{a: 98}, :a, &__MODULE__.validation_verbose/3) == %{a: "Custom error message"}
assert check_func(%{b: 98}, :b, &__MODULE__.validation_verbose/3) == %{b: "isn't valid"}
end
test "check_numericality/3: aliases" do
assert check_numericality(%{a: 3}, :a, %{equals: 3}) == true
assert check_numericality(%{a: 2}, :a, %{equals: 3}) == [%{a: "must be equal to 3"}]
assert check_numericality(%{a: 3}, :a, %{is: 3}) == true
assert check_numericality(%{a: 2}, :a, %{is: 3}) == [%{a: "must be equal to 3"}]
assert check_numericality(%{a: 3}, :a, %{min: 1}) == true
assert check_numericality(%{a: 1}, :a, %{min: 3}) == [%{a: "must be greater than or equal to 3"}]
assert check_numericality(%{a: 1}, :a, %{max: 3}) == true
assert check_numericality(%{a: 3}, :a, %{max: 1}) == [%{a: "must be less than or equal to 1"}]
end
end
| 43.422619 | 124 | 0.586361 |
9ee59b6edabf8a93866678ceda6d48e83f3c94d9 | 379 | exs | Elixir | priv/repo/seeds.exs | normanpatrick/simple-nested-resources | ff93b20d55cc621830942ff7806dec0ded42997f | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | normanpatrick/simple-nested-resources | ff93b20d55cc621830942ff7806dec0ded42997f | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | normanpatrick/simple-nested-resources | ff93b20d55cc621830942ff7806dec0ded42997f | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# SimpleNestedResources.Repo.insert!(%SimpleNestedResources.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 31.583333 | 77 | 0.728232 |
9ee61d8c091f86fee3cc2dd347e006ac19369dd8 | 2,665 | ex | Elixir | apps/eigr_functions/lib/functions/supervisor.ex | sleipnir/permastate-elixir | 43cda3a186819c9cfd3ce993907f24251c9ef42b | [
"Apache-2.0"
] | 2 | 2021-07-28T23:02:24.000Z | 2021-07-29T00:34:42.000Z | apps/eigr_functions/lib/functions/supervisor.ex | sleipnir/permastate-elixir | 43cda3a186819c9cfd3ce993907f24251c9ef42b | [
"Apache-2.0"
] | null | null | null | apps/eigr_functions/lib/functions/supervisor.ex | sleipnir/permastate-elixir | 43cda3a186819c9cfd3ce993907f24251c9ef42b | [
"Apache-2.0"
] | 1 | 2021-07-28T21:31:12.000Z | 2021-07-28T21:31:12.000Z | defmodule Functions.Supervisor do
@moduledoc """
A Eigr Functions Elixir SDK.
You can add it to your Supervisor tree as below.
defmodule Shoppingcart.Application do
@moduledoc false
use Application
@impl true
def start(_type, _args) do
action = %Functions.Entity{
name: Atom.to_string(Com.Example.Shoppingcart.ShoppingCart),
type: :action,
service: Com.Example.Shoppingcart.ShoppingCart.Service,
descriptor: File.read(Path.join(:code.priv_dir(:shoppingcart), "user-function.desc"))
}
entity_spec = %Functions.EntitySpec{
service_name: Atom.to_string(Com.Example.Shoppingcart.ShoppingCart),
entities: [action],
port: 8080
}
children = [
{Functions.Supervisor, entity_spec}
]
opts = [strategy: :one_for_one, name: Shoppingcart.Supervisor]
Supervisor.start_link(children, opts)
end
end
"""
use Supervisor
alias Functions.{Entity, EntitySpec}
@impl true
def init(entity_spec) do
Application.put_env(:grpc, :start_server, true, persistent: true)
children =
[
{Registry, keys: :unique, name: FunctionsRegistry}
] ++
get_discovery(entity_spec) ++
get_children(entity_spec) ++
get_grpc(entity_spec)
Supervisor.init(children, strategy: :one_for_one)
end
@spec start_link(any) :: :ignore | {:error, any} | {:ok, pid}
@doc false
def start_link(entity_spec) do
Supervisor.start_link(__MODULE__, entity_spec, name: __MODULE__)
end
defp get_children(%EntitySpec{} = entity_spec) do
entity_spec.entities
|> Enum.map(®ister_supervisor/1)
|> Enum.reduce([], fn elem, acc ->
[elem] ++ acc
end)
end
defp register_supervisor(%Entity{type: type, service: service} = _entity) do
case type do
:action ->
put_env(:action_service, service)
Functions.Action.Supervisor
:crdt ->
put_env(:crdt_service, service)
Functions.CRDT.Supervisor
:event_sourced ->
put_env(:event_sourced_service, service)
Functions.EventSourced.Supervisor
:value_entity ->
put_env(:value_service, service)
Functions.ValueEntity.Supervisor
_ ->
{nil, nil}
end
end
defp get_discovery(entity_spec),
do: [{Functions.EntityDiscovery.Handler, entity_spec}]
defp get_grpc(%EntitySpec{port: port} = _entity_spec),
do: [{GRPC.Server.Supervisor, {Functions.Endpoint, port}}]
defp put_env(key, value),
do: Application.put_env(:eigr_functions, key, value, persistent: true)
end
| 26.386139 | 95 | 0.64728 |
9ee630b7edc00efd72c0ff3fb07a14f0d783c8d5 | 1,738 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_create_test_return_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_create_test_return_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_create_test_return_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.OrdersCreateTestReturnRequest do
@moduledoc """
## Attributes
- items ([OrdersCustomBatchRequestEntryCreateTestReturnReturnItem]): Returned items. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:items =>
list(
GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntryCreateTestReturnReturnItem.t()
)
}
field(
:items,
as: GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntryCreateTestReturnReturnItem,
type: :list
)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersCreateTestReturnRequest do
def decode(value, options) do
GoogleApi.Content.V2.Model.OrdersCreateTestReturnRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersCreateTestReturnRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 31.6 | 107 | 0.746835 |
9ee63265bd7fbd3dd6b33655ae3fe9731f5da571 | 17,279 | ex | Elixir | lib/logger/lib/logger.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/logger/lib/logger.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/logger/lib/logger.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | defmodule Logger do
@moduledoc ~S"""
A logger for Elixir applications.
It includes many features:
* Provides debug, info, warn and error levels.
* Supports multiple backends which are automatically
supervised when plugged into Logger.
* Formats and truncates messages on the client
to avoid clogging logger backends.
* Alternates between sync and async modes to remain
performant when required but also apply backpressure
when under stress.
* Wraps OTP's `error_logger` to prevent it from
overflowing.
## Levels
The supported levels are:
* `:debug` - for debug-related messages
* `:info` - for information of any kind
* `:warn` - for warnings
* `:error` - for errors
## Configuration
Logger supports a wide range of configurations.
This configuration is split in three categories:
* Application configuration - must be set before the logger
application is started
* Runtime configuration - can be set before the logger
application is started, but may be changed during runtime
* Error logger configuration - configuration for the
wrapper around OTP's `error_logger`
### Application configuration
The following configuration must be set via config files
before the logger application is started.
* `:backends` - the backends to be used. Defaults to `[:console]`.
See the "Backends" section for more information.
* `:compile_time_purge_level` - purge all calls that have log level
lower than the configured value at compilation time. This means the
Logger call will be completely removed at compile time, accruing
no overhead at runtime. Defaults to `:debug` and only
applies to the `Logger.debug`, `Logger.info`, etc style of calls.
* `:compile_time_application` - sets the `:application` metadata value
to the configured value at compilation time. This configuration is
usually only useful for build tools to automatically add the
application to the metadata for `Logger.debug`, `Logger.info`, etc
style of calls.
For example, to configure the `:backends` and `compile_time_purge_level`
in a `config/config.exs` file:
config :logger,
backends: [:console],
compile_time_purge_level: :info
### Runtime Configuration
All configuration below can be set via config files but also
changed dynamically during runtime via `Logger.configure/1`.
* `:level` - the logging level. Attempting to log any message
with severity less than the configured level will simply
cause the message to be ignored. Keep in mind that each backend
may have its specific level, too.
* `:utc_log` - when `true`, uses UTC in logs. By default it uses
local time (i.e. it defaults to `false`).
* `:truncate` - the maximum message size to be logged. Defaults
to 8192 bytes. Note this configuration is approximate. Truncated
messages will have `" (truncated)"` at the end.
* `:sync_threshold` - if the logger manager has more than
`sync_threshold` messages in its queue, Logger will change
to sync mode, to apply backpressure to the clients.
Logger will return to async mode once the number of messages
in the queue is reduced to `sync_threshold * 0.75` messages.
Defaults to 20 messages.
* `:translator_inspect_opts` - when translating OTP reports and
errors, the last message and state must be inspected in the
error reports. This configuration allow developers to change
how much and how the data should be inspected.
For example, to configure the `:level` and `:truncate` in a
`config/config.exs` file:
config :logger,
level: :warn,
truncate: 4096
### Error logger configuration
The following configuration applies to the Logger wrapper around
Erlang's `error_logger`. All the configurations below must be set
before the logger application starts.
* `:handle_otp_reports` - redirects OTP reports to Logger so
they are formatted in Elixir terms. This uninstalls Erlang's
logger that prints terms to terminal. Defaults to `true`.
* `:handle_sasl_reports` - redirects supervisor, crash and
progress reports to Logger so they are formatted in Elixir
terms. This uninstalls `sasl`'s logger that prints these
reports to the terminal. Defaults to `false`.
* `:discard_threshold_for_error_logger` - a value that, when
reached, triggers the error logger to discard messages. This
value must be a positive number that represents the maximum
number of messages accepted per second. Once above this
threshold, the `error_logger` enters discard mode for the
remainder of that second. Defaults to 500 messages.
For example, to configure Logger to redirect all `error_logger` messages
using a `config/config.exs` file:
config :logger,
handle_otp_reports: true,
handle_sasl_reports: true
Furthermore, Logger allows messages sent by Erlang's `error_logger`
to be translated into an Elixir format via translators. Translators
can be dynamically added at any time with the `add_translator/1`
and `remove_translator/1` APIs. Check `Logger.Translator` for more
information.
## Backends
Logger supports different backends where log messages are written to.
The available backends by default are:
* `:console` - logs messages to the console (enabled by default)
Developers may also implement their own backends, an option that
is explored with detail below.
The initial backends are loaded via the `:backends` configuration,
which must be set before the logger application is started.
### Console backend
The console backend logs message to the console. It supports the
following options:
* `:level` - the level to be logged by this backend.
Note that messages are first filtered by the general
`:level` configuration in `:logger`
* `:format` - the format message used to print logs.
Defaults to: `"$time $metadata[$level] $levelpad$message\n"`
* `:metadata` - the metadata to be printed by `$metadata`.
Defaults to an empty list (no metadata)
* `:colors` - a keyword list of coloring options.
The supported keys in the `:colors` keyword list are:
* `:enabled` - boolean value that allows for switching the
coloring on and off. Defaults to: `IO.ANSI.enabled?`
* `:debug` - color for debug messages. Defaults to: `:cyan`
* `:info` - color for info messages. Defaults to: `:normal`
* `:warn` - color for warn messages. Defaults to: `:yellow`
* `:error` - color for error messages. Defaults to: `:red`
See the `IO.ANSI` module for a list of colors and attributes.
Here is an example of how to configure the `:console` backend in a
`config/config.exs` file:
config :logger, :console,
format: "\n$time $metadata[$level] $levelpad$message\n"
metadata: [:user_id]
You can read more about formatting in `Logger.Formatter`.
### Custom backends
Any developer can create their own backend for Logger.
Since Logger is an event manager powered by `GenEvent`,
writing a new backend is a matter of creating an event
handler, as described in the `GenEvent` module.
From now on, we will be using the term "event handler" to refer
to your custom backend, as we head into implementation details.
Once Logger starts, it installs all event handlers under
the `:backends` configuration into the Logger event manager.
The event manager and all added event handlers are
automatically supervised by Logger.
Once initialized, the handler should be designed to handle events
in the following format:
{level, group_leader,
{Logger, message, timestamp, metadata}}
The level is one of `:debug`, `:info`, `:warn` or `:error`,
as previously described, the group leader is the group
leader of the process who logged the message, followed by
a tuple starting with the atom `Logger`, the message as
chardata, the timestamp and a keyword list of metadata.
It is recommended that handlers ignore messages where
the group leader is in a different node than the one
the handler is installed.
Furthermore, backends can be configured via the
`configure_backend/2` function which requires event handlers
to handle calls of the following format:
{:configure, options}
where options is a keyword list. The result of the call is
the result returned by `configure_backend/2`. The recommended
return value for successful configuration is `:ok`.
It is recommended that backends support at least the following
configuration values:
* `level` - the logging level for that backend
* `format` - the logging format for that backend
* `metadata` - the metadata to include the backend
Check the implementation for `Logger.Backends.Console` for
examples on how to handle the recommendations in this section
and how to process the existing options.
"""
@type backend :: GenEvent.handler
@type message :: IO.chardata | String.Chars.t
@type level :: :error | :info | :warn | :debug
@levels [:error, :info, :warn, :debug]
@metadata :logger_metadata
@compile {:inline, __metadata__: 0}
defp __metadata__ do
Process.get(@metadata) || {true, []}
end
@doc """
Adds the given keyword list to the current process metadata.
"""
def metadata(dict) do
{enabled, metadata} = __metadata__()
metadata =
Enum.reduce(dict, metadata, fn
{key, nil}, acc -> Keyword.delete(acc, key)
{key, val}, acc -> Keyword.put(acc, key, val)
end)
Process.put(@metadata, {enabled, metadata})
:ok
end
@doc """
Reads the current process metadata.
"""
def metadata() do
__metadata__() |> elem(1)
end
@doc """
Enables logging for the current process.
Currently the only accepted process is self().
"""
def enable(pid) when pid == self() do
Process.put(@metadata, {true, metadata()})
:ok
end
@doc """
Disables logging for the current process.
Currently the only accepted process is self().
"""
def disable(pid) when pid == self() do
Process.put(@metadata, {false, metadata()})
:ok
end
@doc """
Retrieves the logger level.
The logger level can be changed via `configure/1`.
"""
@spec level() :: level
def level() do
%{level: level} = Logger.Config.__data__
level
end
@doc """
Compare log levels.
Receives two log levels and compares the `left`
against `right` and returns `:lt`, `:eq` or `:gt`.
"""
@spec compare_levels(level, level) :: :lt | :eq | :gt
def compare_levels(level, level), do:
:eq
def compare_levels(left, right), do:
if(level_to_number(left) > level_to_number(right), do: :gt, else: :lt)
defp level_to_number(:debug), do: 0
defp level_to_number(:info), do: 1
defp level_to_number(:warn), do: 2
defp level_to_number(:error), do: 3
@doc """
Configures the logger.
See the "Runtime Configuration" section in `Logger` module
documentation for the available options.
"""
@valid_options [:compile_time_purge_level, :compile_time_application, :sync_threshold, :truncate, :level, :utc_log]
def configure(options) do
Logger.Config.configure(Dict.take(options, @valid_options))
end
@doc """
Flushes the Logger.
This basically guarantees all messages sent to the
Logger prior to this call will be processed. This is useful
for testing and it should not be called in production code.
"""
@spec flush :: :ok
def flush do
_ = GenEvent.which_handlers(:error_logger)
_ = GenEvent.which_handlers(Logger)
:ok
end
@doc """
Adds a new backend.
## Options
* `:flush` - when `true`, guarantees all messages currently sent
to both Logger and Erlang's `error_logger` are processed before
the backend is added
"""
def add_backend(backend, opts \\ []) do
_ = if opts[:flush], do: GenEvent.which_handlers(:error_logger)
case Logger.Watcher.watch(Logger, Logger.Config.translate_backend(backend), backend) do
{:ok, _} = ok ->
Logger.Config.add_backend(backend)
ok
{:error, _} = error ->
error
end
end
@doc """
Removes a backend.
## Options
* `:flush` - when `true`, guarantees all messages currently sent
to both Logger and Erlang's `error_logger` are processed before
the backend is removed
"""
def remove_backend(backend, opts \\ []) do
_ = if opts[:flush], do: GenEvent.which_handlers(:error_logger)
Logger.Config.remove_backend(backend)
Logger.Watcher.unwatch(Logger, Logger.Config.translate_backend(backend))
end
@doc """
Adds a new translator.
"""
def add_translator({mod, fun} = translator) when is_atom(mod) and is_atom(fun) do
Logger.Config.add_translator(translator)
end
@doc """
Removes a translator.
"""
def remove_translator({mod, fun} = translator) when is_atom(mod) and is_atom(fun) do
Logger.Config.remove_translator(translator)
end
@doc """
Configures the given backend.

The backends needs to be started and running in order to
be configured at runtime.
"""
@spec configure_backend(backend, Keyword.t) :: term
def configure_backend(backend, options) when is_list(options) do
  # Synchronous call straight to the backend's handler inside the Logger
  # event manager; returns whatever the handler replies to `{:configure, options}`.
  GenEvent.call(Logger, Logger.Config.translate_backend(backend), {:configure, options})
end
@doc """
Logs a message dynamically.

Use this function only when there is a need to
explicitly avoid embedding metadata.
"""
@spec bare_log(level, message | (() -> message), Keyword.t) ::
      :ok | {:error, :noproc} | {:error, term}
def bare_log(level, chardata_or_fn, metadata \\ [])
    when level in @levels and is_list(metadata) do
  case __metadata__() do
    {true, pdict} ->
      %{mode: mode, truncate: truncate,
        level: min_level, utc_log: utc_log?} = Logger.Config.__data__

      if compare_levels(level, min_level) != :lt do
        # Explicitly passed metadata wins over the process-stored metadata;
        # the caller pid is always prepended.
        metadata = [pid: self()] ++ Keyword.merge(pdict, metadata)
        tuple = {Logger, truncate(chardata_or_fn, truncate),
                 Logger.Utils.timestamp(utc_log?), metadata}

        try do
          notify(mode, {level, Process.group_leader(), tuple})
          :ok
        rescue
          # Raised when the Logger event manager is not running.
          ArgumentError -> {:error, :noproc}
        catch
          # A sync notify may exit if the manager dies mid-call.
          :exit, reason -> {:error, reason}
        end
      else
        # Below the runtime minimum level: drop silently.
        :ok
      end

    {false, _} ->
      # Logging was disabled for the current process.
      :ok
  end
end
@doc """
Logs a warning.

## Examples

    Logger.warn "knob turned too far to the right"
    Logger.warn fn -> "expensive to calculate warning" end

"""
# Expands via `maybe_log/4`, which removes the call entirely at compile
# time when :warn is below the configured :compile_time_purge_level.
defmacro warn(chardata_or_fn, metadata \\ []) do
  maybe_log(:warn, chardata_or_fn, metadata, __CALLER__)
end
@doc """
Logs some info.

## Examples

    Logger.info "mission accomplished"
    Logger.info fn -> "expensive to calculate info" end

"""
# Expands via `maybe_log/4`, which removes the call entirely at compile
# time when :info is below the configured :compile_time_purge_level.
defmacro info(chardata_or_fn, metadata \\ []) do
  maybe_log(:info, chardata_or_fn, metadata, __CALLER__)
end
@doc """
Logs an error.

## Examples

    Logger.error "oops"
    Logger.error fn -> "expensive to calculate error" end

"""
# Expands via `maybe_log/4`; :error is the highest level, so it is only
# purged when :compile_time_purge_level is set above :error.
defmacro error(chardata_or_fn, metadata \\ []) do
  maybe_log(:error, chardata_or_fn, metadata, __CALLER__)
end
@doc """
Logs a debug message.

## Examples

    Logger.debug "hello?"
    Logger.debug fn -> "expensive to calculate debug" end

"""
# Expands via `maybe_log/4`; :debug is the lowest level, so any higher
# :compile_time_purge_level removes these calls at compile time.
defmacro debug(chardata_or_fn, metadata \\ []) do
  maybe_log(:debug, chardata_or_fn, metadata, __CALLER__)
end
@doc """
Logs a message.

Developers should rather use the macros `Logger.debug/2`,
`Logger.warn/2`, `Logger.info/2` or `Logger.error/2` instead
of this macro as they can automatically eliminate
the Logger call altogether at compile time if desired.
"""
# Unlike the level-specific macros, this expands through `macro_log/4`
# directly, so no compile-time purge check is performed.
defmacro log(level, chardata_or_fn, metadata \\ []) do
  macro_log(level, chardata_or_fn, metadata, __CALLER__)
end
# Builds the quoted `Logger.bare_log/3` call injected at the macro call
# site, tagging the message with compile-time caller metadata (module,
# function, line and — when configured — the application name).
defp macro_log(level, data, metadata, caller) do
  %{module: module, function: fun, line: line} = caller
  compile_metadata = [module: module, function: form_fa(fun), line: line]

  # Under modern Elixir scoping, a rebinding inside `if ... do ... end` does
  # not leak out of the block, so the original `caller = [...] ++ caller`
  # inside the `if` would be silently discarded. Compute the final metadata
  # as the value of the expression instead.
  compile_metadata =
    case Application.get_env(:logger, :compile_time_application) do
      nil -> compile_metadata
      app -> [application: app] ++ compile_metadata
    end

  quote do
    Logger.bare_log(unquote(level), unquote(data), unquote(compile_metadata) ++ unquote(metadata))
  end
end
# Expands to the real log call only when `level` is not below the
# compile-time purge level; otherwise the whole macro call becomes `:ok`.
defp maybe_log(level, data, metadata, caller) do
  purge_level = Application.get_env(:logger, :compile_time_purge_level, :debug)

  case compare_levels(level, purge_level) do
    :lt -> :ok
    _ -> macro_log(level, data, metadata, caller)
  end
end
# Truncates the message before it is dispatched. A zero-arity function is
# invoked lazily here — after the level check in `bare_log/3` — so expensive
# messages are only computed when they will actually be logged.
defp truncate(data, n) when is_function(data, 0),
  do: Logger.Utils.truncate(data.(), n)
defp truncate(data, n) when is_list(data) or is_binary(data),
  do: Logger.Utils.truncate(data, n)
# Any other term goes through the String.Chars protocol first.
defp truncate(data, n),
  do: Logger.Utils.truncate(to_string(data), n)
# Formats a `{function_name, arity}` pair as `"name/arity"`; passes `nil`
# through unchanged for code evaluated outside of any function.
defp form_fa({name, arity}), do: "#{name}/#{arity}"
defp form_fa(nil), do: nil
# Dispatches the event to the Logger event manager according to the current
# mode: `:sync` applies backpressure by waiting for the event to be handled,
# `:async` returns immediately.
defp notify(:sync, msg), do: GenEvent.sync_notify(Logger, msg)
defp notify(:async, msg), do: GenEvent.notify(Logger, msg)
end
| 31.021544 | 117 | 0.686035 |
9ee64c1984bf3dde946c7a31f9d6d05133d03c08 | 3,918 | ex | Elixir | lib/trans/query_builder.ex | goravbhootra/trans | d8976406c801639a16b4be0edd163998eb23ddd9 | [
"Apache-2.0"
] | null | null | null | lib/trans/query_builder.ex | goravbhootra/trans | d8976406c801639a16b4be0edd163998eb23ddd9 | [
"Apache-2.0"
] | null | null | null | lib/trans/query_builder.ex | goravbhootra/trans | d8976406c801639a16b4be0edd163998eb23ddd9 | [
"Apache-2.0"
] | null | null | null | if Code.ensure_loaded?(Ecto.Adapters.SQL) do
defmodule Trans.QueryBuilder do
@moduledoc """
Adds conditions to `Ecto` queries on translated fields.
"""
@doc """
Generates a SQL fragment for accessing a translated field in an `Ecto.Query`.
The generated SQL fragment can be coupled with the rest of the functions and
operators provided by `Ecto.Query` and `Ecto.Query.API`.
## Safety
This macro will emit errors when used with untranslatable
schema modules or fields. Errors are emited during the compilation phase
thus avoiding runtime errors after the queries are built.
## Usage examples
Imagine that we have an _Article_ schema declared as follows:
defmodule Article do
use Ecto.Schema
use Trans, translates: [:title, :body]
schema "articles" do
field :title, :string
field :body, :string
field :translations, :map
end
end
**Query for items that have a certain translation**
This `Ecto.Query` will return all _Articles_ that have an Spanish translation:
iex> Repo.all(from a in Article,
...> where: not is_nil(translated(Article, a, :es)))
The generated SQL is:
SELECT a0."id", a0."title", a0."body", a0."translations"
FROM "articles" AS a0
WHERE (NOT ((a0."translations"->"es") IS NULL))
**Query for items with a certain translated value**
This query will return all articles whose French title matches the _"Elixir"_:
iex> Repo.all(from a in Article,
...> where: translated(Article, a.title, :fr) == "Elixir")
The generated SQL is:
SELECT a0."id", a0."title", a0."body", a0."translations"
FROM "articles" AS a0
WHERE ((a0."translations"->"fr"->>"title") = "Elixir")
**Query for items using a case insensitive comparison**
This query will return all articles that contain "elixir" in their Spanish
body, igoring case.
iex> Repo.all(from a in Article,
...> where: ilike(translated(Article, a.body, :es), "%elixir%"))
The generated SQL is:
SELECT a0."id", a0."title", a0."body", a0."translations"
FROM "articles" AS a0
WHERE ((a0."translations"->"es"->>"body") ILIKE "%elixir%")
**More complex queries**
The `translated/3` macro can also be used with relations and joined schemas.
For more complex examples take a look at the QueryBuilder tests (the file
is locaed in `test/query_builder_test.ex`).
"""
defmacro translated(module, translatable, locale) do
with field <- field(translatable) do
module = Macro.expand(module, __CALLER__)
validate_field(module, field)
generate_query(schema(translatable), module, field, locale)
end
end
defp generate_query(schema, module, nil, locale) do
quote do
fragment(
"(?->?)",
field(unquote(schema), unquote(module.__trans__(:container))),
^to_string(unquote(locale))
)
end
end
defp generate_query(schema, module, field, locale) do
quote do
fragment(
"(?->?->>?)",
field(unquote(schema), unquote(module.__trans__(:container))),
^to_string(unquote(locale)),
^unquote(field)
)
end
end
defp schema({{:., _, [schema, _field]}, _metadata, _args}), do: schema
defp schema(schema), do: schema
defp field({{:., _, [_schema, field]}, _metadata, _args}), do: to_string(field)
defp field(_), do: nil
defp validate_field(module, field) do
cond do
is_nil(field) ->
nil
not Trans.translatable?(module, field) ->
raise ArgumentError,
message: "'#{inspect(module)}' module must declare '#{field}' as translatable"
true ->
nil
end
end
end
end
| 29.908397 | 90 | 0.616386 |
9ee69421c5b6ec0d58422aaf9bb8219d9150cc3d | 788 | ex | Elixir | Microsoft.Azure.Management.Features/lib/microsoft/azure/management/features/model/feature_operations_list_result.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Features/lib/microsoft/azure/management/features/model/feature_operations_list_result.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Features/lib/microsoft/azure/management/features/model/feature_operations_list_result.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Features.Model.FeatureOperationsListResult do
@moduledoc """
List of previewed features.
"""
@derive [Poison.Encoder]
defstruct [
:"value",
:"nextLink"
]
@type t :: %__MODULE__{
:"value" => [FeatureResult],
:"nextLink" => String.t
}
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Features.Model.FeatureOperationsListResult do
  import Microsoft.Azure.Management.Features.Deserializer

  # Decode each raw map in "value" into a FeatureResult struct; "nextLink"
  # is a plain string and needs no further deserialization.
  def decode(value, options) do
    deserialize(value, :"value", :list, Microsoft.Azure.Management.Features.Model.FeatureResult, options)
  end
end
| 26.266667 | 101 | 0.729695 |
9ee7476a19edc4f2011092554d6500e0ca56fb34 | 5,369 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/api/conversions.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/api/conversions.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/api/conversions.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V28.Api.Conversions do
  @moduledoc """
  API calls for all endpoints tagged `Conversions`.
  """

  alias GoogleApi.DFAReporting.V28.Connection
  alias GoogleApi.Gax.{Request, Response}

  @doc """
  Inserts conversions.

  ## Parameters

  - connection (GoogleApi.DFAReporting.V28.Connection): Connection to server
  - profile_id (String.t): User profile ID associated with this request.
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :body (ConversionsBatchInsertRequest):

  ## Returns

  {:ok, %GoogleApi.DFAReporting.V28.Model.ConversionsBatchInsertResponse{}} on success
  {:error, info} on failure
  """
  @spec dfareporting_conversions_batchinsert(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V28.Model.ConversionsBatchInsertResponse.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_conversions_batchinsert(
        connection,
        profile_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported option to where it is placed in the HTTP request
    # (query string or request body).
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/dfareporting/v2.8/userprofiles/{profileId}/conversions/batchinsert", %{
        "profileId" => URI.encode_www_form(profile_id)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    # Decode the JSON response body into the batch-insert response struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.DFAReporting.V28.Model.ConversionsBatchInsertResponse{}]
    )
  end

  @doc """
  Updates existing conversions.

  ## Parameters

  - connection (GoogleApi.DFAReporting.V28.Connection): Connection to server
  - profile_id (String.t): User profile ID associated with this request.
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :body (ConversionsBatchUpdateRequest):

  ## Returns

  {:ok, %GoogleApi.DFAReporting.V28.Model.ConversionsBatchUpdateResponse{}} on success
  {:error, info} on failure
  """
  @spec dfareporting_conversions_batchupdate(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V28.Model.ConversionsBatchUpdateResponse.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_conversions_batchupdate(
        connection,
        profile_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Same option routing as batchinsert; only the URL and response struct differ.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/dfareporting/v2.8/userprofiles/{profileId}/conversions/batchupdate", %{
        "profileId" => URI.encode_www_form(profile_id)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.DFAReporting.V28.Model.ConversionsBatchUpdateResponse{}]
    )
  end
end
| 38.078014 | 170 | 0.687279 |
9ee753068dd8ee372d893cc18c629874a1d61cca | 614 | ex | Elixir | web/views/bottle_view.ex | maxdec/cellar | 056202b1d90748378e16edc9fdcade8c0491f2b1 | [
"MIT"
] | null | null | null | web/views/bottle_view.ex | maxdec/cellar | 056202b1d90748378e16edc9fdcade8c0491f2b1 | [
"MIT"
] | null | null | null | web/views/bottle_view.ex | maxdec/cellar | 056202b1d90748378e16edc9fdcade8c0491f2b1 | [
"MIT"
] | null | null | null | defmodule Cellar.BottleView do
use Cellar.Web, :view
def render("index.json", %{bottles: bottles}) do
%{data: render_many(bottles, Cellar.BottleView, "bottle.json")}
end
def render("show.json", %{bottle: bottle}) do
%{data: render_one(bottle, Cellar.BottleView, "bottle.json")}
end
def render("bottle.json", %{bottle: bottle}) do
%{
id: bottle.id,
acquisition: bottle.acquisition,
degustation: bottle.degustation,
notes: bottle.notes,
row: bottle.row,
col: bottle.col,
wine: render_one(bottle.wine, Cellar.WineView, "wine.json")
}
end
end
| 25.583333 | 67 | 0.649837 |
9ee76aad6a9d78ca0f85425fc6725f6f70e7b770 | 7,185 | ex | Elixir | clients/content/lib/google_api/content/v21/api/pubsubnotificationsettings.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/content/lib/google_api/content/v21/api/pubsubnotificationsettings.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/content/lib/google_api/content/v21/api/pubsubnotificationsettings.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Api.Pubsubnotificationsettings do
  @moduledoc """
  API calls for all endpoints tagged `Pubsubnotificationsettings`.
  """

  alias GoogleApi.Content.V21.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Library version reported with each request; read from the Mix project
  # config at compile time.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Retrieves a Merchant Center account's pubsub notification settings.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V21.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account for which to get pubsub notification settings.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V21.Model.PubsubNotificationSettings{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_pubsubnotificationsettings_get(
          Tesla.Env.client(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Content.V21.Model.PubsubNotificationSettings.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def content_pubsubnotificationsettings_get(
        connection,
        merchant_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Every supported option is sent as a query-string parameter.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{merchantId}/pubsubnotificationsettings", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.Content.V21.Model.PubsubNotificationSettings{}]
    )
  end

  @doc """
  Register a Merchant Center account for pubsub notifications. Note that cloud topic name should not be provided as part of the request.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V21.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:body` (*type:* `GoogleApi.Content.V21.Model.PubsubNotificationSettings.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V21.Model.PubsubNotificationSettings{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_pubsubnotificationsettings_update(
          Tesla.Env.client(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Content.V21.Model.PubsubNotificationSettings.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def content_pubsubnotificationsettings_update(
        connection,
        merchant_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Same as the `get` variant, plus the settings payload in the body.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:put)
      |> Request.url("/{merchantId}/pubsubnotificationsettings", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.Content.V21.Model.PubsubNotificationSettings{}]
    )
  end
end
| 41.293103 | 196 | 0.627975 |
9ee78e95e5098a78ec932dde3f5f71e23f8a8cee | 717 | ex | Elixir | lib/visualizer_web/gettext.ex | ajanes/booster-ui | 63a1cd32f873d11ff2448666692c2f637dfc466b | [
"CC-BY-3.0",
"BSD-3-Clause"
] | null | null | null | lib/visualizer_web/gettext.ex | ajanes/booster-ui | 63a1cd32f873d11ff2448666692c2f637dfc466b | [
"CC-BY-3.0",
"BSD-3-Clause"
] | null | null | null | lib/visualizer_web/gettext.ex | ajanes/booster-ui | 63a1cd32f873d11ff2448666692c2f637dfc466b | [
"CC-BY-3.0",
"BSD-3-Clause"
] | null | null | null | defmodule VisualizerWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import VisualizerWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :visualizer
end
| 28.68 | 72 | 0.684798 |
9ee7df2913139e5b653b4a3d2dd6b1194e98549c | 7,027 | ex | Elixir | services/interop-proxy/lib/interop_proxy/sanitize.ex | RemusW/orchestra | 688c0d2a25d8f73c9c5a6e27d4fe4b2460c2c198 | [
"MIT"
] | null | null | null | services/interop-proxy/lib/interop_proxy/sanitize.ex | RemusW/orchestra | 688c0d2a25d8f73c9c5a6e27d4fe4b2460c2c198 | [
"MIT"
] | null | null | null | services/interop-proxy/lib/interop_proxy/sanitize.ex | RemusW/orchestra | 688c0d2a25d8f73c9c5a6e27d4fe4b2460c2c198 | [
"MIT"
] | null | null | null | defmodule InteropProxy.Sanitize do
@moduledoc """
Translates the interop server responses to our own and vise-versa.
"""
# Aliasing the main messages.
alias InteropProxy.Message.Interop.{
Position, AerialPosition, InteropMission, Obstacles, InteropTelem, Odlc,
OdlcList, InteropMessage
}
# Aliasing the nested messages.
alias InteropProxy.Message.Interop.InteropMission.FlyZone
alias InteropProxy.Message.Interop.Obstacles.{
StationaryObstacle, MovingObstacle
}
def sanitize_mission(nil) do
%InteropMission{
time: time(),
current_mission: false
}
end
def sanitize_mission(mission) do
%InteropMission{
time: time(),
current_mission: true,
air_drop_pos: mission["air_drop_pos"] |> sanitize_position,
fly_zones: mission["fly_zones"] |> sanitize_fly_zones,
home_pos: mission["home_pos"] |> sanitize_position,
waypoints: mission["mission_waypoints"] |> sanitize_aerial_position,
off_axis_pos: mission["off_axis_odlc_pos"] |> sanitize_position,
emergent_pos: mission["emergent_last_known_pos"] |> sanitize_position,
search_area: mission["search_grid_points"] |> sanitize_aerial_position
}
end
defp sanitize_fly_zones(fly_zones) do
fly_zones
|> Enum.map(fn fly_zone ->
%FlyZone{
alt_msl_max: fly_zone["altitude_msl_max"] |> meters,
alt_msl_min: fly_zone["altitude_msl_min"] |> meters,
boundary: fly_zone["boundary_pts"] |> sanitize_position
}
end)
end
def sanitize_obstacles(obstacles) do
%Obstacles{
time: time(),
stationary: obstacles["stationary_obstacles"]
|> sanitize_stationary_obstacles,
moving: obstacles["moving_obstacles"]
|> sanitize_moving_obstacles
}
end
defp sanitize_stationary_obstacles(stationary) do
stationary
|> Enum.map(fn obs ->
%StationaryObstacle{
pos: obs |> sanitize_position,
height: obs["cylinder_height"] |> meters,
radius: obs["cylinder_radius"] |> meters
}
end)
end
defp sanitize_moving_obstacles(moving) do
moving
|> Enum.map(fn obs ->
%MovingObstacle{
pos: obs |> sanitize_aerial_position,
radius: obs["sphere_radius"] |> meters
}
end)
end
def sanitize_outgoing_telemetry(%InteropTelem{} = telem) do
%{
latitude: telem.pos |> sanitize_outgoing_latitude,
longitude: telem.pos |> sanitize_outgoing_longitude,
altitude_msl: telem.pos.alt_msl |> feet,
uas_heading: telem.yaw
}
end
def sanitize_odlc(odlc, image \\ <<>>) do
%Odlc{
time: time(),
id: odlc["id"],
type: odlc["type"] |> string_to_atom(:type),
pos: odlc |> sanitize_position,
orientation: odlc["orientation"] |> sanitize_orientation,
shape: odlc["shape"] |> string_to_atom(:shape),
background_color: odlc["background_color"] |> string_to_atom(:color),
alphanumeric: odlc["alphanumeric"],
alphanumeric_color: odlc["alphanumeric_color"] |> string_to_atom(:color),
description: odlc["description"],
autonomous: odlc["autonomous"],
image: image
}
end
def sanitize_odlc_list(odlcs) do
time = time()
%OdlcList{time: time, list: Enum.map(odlcs, &Map.put(&1, :time, time))}
end
def sanitize_outgoing_odlc(%Odlc{type: :EMERGENT} = odlc) do
outgoing_odlc = %{
type: odlc.type |> atom_to_string,
latitude: odlc.pos |> sanitize_outgoing_latitude,
longitude: odlc.pos |> sanitize_outgoing_longitude,
description: parse_string(odlc.description),
autonomous: odlc.autonomous |> (&(if &1 === nil, do: false, else: &1)).()
}
{outgoing_odlc, odlc.image |> (&(if &1 === nil, do: <<>>, else: &1)).()}
end
def sanitize_outgoing_odlc(%Odlc{} = odlc) do
outgoing_odlc = %{
type: odlc.type |> atom_to_string,
latitude: odlc.pos |> sanitize_outgoing_latitude,
longitude: odlc.pos |> sanitize_outgoing_longitude,
orientation: odlc.orientation |> sanitize_outgoing_orientation,
shape: odlc.shape |> atom_to_string,
background_color: odlc.background_color |> atom_to_string,
alphanumeric: odlc.alphanumeric,
alphanumeric_color: odlc.alphanumeric_color |> atom_to_string,
autonomous: odlc.autonomous |> (&(if &1 === nil, do: false, else: &1)).()
}
{outgoing_odlc, odlc.image |> (&(if &1 === nil, do: <<>>, else: &1)).()}
end
def sanitize_message(text) do
%InteropMessage{
time: time(),
text: text
}
end
defp sort_order(list) do
list
|> Enum.sort(fn a, b -> a["order"] < b["order"] end)
end
defp sanitize_position(pos) when is_list(pos) do
pos
|> sort_order
|> Enum.map(&sanitize_position/1)
end
defp sanitize_position(pos) do
%Position{
lat: pos["latitude"],
lon: pos["longitude"]
}
end
defp sanitize_aerial_position(pos) when is_list(pos) do
pos
|> sort_order
|> Enum.map(&sanitize_aerial_position/1)
end
defp sanitize_aerial_position(pos) do
%AerialPosition{
lat: pos["latitude"],
lon: pos["longitude"],
alt_msl: pos["altitude_msl"] |> meters
}
end
defp sanitize_outgoing_latitude(%Position{} = pos), do: pos.lat
defp sanitize_outgoing_latitude(%AerialPosition{} = pos), do: pos.lat
defp sanitize_outgoing_latitude(nil), do: 0.0
defp sanitize_outgoing_longitude(%Position{} = pos), do: pos.lon
defp sanitize_outgoing_longitude(%AerialPosition{} = pos), do: pos.lon
defp sanitize_outgoing_longitude(nil), do: 0.0
defp sanitize_orientation(string) do
case string do
nil -> :UNKNOWN_ORIENTATION
"n" -> :NORTH
"ne" -> :NORTHEAST
"e" -> :EAST
"se" -> :SOUTHEAST
"s" -> :SOUTH
"sw" -> :SOUTHWEST
"w" -> :WEST
"nw" -> :NORTHWEST
end
end
defp sanitize_outgoing_orientation(nil), do: nil
defp sanitize_outgoing_orientation(atom) do
case atom do
:UNKNOWN_ORIENTATION -> nil
:NORTH -> "n"
:NORTHEAST -> "ne"
:EAST -> "e"
:SOUTHEAST -> "se"
:SOUTH -> "s"
:SOUTHWEST -> "sw"
:WEST -> "w"
:NORTHWEST -> "nw"
end
end
defp meters(feet), do: feet * 0.3048
defp feet(meters), do: meters / 0.3048
defp string_to_atom(nil, :shape), do: :UNKNOWN_SHAPE
defp string_to_atom(nil, :color), do: :UNKNOWN_COLOR
defp string_to_atom(string, _), do: string |> String.upcase |> String.to_atom
defp atom_to_string(nil), do: nil
defp atom_to_string(:UNKNOWN_SHAPE), do: nil
defp atom_to_string(:UNKNOWN_COLOR), do: nil
defp atom_to_string(atom), do: atom |> Atom.to_string |> String.downcase
defp parse_string(<<>>), do: nil
defp parse_string(string), do: string
defp time() do
milliseconds = DateTime.utc_now()
|> DateTime.to_unix(:millisecond)
milliseconds / 1000
end
end
| 29.157676 | 79 | 0.642237 |
9ee8015f866b45dd7288e4430f3688577c5c748a | 1,173 | exs | Elixir | bench/iso.exs | IanLuites/utc_datetime | 0abe1d016e2bc7823860e9f402645e16a885aa5d | [
"MIT"
] | 1 | 2020-01-12T03:40:17.000Z | 2020-01-12T03:40:17.000Z | bench/iso.exs | IanLuites/utc_datetime | 0abe1d016e2bc7823860e9f402645e16a885aa5d | [
"MIT"
] | null | null | null | bench/iso.exs | IanLuites/utc_datetime | 0abe1d016e2bc7823860e9f402645e16a885aa5d | [
"MIT"
] | null | null | null | datetime = DateTime.utc_now()
utc_datetime = UTCDateTime.from_datetime(datetime)
naive_datetime = UTCDateTime.to_naive(utc_datetime)
Benchee.run(
%{
"DateTime" => &DateTime.from_iso8601/1,
"NaiveDateTime" => &NaiveDateTime.from_iso8601/1,
"UTCDateTime" => &UTCDateTime.from_rfc3339/1
},
inputs: %{
"2019-01-01t23:00:01" => "2019-01-01T23:00:01",
"2019-01-01t23:00:01-07:00" => "2019-01-01T23:00:01-07:00"
}
)
Benchee.run(%{
"DateTime" => fn -> DateTime.to_iso8601(datetime) end,
"NaiveDateTime" => fn -> NaiveDateTime.to_iso8601(naive_datetime) end,
"UTCDateTime" => fn -> UTCDateTime.to_rfc3339(utc_datetime) end
})
Benchee.run(
%{
"DateTime" => &DateTime.from_iso8601/1,
"NaiveDateTime" => &NaiveDateTime.from_iso8601/1,
"UTCDateTime" => &UTCDateTime.from_iso8601/1
},
inputs: %{
"2019-01-01t23:00:01" => "2019-01-01T23:00:01",
"2019-01-01t23:00:01-07:00" => "2019-01-01T23:00:01-07:00"
}
)
Benchee.run(%{
"DateTime" => fn -> DateTime.to_iso8601(datetime) end,
"NaiveDateTime" => fn -> NaiveDateTime.to_iso8601(naive_datetime) end,
"UTCDateTime" => fn -> UTCDateTime.to_iso8601(utc_datetime) end
})
| 29.325 | 72 | 0.675192 |
9ee82788e6e843de58b1f1f189f3cdc07b5807a6 | 1,772 | ex | Elixir | test/support/model_case.ex | chayelheinsen/ElixirBook | 40797c0d84c101ae331100a4b8467df95fa9efd6 | [
"MIT"
] | null | null | null | test/support/model_case.ex | chayelheinsen/ElixirBook | 40797c0d84c101ae331100a4b8467df95fa9efd6 | [
"MIT"
] | null | null | null | test/support/model_case.ex | chayelheinsen/ElixirBook | 40797c0d84c101ae331100a4b8467df95fa9efd6 | [
"MIT"
] | null | null | null | defmodule Book.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  # Code injected into every module that `use`s Book.ModelCase (via
  # ExUnit.CaseTemplate): brings the repo alias, the Ecto helpers, and this
  # module's own helpers (e.g. `errors_on/2`) into scope of each model test.
  using do
    quote do
      alias Book.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import Book.ModelCase
    end
  end
  # Runs before every test: checks out a sandboxed DB connection so each test
  # runs in its own transaction. For non-async tests the connection is put in
  # shared mode so processes spawned by the test can use it too.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Book.Repo)
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Book.Repo, {:shared, self()})
    end
    :ok
  end
@doc """
Helper for returning list of errors in a struct when given certain data.
## Examples
Given a User schema that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "is unsafe"} in changeset.errors
true
"""
def errors_on(struct, data) do
struct.__struct__.changeset(struct, data)
|> Ecto.Changeset.traverse_errors(&Book.ErrorHelpers.translate_error/1)
|> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
end
end
| 26.848485 | 84 | 0.681716 |
9ee82f9451073a522bc7ca3eea27586698f1b5fd | 611 | ex | Elixir | web/controllers/api/v1/researcher_controller.ex | mciastek/emotions-wheel-backend | 072a88b3ad15b2c6d2aad414e6b7cfc8fb4a70bf | [
"MIT"
] | null | null | null | web/controllers/api/v1/researcher_controller.ex | mciastek/emotions-wheel-backend | 072a88b3ad15b2c6d2aad414e6b7cfc8fb4a70bf | [
"MIT"
] | null | null | null | web/controllers/api/v1/researcher_controller.ex | mciastek/emotions-wheel-backend | 072a88b3ad15b2c6d2aad414e6b7cfc8fb4a70bf | [
"MIT"
] | null | null | null | defmodule EmotionsWheelBackend.ResearcherController do
use EmotionsWheelBackend.Web, :controller
alias EmotionsWheelBackend.{Repo, Researcher}
def index(conn, _params) do
researchers = Researcher |> Repo.all
render(conn, "index.json", researchers: researchers)
end
def show(conn, %{"id" => id}) do
researcher = Researcher |> Repo.get_by(id: id)
case researcher do
nil ->
conn
|> put_status(:not_found)
|> render("error.json", message: "Couldn't find matching researcher")
_ -> render(conn, "show.json", researcher: researcher)
end
end
end
| 26.565217 | 77 | 0.672668 |
9ee854b9c622fa719ef204865b01e6df38281a5b | 2,969 | ex | Elixir | lib/mandrill/senders.ex | whitepaperclip/mandrill-elixir | 865388876bf856700c65a83a9a2f581c3e195842 | [
"MIT"
] | null | null | null | lib/mandrill/senders.ex | whitepaperclip/mandrill-elixir | 865388876bf856700c65a83a9a2f581c3e195842 | [
"MIT"
] | null | null | null | lib/mandrill/senders.ex | whitepaperclip/mandrill-elixir | 865388876bf856700c65a83a9a2f581c3e195842 | [
"MIT"
] | null | null | null | defmodule Mandrill.Senders do
@moduledoc """
Senders calls for Mandrill.
"""
@doc """
Return the senders that have tried to
use this account.
"""
def list do
params = [key: Mandrill.key()]
Mandrill.request("senders/list", params)
end
@doc """
Returns the sender domains that have
been added to this account.
"""
def domains do
params = [key: Mandrill.key()]
Mandrill.request("senders/domains", params)
end
@doc """
Adds a sender domain to your account.
Sender domains are added automatically
as you send, but you can use this call
to add them ahead of time.
"""
def add_domain(params) when is_list(params) do
Mandrill.request("senders/add-domain", Enum.concat([key: Mandrill.key()], params))
end
def add_domain(domain) do
params = [
key: Mandrill.key(),
domain: domain
]
Mandrill.request("senders/add-domain", params)
end
@doc """
Checks the SPF and DKIM settings for a
domain. If you haven't already added
this domain to your account, it will be
added automatically.
"""
def check_domain(params) when is_list(params) do
Mandrill.request("senders/check-domain", Enum.concat([key: Mandrill.key()], params))
end
def check_domain(domain) do
params = [
key: Mandrill.key(),
domain: domain
]
Mandrill.request("senders/check-domain", params)
end
@doc """
Sends a verification email in order to
verify ownership of a domain. Domain
verification is an optional step to
confirm ownership of a domain. Once a
domain has been verified in a Mandrill
account, other accounts may not have
their messages signed by that domain
unless they also verify the domain.
This prevents other Mandrill accounts
from sending mail signed by your domain.
"""
def verify_domain(params) when is_list(params) do
Mandrill.request("senders/verify-domain", Enum.concat([key: Mandrill.key()], params))
end
def verify_domain(domain, mailbox) do
params = [
key: Mandrill.key(),
domain: domain,
mailbox: mailbox
]
Mandrill.request("senders/verify-domain", params)
end
@doc """
Return more detailed information about a
single sender, including aggregates of
recent stats
"""
def info(params) when is_list(params) do
Mandrill.request("senders/info", Enum.concat([key: Mandrill.key()], params))
end
def info(address) do
params = [
key: Mandrill.key(),
address: address
]
Mandrill.request("senders/info", params)
end
@doc """
Return the recent history (hourly stats
for the last 30 days) for a sender
"""
def time_series(params) when is_list(params) do
Mandrill.request("senders/time-series", Enum.concat([key: Mandrill.key()], params))
end
def time_series(address) do
params = [
key: Mandrill.key(),
address: address
]
Mandrill.request("senders/time-series", params)
end
end
| 24.138211 | 89 | 0.669586 |
9ee8590589af2788b452ca3577486868d32b3b45 | 3,606 | exs | Elixir | test/romeo/connection_test.exs | LaudateCorpus1/romeo | 22a7280d0908107ebffefd25d7e051fdb9fab0b2 | [
"MIT"
] | 7 | 2018-08-21T15:58:40.000Z | 2020-02-29T20:39:42.000Z | test/romeo/connection_test.exs | LaudateCorpus1/romeo | 22a7280d0908107ebffefd25d7e051fdb9fab0b2 | [
"MIT"
] | null | null | null | test/romeo/connection_test.exs | LaudateCorpus1/romeo | 22a7280d0908107ebffefd25d7e051fdb9fab0b2 | [
"MIT"
] | 5 | 2020-04-18T15:43:13.000Z | 2022-03-17T18:17:38.000Z | defmodule Romeo.ConnectionTest do
use ExUnit.Case
use UserHelper
use Romeo.XML
  # Registers two TLS-capable XMPP users and makes them mutual presence
  # subscribers before each test; both user maps go into the test context.
  setup do
    romeo = build_user("romeo", tls: true)
    juliet = build_user("juliet", resource: "juliet", tls: true)
    setup_presence_subscriptions(romeo[:nickname], juliet[:nickname])
    {:ok, romeo: romeo, juliet: juliet}
  end
test "connection no TLS" do
romeo = build_user("romeo")
{:ok, _pid} = Romeo.Connection.start_link(romeo)
assert_receive {:resource_bound, _}
assert_receive :connection_ready
end
test "connection TLS", %{romeo: romeo} do
{:ok, _pid} = Romeo.Connection.start_link(romeo)
assert_receive {:resource_bound, _}
assert_receive :connection_ready
end
test "sending presence", %{romeo: romeo} do
{:ok, pid} = Romeo.Connection.start_link(romeo)
assert_receive :connection_ready
assert :ok = Romeo.Connection.send(pid, Romeo.Stanza.presence())
assert_receive {:stanza, %Presence{from: from, to: to} = presence}
assert to_string(from) == "romeo@localhost/romeo"
assert to_string(to) == "romeo@localhost/romeo"
assert :ok =
Romeo.Connection.send(pid, Romeo.Stanza.join("[email protected]", "romeo"))
assert_receive {:stanza, %Presence{from: from} = presence}
assert to_string(from) == "[email protected]/romeo"
end
test "resource conflict", %{romeo: romeo} do
{:ok, pid1} = Romeo.Connection.start_link(romeo)
assert_receive :connection_ready
assert :ok = Romeo.Connection.send(pid1, Romeo.Stanza.presence())
{:ok, pid2} = Romeo.Connection.start_link(romeo)
assert_receive :connection_ready
assert :ok = Romeo.Connection.send(pid2, Romeo.Stanza.presence())
assert_receive {:stanza, %{name: "stream:error"}}
assert_receive {:stanza, xmlstreamend()}
end
test "exchanging messages with others", %{romeo: romeo, juliet: juliet} do
{:ok, romeo} = Romeo.Connection.start_link(romeo)
assert_receive :connection_ready
assert :ok = Romeo.Connection.send(romeo, Romeo.Stanza.presence())
# Romeo receives presense from himself
assert_receive {:stanza, %Presence{}}
{:ok, juliet} = Romeo.Connection.start_link(juliet)
assert_receive :connection_ready
assert :ok = Romeo.Connection.send(juliet, Romeo.Stanza.presence())
# Juliet receives presence from herself and each receive each others'
assert_receive {:stanza, %Presence{}}
assert_receive {:stanza, %Presence{}}
assert_receive {:stanza, %Presence{}}
# Juliet sends Romeo a message
assert :ok =
Romeo.Connection.send(
juliet,
Romeo.Stanza.chat("romeo@localhost/romeo", "Where art thou?")
)
assert_receive {:stanza, %Message{from: from, to: to, body: body}}
assert to_string(from) == "juliet@localhost/juliet"
assert to_string(to) == "romeo@localhost/romeo"
assert body == "Where art thou?"
# Romeo responds
assert :ok =
Romeo.Connection.send(
romeo,
Romeo.Stanza.chat("juliet@localhost/juliet", "Hey babe")
)
assert_receive {:stanza, %Message{from: from, to: to, body: body}}
assert to_string(from) == "romeo@localhost/romeo"
assert to_string(to) == "juliet@localhost/juliet"
assert body == "Hey babe"
end
test "close connection", %{romeo: romeo} do
{:ok, pid} = Romeo.Connection.start_link(romeo)
assert_receive {:resource_bound, _}
assert_receive :connection_ready
assert :ok = Romeo.Connection.close(pid)
refute_receive :connection_ready, 1000
end
end
| 31.911504 | 97 | 0.674709 |
9ee85b4a0fa9d9dd09c4f380b11624a26933312b | 1,009 | ex | Elixir | lib/gazol/application.ex | Conceptx/absinthe-example | 369fbeaf2e688767240e2ec961a2029359580afa | [
"MIT"
] | null | null | null | lib/gazol/application.ex | Conceptx/absinthe-example | 369fbeaf2e688767240e2ec961a2029359580afa | [
"MIT"
] | null | null | null | lib/gazol/application.ex | Conceptx/absinthe-example | 369fbeaf2e688767240e2ec961a2029359580afa | [
"MIT"
] | null | null | null | defmodule Gazol.Application do
use Application
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
  # OTP application callback: starts the repo and the Phoenix endpoint under
  # a one_for_one supervisor.
  # NOTE(review): `Supervisor.Spec` (supervisor/2, worker/3) is deprecated in
  # modern Elixir; consider migrating to plain child specs when convenient.
  def start(_type, _args) do
    import Supervisor.Spec

    # Define workers and child supervisors to be supervised
    children = [
      # Start the Ecto repository
      supervisor(Gazol.Repo, []),
      # Start the endpoint when the application starts
      supervisor(GazolWeb.Endpoint, []),
      # Start your own worker by calling: Gazol.Worker.start_link(arg1, arg2, arg3)
      # worker(Gazol.Worker, [arg1, arg2, arg3]),
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Gazol.Supervisor]
    Supervisor.start_link(children, opts)
  end
  # Tell Phoenix to update the endpoint configuration whenever the
  # application is updated (e.g. during a hot code upgrade).
  def config_change(changed, _new, removed) do
    GazolWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 31.53125 | 83 | 0.709613 |
9ee85ba90a2001829440c376eb2ac18d0de02f69 | 1,503 | ex | Elixir | lib/collidex/detection/rects.ex | IdahoEv/collidex | ef86928ef264723db8129a9de94acce8183d01bc | [
"MIT"
] | 4 | 2018-08-19T23:05:06.000Z | 2021-01-17T09:17:45.000Z | lib/collidex/detection/rects.ex | IdahoEv/collidex | ef86928ef264723db8129a9de94acce8183d01bc | [
"MIT"
] | 2 | 2016-09-14T19:47:17.000Z | 2016-09-14T19:49:17.000Z | lib/collidex/detection/rects.ex | IdahoEv/collidex | ef86928ef264723db8129a9de94acce8183d01bc | [
"MIT"
] | null | null | null | defmodule Collidex.Detection.Rects do
@moduledoc """
This module tests for collisions between pairs of grid-aligned
rectangles.
"""
@doc """
Test for collision between two grid-aligned
rectangles. Expects two Collidect.Detection.Rect
structs and returns { :collision, "todo_provide_vector" }
if the two rectangles share any points in common, and false otherwise.
## Examples
```
iex> Collidex.Detection.Rects.collision?(
...> Collidex.Geometry.Rect.make(-2, -0.75, 2, -2),
...> Collidex.Geometry.Rect.make(2, 0.5, 3, -0.5)
...> )
false
iex> Collidex.Detection.Rects.collision?(
...> Collidex.Geometry.Rect.make(2, 0.5, 3, -0.5),
...> Collidex.Geometry.Rect.make(3,-3,-3,3)
...> )
{:collision, "todo_provide_vector"}
```
"""
def collision?(r1, r2) do
{ r1x1, r1y1 } = r1.a
{ r1x2, r1y2 } = r1.b
{ r2x1, r2y1 } = r2.a
{ r2x2, r2y2 } = r2.b
x_overlap =
in_range?(r1x1, r2x1, r2x2)
or in_range?(r1x2, r2x1, r2x2)
or in_range?(r2x1, r1x1, r1x2)
or in_range?(r2x2, r1x1, r1x2)
y_overlap =
in_range?(r1y1, r2y1, r2y2)
or in_range?(r1y2, r2y1, r2y2)
or in_range?(r2y1, r1y1, r1y2)
or in_range?(r2y2, r1y1, r1y2)
cond do
x_overlap and y_overlap ->
{ :collision, "todo_provide_vector" }
true ->
false
end
end
defp in_range?(a,b,c) when b > c do
in_range?(a,c,b)
end
defp in_range?(a,b,c) do
a >= b and a <= c
end
end
| 23.857143 | 72 | 0.602129 |
9ee87623de140dfd5e4121a7126df336bd63f774 | 442 | ex | Elixir | lib/blue_heron/att/commands/write_command.ex | kevinansfield/blue_heron | 8339e6747e135030f7d1e67801391f03f2558e0d | [
"Apache-2.0"
] | 45 | 2020-10-17T13:34:15.000Z | 2022-03-08T09:40:43.000Z | lib/blue_heron/att/commands/write_command.ex | kevinansfield/blue_heron | 8339e6747e135030f7d1e67801391f03f2558e0d | [
"Apache-2.0"
] | 20 | 2020-10-15T15:05:54.000Z | 2022-03-27T15:54:36.000Z | lib/blue_heron/att/commands/write_command.ex | kevinansfield/blue_heron | 8339e6747e135030f7d1e67801391f03f2558e0d | [
"Apache-2.0"
] | 11 | 2020-10-23T17:18:57.000Z | 2022-03-15T20:01:49.000Z | defmodule BlueHeron.ATT.WriteCommand do
defstruct [:opcode, :handle, :data]
def deserialize(<<0x52, handle::little-16, data::binary>>) do
%__MODULE__{opcode: 0x52, handle: handle, data: data}
end
def serialize(%{data: %type{} = data} = write_command) do
serialize(%{write_command | data: type.serialize(data)})
end
def serialize(%{handle: handle, data: data}) do
<<0x52, handle::little-16, data::binary>>
end
end
| 27.625 | 63 | 0.678733 |
9ee883be63da6dd1fbd5717ff1838ae7669e3f8e | 1,086 | ex | Elixir | apps/aecore/lib/aecore/peers/worker/supervisor.ex | SingularityMatrix/elixir-node | ad126aa97931165185cf35454718ed2eee40ceed | [
"ISC"
] | null | null | null | apps/aecore/lib/aecore/peers/worker/supervisor.ex | SingularityMatrix/elixir-node | ad126aa97931165185cf35454718ed2eee40ceed | [
"ISC"
] | 2 | 2018-10-01T16:46:26.000Z | 2018-10-01T19:45:42.000Z | apps/aecore/lib/aecore/peers/worker/supervisor.ex | gspasov/dogs-blockchain | 884c14cfc98de2c3793a204da069630d090bbc90 | [
"0BSD"
] | null | null | null | defmodule Aecore.Peers.Worker.Supervisor do
@moduledoc """
Supervises the Peers, PeerConnectionSupervisor, Sync and ranch acceptor processes with a one_for_all strategy
"""
use Supervisor
alias Aecore.Sync.Sync
alias Aecore.Peers.Worker, as: Peers
alias Aecore.Peers.PeerConnection
alias Aecore.Peers.Worker.PeerConnectionSupervisor
alias Aecore.Keys
  def start_link(_args) do
    Supervisor.start_link(__MODULE__, :ok)
  end

  def init(:ok) do
    # Peer connections authenticate with this node's peer keypair.
    {pubkey, privkey} = Keys.keypair(:peer)
    children = [
      Sync,
      PeerConnectionSupervisor,
      Peers,
      # Ranch TCP listener for inbound peer connections; each accepted socket
      # is handled by a PeerConnection process, which receives the map below
      # (listen port plus this node's peer keypair) as its argument.
      :ranch.child_spec(
        :peer_pool,
        num_of_acceptors(),
        :ranch_tcp,
        [port: sync_port()],
        PeerConnection,
        %{
          port: sync_port(),
          privkey: privkey,
          pubkey: pubkey
        }
      )
    ]
    # :one_for_all — if any of these dies, restart the whole peer stack.
    Supervisor.init(children, strategy: :one_for_all)
  end

  # TCP port the peer/sync listener binds to (config :aecore, :peers).
  def sync_port do
    Application.get_env(:aecore, :peers)[:sync_port]
  end

  # Number of ranch acceptor processes for the listener (same config key).
  def num_of_acceptors do
    Application.get_env(:aecore, :peers)[:ranch_acceptors]
  end
end
| 21.72 | 111 | 0.657459 |
9ee8887a1e364c53561aafccf16546831403c9b8 | 31,029 | ex | Elixir | lib/ex_aws/s3.ex | taylorbrooks/ex_aws | ff602262eeeacfef2fc0e5b05695a826f4bf1eb4 | [
"Unlicense",
"MIT"
] | null | null | null | lib/ex_aws/s3.ex | taylorbrooks/ex_aws | ff602262eeeacfef2fc0e5b05695a826f4bf1eb4 | [
"Unlicense",
"MIT"
] | null | null | null | lib/ex_aws/s3.ex | taylorbrooks/ex_aws | ff602262eeeacfef2fc0e5b05695a826f4bf1eb4 | [
"Unlicense",
"MIT"
] | null | null | null | defmodule ExAws.S3 do
@moduledoc """
Operations on AWS S3
## Basic Operations
The vast majority of operations here represent a single operation on S3.
### Examples
```
S3.list_objects |> ExAws.request! #=> {:ok, %{body: [list, of, objects]}}
S3.list_objects |> ExAws.stream! |> Enum.to_list #=> [list, of, objects]
S3.put_object("my-bucket", "path/to/bucket", contents) |> ExAws.request!
```
## Higher Level Operations
There are also some operations which operate at a higher level to make it easier
to download and upload very large files.
Multipart uploads
```
"path/to/big/file"
|> S3.Upload.stream_file!
|> S3.upload("my-bucket", "path/on/s3")
|> ExAws.request! #=> {:ok, :done}
```
Download large file to disk
```
S3.download_file("my-bucket", "path/on/s3", "path/to/dest/file")
|> ExAws.request! #=> {:on, :done}
```
## More high level functionality
Flow makes some high level flows so easy you don't need explicit ExAws support.
For example, here is how to concurrently upload many files.
```
upload_file = fn {src_path, dest_path} ->
S3.put_object("my_bucket", dest_path, File.read!(src_path))
|> ExAws.request!
end
paths = %{"path/to/src0" => "path/to/dest0", "path/to/src1" => "path/to/dest1"}
paths
|> Flow.from_enumerable(stages: 10, max_demand: 2)
|> Flow.each(upload_file)
|> Flow.run
```
"""
import ExAws.S3.Utils
alias ExAws.S3.Parsers
alias Experimental.Flow
@type acl_opts :: [{:acl, canned_acl} | grant]
@type grant :: {:grant_read, grantee}
| {:grant_read_acp, grantee}
| {:grant_write_acp, grantee}
| {:grant_full_control, grantee}
@type canned_acl :: :private
| :public_read
| :public_read_write
| :authenticated_read
| :bucket_owner_read
| :bucket_owner_full_control
@type grantee :: [ {:email, binary}
| {:id, binary}
| {:uri, binary}
]
@type customer_encryption_opts :: [
customer_algorithm: binary,
customer_key: binary,
customer_key_md5: binary]
@type encryption_opts :: binary
| [aws_kms_key_id: binary]
| customer_encryption_opts
@type presigned_url_opts :: [
expires_in: integer,
virtual_host: boolean,
query_params: [{:key, binary}]
]
@type amz_meta_opts :: [{atom, binary} | {binary, binary}, ...]
## Buckets
#############
@doc "List buckets"
@spec list_buckets() :: ExAws.Operation.S3.t
@spec list_buckets(opts :: Keyword.t) :: ExAws.Operation.S3.t
def list_buckets(opts \\ []) do
request(:get, "", "/", params: opts)
end
@doc "Delete a bucket"
@spec delete_bucket(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket(bucket) do
request(:delete, bucket, "/")
end
@doc "Delete a bucket cors"
@spec delete_bucket_cors(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_cors(bucket) do
request(:delete, bucket, "/", resource: "cors")
end
@doc "Delete a bucket lifecycle"
@spec delete_bucket_lifecycle(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_lifecycle(bucket) do
request(:delete, bucket, "/", resource: "lifecycle")
end
@doc "Delete a bucket policy"
@spec delete_bucket_policy(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_policy(bucket) do
request(:delete, bucket, "/", resource: "policy")
end
@doc "Delete a bucket replication"
@spec delete_bucket_replication(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_replication(bucket) do
request(:delete, bucket, "/", resource: "replication")
end
@doc "Delete a bucket tagging"
@spec delete_bucket_tagging(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_tagging(bucket) do
request(:delete, bucket, "/", resource: "tagging")
end
@doc "Delete a bucket website"
@spec delete_bucket_website(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_website(bucket) do
request(:delete, bucket, "/", resource: "website")
end
@type list_objects_opts :: [
{:delimiter, binary} |
{:marker, binary} |
{:prefix, binary} |
{:encoding_type, binary} |
{:max_keys, 0..1000}
]
@doc """
List objects in bucket
Can be streamed.
"""
@spec list_objects(bucket :: binary) :: ExAws.Operation.S3.t
@spec list_objects(bucket :: binary, opts :: list_objects_opts) :: ExAws.Operation.S3.t
  @params [:delimiter, :marker, :prefix, :encoding_type, :max_keys]
  def list_objects(bucket, opts \\ []) do
    # Keep only the query params S3's ListObjects understands; name
    # formatting is done by format_and_take/2 (ExAws.S3.Utils).
    params = opts
    |> format_and_take(@params)
    # stream_builder lets ExAws.stream! paginate through the full listing;
    # parser decodes the XML response body.
    request(:get, bucket, "/", [params: params],
      stream_builder: &ExAws.S3.Lazy.stream_objects!(bucket, opts, &1),
      parser: &ExAws.S3.Parsers.parse_list_objects/1
    )
  end
@doc "Get bucket acl"
@spec get_bucket_acl(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_acl(bucket) do
request(:get, bucket, "/", resource: "acl")
end
@doc "Get bucket cors"
@spec get_bucket_cors(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_cors(bucket) do
request(:get, bucket, "/", resource: "cors")
end
@doc "Get bucket lifecycle"
@spec get_bucket_lifecycle(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_lifecycle(bucket) do
request(:get, bucket, "/", resource: "lifecycle")
end
@doc "Get bucket policy"
@spec get_bucket_policy(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_policy(bucket) do
request(:get, bucket, "/", resource: "policy")
end
@doc "Get bucket location"
@spec get_bucket_location(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_location(bucket) do
request(:get, bucket, "/", resource: "location")
end
@doc "Get bucket logging"
@spec get_bucket_logging(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_logging(bucket) do
request(:get, bucket, "/", resource: "logging")
end
@doc "Get bucket notification"
@spec get_bucket_notification(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_notification(bucket) do
request(:get, bucket, "/", resource: "notification")
end
@doc "Get bucket replication"
@spec get_bucket_replication(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_replication(bucket) do
request(:get, bucket, "/", resource: "replication")
end
@doc "Get bucket tagging"
@spec get_bucket_tagging(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_tagging(bucket) do
request(:get, bucket, "/", resource: "tagging")
end
@doc "Get bucket object versions"
@spec get_bucket_object_versions(bucket :: binary) :: ExAws.Operation.S3.t
@spec get_bucket_object_versions(bucket :: binary, opts :: Keyword.t) :: ExAws.Operation.S3.t
def get_bucket_object_versions(bucket, opts \\ []) do
request(:get, bucket, "/", resource: "versions", params: opts)
end
@doc "Get bucket payment configuration"
@spec get_bucket_request_payment(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_request_payment(bucket) do
request(:get, bucket, "/", resource: "requestPayment")
end
@doc "Get bucket versioning"
@spec get_bucket_versioning(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_versioning(bucket) do
request(:get, bucket, "/", resource: "versioning")
end
@doc "Get bucket website"
@spec get_bucket_website(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_website(bucket) do
request(:get, bucket, "/", resource: "website")
end
@doc "Determine if a bucket exists"
@spec head_bucket(bucket :: binary) :: ExAws.Operation.S3.t
def head_bucket(bucket) do
request(:head, bucket, "/")
end
@doc "List multipart uploads for a bucket"
@spec list_multipart_uploads(bucket :: binary) :: ExAws.Operation.S3.t
@spec list_multipart_uploads(bucket :: binary, opts :: Keyword.t) :: ExAws.Operation.S3.t
@params [:delimiter, :encoding_type, :max_uploads, :key_marker, :prefix, :upload_id_marker]
def list_multipart_uploads(bucket, opts \\ []) do
params = opts |> format_and_take(@params)
request(:get, bucket, "/", [resource: "uploads", params: params], %{parser: &Parsers.parse_list_multipart_uploads/1})
end
@doc "Creates a bucket. Same as create_bucket/2"
@spec put_bucket(bucket :: binary, region :: binary) :: ExAws.Operation.S3.t
def put_bucket(bucket, region, opts \\ []) do
headers = opts
|> Map.new
|> format_acl_headers
# us-east-1 region needs to be an empty string, cause AWS S3 API sucks.
region = if region == "us-east-1", do: "", else: region
body = """
<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<LocationConstraint>#{region}</LocationConstraint>
</CreateBucketConfiguration>
"""
request(:put, bucket, "/", body: body, headers: headers)
end
@doc "Update or create a bucket bucket access control"
@spec put_bucket_acl(bucket :: binary, opts :: acl_opts) :: ExAws.Operation.S3.t
def put_bucket_acl(bucket, grants) do
request(:put, bucket, "/", headers: format_acl_headers(grants))
end
@doc "Update or create a bucket CORS policy"
@spec put_bucket_cors(bucket :: binary, cors_config :: %{}) :: ExAws.Operation.S3.t
def put_bucket_cors(bucket, cors_rules) do
rules = cors_rules
|> Enum.map(&build_cors_rule/1)
|> IO.iodata_to_binary
body = "<CORSConfiguration>#{rules}</CORSConfiguration>"
content_md5 = :crypto.hash(:md5, body) |> Base.encode64
headers = %{"content-md5" => content_md5}
request(:put, bucket, "/",
resource: "cors", body: body, headers: headers)
end
@doc "Update or create a bucket lifecycle configuration"
@spec put_bucket_lifecycle(bucket :: binary, lifecycle_config :: %{}) :: no_return
def put_bucket_lifecycle(bucket, _livecycle_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket policy configuration"
@spec put_bucket_policy(bucket :: binary, policy :: %{}) :: ExAws.Operation.S3.t
def put_bucket_policy(bucket, policy) do
request(:put, bucket, "/", resource: "policy", body: policy)
end
@doc "Update or create a bucket logging configuration"
@spec put_bucket_logging(bucket :: binary, logging_config :: %{}) :: no_return
def put_bucket_logging(bucket, _logging_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket notification configuration"
@spec put_bucket_notification(bucket :: binary, notification_config :: %{}) :: no_return
def put_bucket_notification(bucket, _notification_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket replication configuration"
@spec put_bucket_replication(bucket :: binary, replication_config :: %{}) :: no_return
def put_bucket_replication(bucket, _replication_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket tagging configuration"
@spec put_bucket_tagging(bucket :: binary, tags :: %{}) :: no_return
def put_bucket_tagging(bucket, _tags) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket requestPayment configuration"
@spec put_bucket_request_payment(bucket :: binary, payer :: :requester | :bucket_owner) :: no_return
def put_bucket_request_payment(bucket, _payer) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket versioning configuration"
@spec put_bucket_versioning(bucket :: binary, version_config :: binary) :: no_return
def put_bucket_versioning(bucket, _version_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket website configuration"
@spec put_bucket_website(bucket :: binary, website_config :: binary) :: no_return
def put_bucket_website(bucket, _website_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
## Objects
###########
@doc "Delete object object in bucket"
@spec delete_object(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
def delete_object(bucket, object, opts \\ []) do
request(:delete, bucket, object, headers: opts |> Map.new)
end
@doc """
Delete multiple objects within a bucket
Limited to 1000 objects.
"""
@spec delete_multiple_objects(
bucket :: binary,
objects :: [binary | {binary, binary}, ...]):: ExAws.Operation.S3.t
@spec delete_multiple_objects(
bucket :: binary,
objects :: [binary | {binary, binary}, ...], opts :: [quiet: true]):: ExAws.Operation.S3.t
def delete_multiple_objects(bucket, objects, opts \\ []) do
objects_xml = Enum.map(objects, fn
{key, version} -> ["<Object><Key>", key, "</Key><VersionId>", version, "</VersionId></Object>"]
key -> ["<Object><Key>", key, "</Key></Object>"]
end)
quiet = case opts do
[quiet: true] -> "<Quiet>true</Quiet>"
_ -> ""
end
body = [
~s(<?xml version="1.0" encoding="UTF-8"?>),
quiet,
"<Delete>",
objects_xml,
"</Delete>"
]
content_md5 = :crypto.hash(:md5, body) |> Base.encode64
body_binary = body |> IO.iodata_to_binary
request(:post, bucket, "/?delete", body: body_binary, headers: %{"content-md5" => content_md5})
end
@doc """
Delete all listed objects.
When performed, this function will continue making `delete_multiple_objects`
requests deleting 1000 objects at a time until all are deleted.
Can be streamed.
"""
@spec delete_all_objects(
bucket :: binary,
objects :: [binary | {binary, binary}, ...]):: ExAws.Operation.S3DeleteAllObjects.t
@spec delete_all_objects(
bucket :: binary,
objects :: [binary | {binary, binary}, ...], opts :: [quiet: true]):: ExAws.Operation.S3DeleteAllObjects.t
def delete_all_objects(bucket, objects, opts \\ []) do
%ExAws.Operation.S3DeleteAllObjects{bucket: bucket, objects: objects, opts: opts}
end
@type get_object_response_opts :: [
{:content_language, binary}
| {:expires, binary}
| {:cach_control, binary}
| {:content_disposition, binary}
| {:content_encoding, binary}
]
@type get_object_opts :: [
{:response, get_object_response_opts}
| head_object_opts
]
@doc "Get an object from a bucket"
@spec get_object(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
@spec get_object(bucket :: binary, object :: binary, opts :: get_object_opts) :: ExAws.Operation.S3.t
@response_params [:content_type, :content_language, :expires, :cache_control, :content_disposition, :content_encoding]
@request_headers [:range, :if_modified_since, :if_unmodified_since, :if_match, :if_none_match]
def get_object(bucket, object, opts \\ []) do
opts = opts |> Map.new
response_opts = opts
|> Map.get(:response, %{})
|> format_and_take(@response_params)
|> namespace("response")
headers = opts
|> format_and_take(@request_headers)
headers = opts
|> Map.get(:encryption, %{})
|> build_encryption_headers
|> Map.merge(headers)
request(:get, bucket, object, headers: headers, params: response_opts)
end
@type download_file_opts :: [
max_concurrency: pos_integer,
chunk_size: pos_integer,
timeout: 60_000,
]
@doc """
Download an S3 Object to a file.
This operation download multiple parts of an S3 object concurrently, allowing
you to maximize throughput.
Defaults to a concurrency of 8, chunk size of 1MB, and a timeout of 1 minute.
"""
@spec download_file(bucket :: binary, path :: binary, dest :: binary) :: __MODULE__.Download.t
@spec download_file(bucket :: binary, path :: binary, dest :: binary, opts :: download_file_opts) :: __MODULE__.Download.t
def download_file(bucket, path, dest, opts \\ []) do
%__MODULE__.Download{
bucket: bucket,
path: path,
dest: dest,
opts: opts
}
end
@type upload_opts :: [{:max_concurrency, pos_integer} | initiate_multipart_upload_opts ]

@doc """
Multipart upload to S3.

Handles initialization, uploading parts concurrently, and multipart upload completion.

## Uploading a stream

Streams that emit binaries may be uploaded directly to S3. Each binary will be uploaded
as a chunk, so it must be at least 5 megabytes in size. The `S3.Upload.stream_file`
helper takes care of reading the file in 5 megabyte chunks.

```
"path/to/big/file"
|> S3.Upload.stream_file
|> S3.upload("my-bucket", "path/on/s3")
|> ExAws.request! #=> :done
```

## Uploading a flow

GenStage Flows can also be uploaded directly to S3. To ensure all parts of
the file are assembled in the correct order when the upload is finalized, the
producer must emit events with the following format:
`{binary, one_based_index}`. Each binary will be uploaded as a chunk and
must be at least 5 megabytes in size.

```
enumerable = ["hello world"] |> Stream.with_index(1)
Flow.from_enumerable(enumerable, stages: 4, max_demand: 2)
|> S3.upload("my-bucket", "test.txt")
|> ExAws.request! #=> :done
```

## Options

These options are specific to this function

* `:max_concurrency` -- The number of concurrent processes reading from this
  stream. Only applies when uploading a stream.

All other options (ex. `:content_type`) are passed through to
`ExAws.S3.initiate_multipart_upload/3`.
"""
@spec upload(
  source :: Enumerable.t | Flow.t,
  bucket :: String.t,
  path :: String.t,
  opts :: upload_opts) :: __MODULE__.Upload.t
def upload(source, bucket, path, opts \\ []) do
  # Like download_file/4 this only builds the `Upload` operation struct; the
  # actual multipart upload runs via `ExAws.request/2`.
  %__MODULE__.Upload{
    src: source,
    bucket: bucket,
    path: path,
    opts: opts,
  }
end
@doc "Get an object's access control policy"
@spec get_object_acl(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
@spec get_object_acl(bucket :: binary, object :: binary, opts :: Keyword.t) :: ExAws.Operation.S3.t
def get_object_acl(bucket, object, opts \\ []) do
request(:get, bucket, object, resource: "acl", headers: opts |> Map.new)
end
@doc "Get a torrent for a bucket"
@spec get_object_torrent(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
def get_object_torrent(bucket, object) do
request(:get, bucket, object, resource: "torrent")
end
@type head_object_opts :: [
  {:encryption, customer_encryption_opts}
  | {:range, binary}
  | {:if_modified_since, binary}
  | {:if_unmodified_since, binary}
  | {:if_match, binary}
  | {:if_none_match, binary}
]

@doc "Determine if an object exists"
@spec head_object(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
@spec head_object(bucket :: binary, object :: binary, opts :: head_object_opts) :: ExAws.Operation.S3.t
@request_headers [:range, :if_modified_since, :if_unmodified_since, :if_match, :if_none_match]
def head_object(bucket, object, opts \\ []) do
  opts = Map.new(opts)

  # Customer-encryption headers first; conditional/range headers taken from
  # the options win on any name collision (same precedence as before).
  encryption_headers =
    opts
    |> Map.get(:encryption, %{})
    |> build_encryption_headers

  headers = Map.merge(encryption_headers, format_and_take(opts, @request_headers))

  params =
    case Map.fetch(opts, :version_id) do
      {:ok, id} -> %{"versionId" => id}
      :error -> %{}
    end

  request(:head, bucket, object, headers: headers, params: params)
end
@doc "Determine the CORS configuration for an object"
@spec options_object(
bucket :: binary,
object :: binary,
origin :: binary,
request_method :: atom) :: ExAws.Operation.S3.t
@spec options_object(
bucket :: binary,
object :: binary,
origin :: binary,
request_method :: atom,
request_headers :: [binary]) :: ExAws.Operation.S3.t
def options_object(bucket, object, origin, request_method, request_headers \\ []) do
headers = [
{"Origin", origin},
{"Access-Control-Request-Method", request_method},
{"Access-Control-Request-Headers", request_headers |> Enum.join(",")},
]
request(:options, bucket, object, headers: headers)
end
@doc "Restore an object to a particular version"
@spec post_object_restore(
bucket :: binary,
object :: binary,
number_of_days :: pos_integer) :: ExAws.Operation.S3.t
@spec post_object_restore(
bucket :: binary,
object :: binary,
number_of_days :: pos_integer,
opts :: [version_id: binary]) :: ExAws.Operation.S3.t
def post_object_restore(bucket, object, number_of_days, opts \\ []) do
params = case Keyword.fetch(opts, :version_id) do
{:ok, id} -> %{"versionId" => id}
_ -> %{}
end
body = """
<RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-3-01">
<Days>#{number_of_days}</Days>
</RestoreRequest>
"""
request(:post, bucket, object, resource: "restore", params: params, body: body)
end
@type put_object_opts :: [
  {:cache_control, binary}
  | {:content_disposition, binary}
  | {:content_encoding, binary}
  | {:content_length, binary}
  | {:content_type, binary}
  | {:expect, binary}
  | {:expires, binary}
  # Fixed typespec typo: the storage class is :reduced_redundancy
  # (AWS "REDUCED_REDUNDANCY"), not the old misspelling :redunced_redundancy.
  | {:storage_class, :standard | :reduced_redundancy}
  | {:website_redirect_location, binary}
  | {:encryption, encryption_opts}
  | {:meta, amz_meta_opts}
  | acl_opts
]

@doc """
Create an object within a bucket.

`body` is the raw object payload; `opts` are rendered into request headers
by `put_object_headers/1` (cache control, content type, storage class,
encryption, ACLs, ...). Returns an operation struct; no request is made here.
"""
@spec put_object(bucket :: binary, object :: binary, body :: binary) :: ExAws.Operation.S3.t
@spec put_object(bucket :: binary, object :: binary, body :: binary, opts :: put_object_opts) :: ExAws.Operation.S3.t
def put_object(bucket, object, body, opts \\ []) do
  request(:put, bucket, object, body: body, headers: put_object_headers(opts))
end
@doc "Create or update an object's access control FIXME"
@spec put_object_acl(bucket :: binary, object :: binary, acl :: acl_opts) :: ExAws.Operation.S3.t
def put_object_acl(bucket, object, acl) do
headers = acl |> Map.new |> format_acl_headers
request(:put, bucket, object, headers: headers, resource: "acl")
end
# NOTE(review): the type name "pub_object_copy_opts" looks like a typo for
# "put_object_copy_opts", but renaming it would break external references —
# left as-is. :website_redirect_location is also listed twice below.
@type pub_object_copy_opts :: [
  {:metadata_directive, :COPY | :REPLACE}
  | {:copy_source_if_modified_since, binary}
  | {:copy_source_if_unmodified_since, binary}
  | {:copy_source_if_match, binary}
  | {:copy_source_if_none_match, binary}
  | {:website_redirect_location, binary}
  | {:destination_encryption, encryption_opts}
  | {:source_encryption, customer_encryption_opts}
  | {:cache_control, binary}
  | {:content_disposition, binary}
  | {:content_encoding, binary}
  | {:content_length, binary}
  | {:content_type, binary}
  | {:expect, binary}
  | {:expires, binary}
  | {:storage_class, :standard | :redunced_redundancy}
  | {:website_redirect_location, binary}
  | {:meta, amz_meta_opts}
  | acl_opts
]

@doc "Copy an object"
@spec put_object_copy(
  dest_bucket :: binary,
  dest_object :: binary,
  src_bucket :: binary,
  src_object :: binary) :: ExAws.Operation.S3.t
@spec put_object_copy(
  dest_bucket :: binary,
  dest_object :: binary,
  src_bucket :: binary,
  src_object :: binary,
  opts :: pub_object_copy_opts) :: ExAws.Operation.S3.t
@amz_headers ~w(
  metadata_directive
  copy_source_if_modified_since
  copy_source_if_unmodified_since
  copy_source_if_match
  copy_source_if_none_match
  storage_class
  website_redirect_location)a
def put_object_copy(dest_bucket, dest_object, src_bucket, src_object, opts \\ []) do
  opts = opts |> Map.new

  # Copy directives/conditionals rendered as x-amz-* headers.
  amz_headers = opts
  |> format_and_take(@amz_headers)
  |> namespace("x-amz")

  # Source customer-encryption headers: build_encryption_headers emits
  # "x-amz-..." names, which are rewritten to "x-amz-copy-source-..." here.
  source_encryption = opts
  |> Map.get(:source_encryption, %{})
  |> build_encryption_headers
  |> Enum.into(%{}, fn {<<"x-amz", k :: binary>>, v} ->
    {"x-amz-copy-source" <> k, v}
  end)

  destination_encryption = opts
  |> Map.get(:destination_encryption, %{})
  |> build_encryption_headers

  # NOTE(review): :encryption is deleted although the documented opts use
  # :source_encryption/:destination_encryption — presumably defensive so
  # put_object_headers never emits destination headers twice; confirm.
  regular_headers = opts
  |> Map.delete(:encryption)
  |> put_object_headers

  # Later merges win on collisions: amz < source < destination encryption;
  # the (URI-encoded) copy source is put last.
  headers = regular_headers
  |> Map.merge(amz_headers)
  |> Map.merge(source_encryption)
  |> Map.merge(destination_encryption)
  |> Map.put("x-amz-copy-source", URI.encode "/#{src_bucket}/#{src_object}")

  request(:put, dest_bucket, dest_object, headers: headers)
end
@type initiate_multipart_upload_opts :: [ {:cache_control, binary}
  | {:content_disposition, binary}
  | {:content_encoding, binary}
  | {:content_type, binary}
  | {:expires, binary}
  | {:storage_class, :standard | :redunced_redundancy}
  | {:website_redirect_location, binary}
  | {:encryption, encryption_opts}
  | acl_opts
]

@doc "Initiate a multipart upload"
@spec initiate_multipart_upload(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
@spec initiate_multipart_upload(bucket :: binary, object :: binary, opts :: initiate_multipart_upload_opts) :: ExAws.Operation.S3.t
def initiate_multipart_upload(bucket, object, opts \\ []) do
  # POST with the "uploads" sub-resource; the response body is decoded by the
  # dedicated initiate-multipart-upload parser.
  request(:post, bucket, object, [resource: "uploads", headers: put_object_headers(opts)], %{parser: &Parsers.parse_initiate_multipart_upload/1})
end
@doc "Upload a part for a multipart upload"
@spec upload_part(
bucket :: binary,
object :: binary,
upload_id :: binary,
part_number :: pos_integer,
body :: binary) :: ExAws.Operation.S3.t
@spec upload_part(
bucket :: binary,
object :: binary,
upload_id :: binary,
part_number :: pos_integer,
body :: binary,
opts :: [encryption_opts | {:expect, binary}]) :: ExAws.Operation.S3.t
def upload_part(bucket, object, upload_id, part_number, body, _opts \\ []) do
params = %{"uploadId" => upload_id, "partNumber" => part_number}
request(:put, bucket, object, params: params, body: body)
end
@type upload_part_copy_opts :: [
  {:copy_source_range, Range.t}
  | {:copy_source_if_modified_since, binary}
  | {:copy_source_if_unmodified_since, binary}
  | {:copy_source_if_match, binary}
  | {:copy_source_if_none_match, binary}
  | {:destination_encryption, encryption_opts}
  | {:source_encryption, customer_encryption_opts}
]

@doc "Upload a part for a multipart copy"
@spec upload_part_copy(
  dest_bucket :: binary,
  dest_object :: binary,
  src_bucket :: binary,
  src_object :: binary) :: ExAws.Operation.S3.t
@spec upload_part_copy(
  dest_bucket :: binary,
  dest_object :: binary,
  src_bucket :: binary,
  src_object :: binary,
  opts :: upload_part_copy_opts) :: ExAws.Operation.S3.t
@amz_headers ~w(
  copy_source_if_modified_since
  copy_source_if_unmodified_since
  copy_source_if_match
  copy_source_if_none_match)a
def upload_part_copy(dest_bucket, dest_object, src_bucket, src_object, opts \\ []) do
  opts = opts |> Map.new

  # Source customer-encryption headers, renamed from "x-amz-..." to
  # "x-amz-copy-source-..." (same rewrite as put_object_copy/5).
  source_encryption = opts
  |> Map.get(:source_encryption, %{})
  |> build_encryption_headers
  |> Enum.into(%{}, fn {<<"x-amz", k :: binary>>, v} ->
    {"x-amz-copy-source" <> k, v}
  end)

  destination_encryption = opts
  |> Map.get(:destination_encryption, %{})
  |> build_encryption_headers

  # Later merges win: encryption headers override the conditional headers on
  # any name collision.
  headers = opts
  |> format_and_take(@amz_headers)
  |> namespace("x-amz")
  |> Map.merge(source_encryption)
  |> Map.merge(destination_encryption)

  # The whole `case` expression is piped into Map.put: an optional inclusive
  # byte range header is added first, then the copy source is always set.
  # NOTE(review): unlike put_object_copy/5, the copy source here is not
  # URI-encoded — confirm whether that is intentional.
  headers = case opts do
    %{copy_source_range: first..last} -> Map.put(headers, "x-amz-copy-source-range", "bytes=#{first}-#{last}")
    _ -> headers
  end
  |> Map.put("x-amz-copy-source", "/#{src_bucket}/#{src_object}")

  request(:put, dest_bucket, dest_object, [headers: headers], %{parser: &Parsers.parse_upload_part_copy/1})
end
@doc "Complete a multipart upload"
@spec complete_multipart_upload(
bucket :: binary,
object :: binary,
upload_id :: binary,
parts :: [{binary | pos_integer, binary}, ...]) :: ExAws.Operation.S3.t
def complete_multipart_upload(bucket, object, upload_id, parts) do
parts_xml = parts
|> Enum.map(fn {part_number, etag}->
["<Part>",
"<PartNumber>", Integer.to_string(part_number), "</PartNumber>",
"<ETag>", etag, "</ETag>",
"</Part>"]
end)
body = ["<CompleteMultipartUpload>", parts_xml, "</CompleteMultipartUpload>"]
|> IO.iodata_to_binary
request(:post, bucket, object, [params: %{"uploadId" => upload_id}, body: body], %{parser: &Parsers.parse_complete_multipart_upload/1})
end
@doc "Abort a multipart upload"
@spec abort_multipart_upload(bucket :: binary, object :: binary, upload_id :: binary) :: ExAws.Operation.S3.t
def abort_multipart_upload(bucket, object, upload_id) do
request(:delete, bucket, object, params: %{"uploadId" => upload_id})
end
@doc "List the parts of a multipart upload"
@spec list_parts(bucket :: binary, object :: binary, upload_id :: binary) :: ExAws.Operation.S3.t
@spec list_parts(bucket :: binary, object :: binary, upload_id :: binary, opts :: Keyword.t) :: ExAws.Operation.S3.t
def list_parts(bucket, object, upload_id, opts \\ []) do
params = opts
|> Map.new
|> Map.merge(%{"uploadId" => upload_id})
request(:get, bucket, object, [params: params], %{parser: &Parsers.parse_list_parts/1})
end
@doc """
Generates a pre-signed URL for this object.
When option param :virtual_host is `true`, the {#bucket} name will be used as
the hostname. This will cause the returned URL to be 'http' and not 'https'.
Additional (signed) query parameters can be added to the url by setting option param
`:query_params` to a list of `{"key", "value"}` pairs. Useful if you are uploading parts of
a multipart upload directly from the browser.
"""
@spec presigned_url(config :: %{}, http_method :: atom, bucket :: binary, object :: binary, opts :: presigned_url_opts) :: {:ok, binary} | {:error, binary}
@one_week 60 * 60 * 24 * 7
def presigned_url(config, http_method, bucket, object, opts \\ []) do
expires_in = Keyword.get(opts, :expires_in, 3600)
virtual_host = Keyword.get(opts, :virtual_host, false)
query_params = Keyword.get(opts, :query_params, [])
case expires_in > @one_week do
true -> {:error, "expires_in_exceeds_one_week"}
false ->
url = url_to_sign(bucket, object, config, virtual_host)
datetime = :calendar.universal_time
{:ok, ExAws.Auth.presigned_url(http_method, url, :s3, datetime, config, expires_in, query_params)}
end
end
# Builds the URL to sign; dispatched on the virtual-host flag:
# virtual-host style puts the bucket in the hostname, path style in the path.
defp url_to_sign(bucket, object, config, true) do
  object = ensure_slash(object)
  "#{config[:scheme]}#{bucket}.#{config[:host]}#{object}"
end

defp url_to_sign(bucket, object, config, false) do
  object = ensure_slash(object)
  "#{config[:scheme]}#{config[:host]}/#{bucket}#{object}"
end
# Internal constructor for every S3 call: assembles an ExAws.Operation.S3
# struct from the request data, then overlays any extra operation fields
# (e.g. a custom parser) from `opts`.
defp request(http_method, bucket, path, data \\ [], opts \\ %{}) do
  %ExAws.Operation.S3{
    http_method: http_method,
    bucket: bucket,
    path: path,
    body: Keyword.get(data, :body) || "",
    headers: Keyword.get(data, :headers) || %{},
    resource: Keyword.get(data, :resource) || "",
    params: Keyword.get(data, :params) || %{}
  }
  |> struct(opts)
end
end
| 33.985761 | 157 | 0.669213 |
9ee88d100b31c89c35d83908a024513b24796e0d | 69 | ex | Elixir | elixir/quick-elixir-blog/blog_phoenix/web/views/layout_view.ex | Jufebrown/learning-projects | 9b8e0718c18ad00b7bb40a72c5c204471b28a100 | [
"MIT"
] | null | null | null | elixir/quick-elixir-blog/blog_phoenix/web/views/layout_view.ex | Jufebrown/learning-projects | 9b8e0718c18ad00b7bb40a72c5c204471b28a100 | [
"MIT"
] | null | null | null | elixir/quick-elixir-blog/blog_phoenix/web/views/layout_view.ex | Jufebrown/learning-projects | 9b8e0718c18ad00b7bb40a72c5c204471b28a100 | [
"MIT"
] | null | null | null | defmodule BlogPhoenix.LayoutView do
# Pulls in the shared view helpers from BlogPhoenix.Web; the module adds
# nothing beyond the standard view behavior for layout templates.
use BlogPhoenix.Web, :view
end
| 17.25 | 35 | 0.811594 |
9ee8a919e2128ee4c5a0caf7173fd945bc45d560 | 596 | ex | Elixir | lib/eggman/cfn/core.ex | ohr486/eggman | 82eb614dab64aadaa556454b9c3c89ca9ed3c9b2 | [
"MIT"
] | 1 | 2020-01-13T20:50:40.000Z | 2020-01-13T20:50:40.000Z | lib/eggman/cfn/core.ex | ohr486/eggman | 82eb614dab64aadaa556454b9c3c89ca9ed3c9b2 | [
"MIT"
] | 25 | 2019-07-02T05:57:46.000Z | 2021-07-26T05:20:55.000Z | lib/eggman/cfn/core.ex | ohr486/eggman | 82eb614dab64aadaa556454b9c3c89ca9ed3c9b2 | [
"MIT"
] | null | null | null | defmodule Eggman.Cfn.Core do
alias ExAws.Cloudformation

@doc """
Lists the names of CloudFormation stacks in `stack_status`, optionally
filtered to names containing `keyword` (nil or "" disables filtering).
"""
# Merged the previously duplicated "" and nil clauses into one guard clause.
def list_stacks(stack_status, keyword) when keyword in [nil, ""] do
  list_stacks_base(stack_status)
end

def list_stacks(stack_status, keyword) do
  list_stacks_base(stack_status)
  |> Enum.filter(&String.contains?(&1, keyword))
end

# Fetches all stacks with the given status and maps them to their names.
# (Was `@doc false`; `@doc` is discarded on private functions and triggers a
# compiler warning, so a plain comment is used instead.)
defp list_stacks_base(stack_status) do
  Cloudformation.list_stacks(stack_status_filters: [stack_status])
  |> ExAws.request!()
  |> Map.get(:body)
  |> Map.get(:stacks)
  |> Enum.map(&Map.get(&1, :name))
end
end
| 28.380952 | 72 | 0.714765 |
9ee8e032b38356424f8825f069dedd531761f4a5 | 856 | exs | Elixir | apps/legion/test/messaging/recipient_test.exs | i386-64/legion | 41ae99af9be962d7fb38726ddf4bb0456edb5ca4 | [
"Apache-2.0"
] | 1 | 2021-01-04T11:06:12.000Z | 2021-01-04T11:06:12.000Z | apps/legion/test/messaging/recipient_test.exs | i386-64/legion | 41ae99af9be962d7fb38726ddf4bb0456edb5ca4 | [
"Apache-2.0"
] | 3 | 2021-01-30T06:40:37.000Z | 2021-01-30T06:41:08.000Z | apps/legion/test/messaging/recipient_test.exs | i386-64/legion | 41ae99af9be962d7fb38726ddf4bb0456edb5ca4 | [
"Apache-2.0"
] | null | null | null | defmodule Legion.Messaging.Message.RecipientTest do
@moduledoc false
use Legion.DataCase

alias Legion.Messaging.Message.Recipient

# Minimal attribute set accepted by Recipient.changeset/2; individual keys
# are removed per-test to exercise the required-field validations.
@valid_attrs %{message_id: 1, recipient_id: 1}

test "changeset with valid attributes" do
  changeset = Recipient.changeset(%Recipient{}, @valid_attrs)
  assert changeset.valid?
end

test "changeset without message identifier" do
  changeset = Recipient.changeset(%Recipient{}, attrs_drop_key(:message_id))
  refute changeset.valid?
end

test "changeset without recipient identifier" do
  changeset = Recipient.changeset(%Recipient{}, attrs_drop_key(:recipient_id))
  refute changeset.valid?
end

test "changeset is invalid with default params either" do
  refute Recipient.changeset(%Recipient{}).valid?
end

# Drops `key` from the valid attribute map, producing an invalid params set.
def attrs_drop_key(key) do
  Map.delete(@valid_attrs, key)
end
end
| 24.457143 | 80 | 0.746495 |
9ee904abd2b6b1d7d905b97502dff9234dadb70a | 1,974 | ex | Elixir | web/controllers/v1/relay_group_membership_controller.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | web/controllers/v1/relay_group_membership_controller.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | web/controllers/v1/relay_group_membership_controller.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | defmodule Cog.V1.RelayGroupMembershipController do
use Cog.Web, :controller

alias Cog.Models.RelayGroup
alias Cog.Repository.RelayGroups

# Every action requires an authenticated user holding the embedded-bundle
# "manage_relays" permission; responses render via RelayGroupView.
plug Cog.Plug.Authentication
plug Cog.Plug.Authorization, permission: "#{Cog.Util.Misc.embedded_bundle}:manage_relays"
plug :put_view, Cog.V1.RelayGroupView

# Renders the relays belonging to the given relay group.
def relay_index(conn, %{"id" => id}) do
  relay_group = Repo.get!(RelayGroup, id)
  |> Repo.preload([:relays, [bundles: :versions]])
  render(conn, "relays.json", relay_group: relay_group)
end

# Renders the bundles assigned to the given relay group.
def bundle_index(conn, %{"id" => id}) do
  relay_group = Repo.get!(RelayGroup, id)
  |> Repo.preload([bundles: :versions])
  render(conn, "bundles.json", relay_group: relay_group)
end

# Thin wrappers that normalize the request body into the generic member_spec
# shape consumed by manage_association/2.
def manage_relay_membership(conn, %{"id" => id, "relays" => member_spec}),
  do: manage_association(conn, %{"id" => id, "members" => %{"relays" => member_spec}})
def manage_bundle_assignment(conn, %{"id" => id, "bundles" => member_spec}),
  do: manage_association(conn, %{"id" => id, "members" => %{"bundles" => member_spec}})

# Manage membership of a relay group. Adds and deletes can be submitted and
# processed in a single request.
def manage_association(conn, %{"id" => id, "members" => member_spec}) do
  relay_group = Repo.get!(RelayGroup, id)
  case RelayGroups.manage_association(relay_group, member_spec) do
    {:ok, relay_group} ->
      conn
      |> render("show.json", relay_group: relay_group)
    # Each failure mode maps to a 422 with a structured error payload.
    {:error, {:not_found, {type, ids}}} ->
      conn
      |> put_status(:unprocessable_entity)
      |> json(%{"errors" => %{"not_found" => %{type => ids}}})
    {:error, {:bad_id, {type, ids}}} ->
      conn
      |> put_status(:unprocessable_entity)
      |> json(%{"errors" => %{"bad_id" => %{type => ids}}})
    {:error, {:protected_bundle, bundle_name}} ->
      conn
      |> put_status(:unprocessable_entity)
      |> json(%{"errors" => %{"protected_bundle" => bundle_name}})
  end
end
end
| 35.890909 | 91 | 0.636272 |
9ee914dc21e1fcef6c21cf07f724d4f286fdd3f4 | 4,455 | exs | Elixir | test/elixir/test/recreate_doc_test.exs | frapa/couchdb | 6c28960f0fe2eec06aca7d58fd73f3c7cdbe1112 | [
"Apache-2.0"
] | 1 | 2022-01-14T20:52:55.000Z | 2022-01-14T20:52:55.000Z | test/elixir/test/recreate_doc_test.exs | frapa/couchdb | 6c28960f0fe2eec06aca7d58fd73f3c7cdbe1112 | [
"Apache-2.0"
] | 1 | 2018-02-08T23:08:43.000Z | 2018-02-08T23:08:43.000Z | test/elixir/test/recreate_doc_test.exs | frapa/couchdb | 6c28960f0fe2eec06aca7d58fd73f3c7cdbe1112 | [
"Apache-2.0"
] | null | null | null | defmodule RecreateDocTest do
use CouchTestCase

@moduletag :recreate_doc
@moduletag kind: :single_node

@moduledoc """
Test CouchDB document recreation
This is a port of the recreate_doc.js suite
"""

@tag :with_db
test "recreate document", context do
  db_name = context[:db_name]
  # First create a new document with the ID "foo", and delete it again
  doc = %{_id: "foo", a: "bar", b: 42}
  {:ok, resp} = create_doc(db_name, doc)
  first_rev = resp.body["rev"]
  resp = Couch.delete("/#{db_name}/foo?rev=#{first_rev}")
  assert resp.status_code == 200
  # Now create a new document with the same ID, save it, and then modify it
  # (ten create/update/delete cycles over the same id).
  doc = %{_id: "foo"}
  for _i <- 0..9 do
    {:ok, _} = create_doc(db_name, doc)
    resp = Couch.get("/#{db_name}/foo")
    updated_doc =
      resp.body
      |> Map.put("a", "baz")
    resp = Couch.put("/#{db_name}/foo", body: updated_doc)
    assert resp.status_code == 201
    rev = resp.body["rev"]
    resp = Couch.delete("/#{db_name}/foo?rev=#{rev}")
    assert resp.status_code == 200
  end
end

@tag :with_db
test "COUCHDB-292 - recreate a deleted document", context do
  db_name = context[:db_name]
  # First create a new document with the ID "foo", and delete it again
  doc = %{_id: "foo", a: "bar", b: 42}
  {:ok, resp} = create_doc(db_name, doc)
  first_rev = resp.body["rev"]
  resp = Couch.delete("/#{db_name}/foo?rev=#{first_rev}")
  assert resp.status_code == 200
  # COUCHDB-292 now attempt to save the document with a prev that's since
  # been deleted and this should generate a conflict exception
  updated_doc =
    doc
    |> Map.put(:_rev, first_rev)
  resp = Couch.put("/#{db_name}/foo", body: updated_doc)
  assert resp.status_code == 409
  # same as before, but with binary
  bin_att_doc = %{
    _id: "foo",
    _rev: first_rev,
    _attachments: %{
      "foo.txt": %{
        content_type: "text/plain",
        data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
      }
    }
  }
  resp = Couch.put("/#{db_name}/foo", body: bin_att_doc)
  assert resp.status_code == 409
end

@tag :with_db
test "Recreate a deleted document with non-exsistant rev", context do
  db_name = context[:db_name]
  doc = %{_id: "foo", a: "bar", b: 42}
  {:ok, resp} = create_doc(db_name, doc)
  first_rev = resp.body["rev"]
  resp = Couch.delete("/#{db_name}/foo?rev=#{first_rev}")
  assert resp.status_code == 200
  # random non-existant prev rev
  updated_doc =
    doc
    |> Map.put(:_rev, "1-asfafasdf")
  resp = Couch.put("/#{db_name}/foo", body: updated_doc)
  assert resp.status_code == 409
  # random non-existant prev rev with bin
  bin_att_doc = %{
    _id: "foo",
    _rev: "1-aasasfasdf",
    _attachments: %{
      "foo.txt": %{
        content_type: "text/plain",
        data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
      }
    }
  }
  resp = Couch.put("/#{db_name}/foo", body: bin_att_doc)
  assert resp.status_code == 409
end

@tag :with_db
test "COUCHDB-1265 - changes feed after we try and break the update_seq tree",
    context do
  db_name = context[:db_name]
  # Test COUCHDB-1265 - Reinserting an old revision into the revision tree causes
  # duplicates in the update_seq tree.
  revs = create_rev_doc(db_name, "a", 3)
  # Reinsert the oldest revision with new_edits=false (replication-style write).
  resp =
    Couch.put("/#{db_name}/a",
      body: Enum.at(revs, 0),
      query: [new_edits: false]
    )
  assert resp.status_code == 201
  resp =
    Couch.put("/#{db_name}/a",
      body: Enum.at(revs, -1)
    )
  assert resp.status_code == 201
  # The changes feed must report exactly one entry for the document.
  resp = Couch.get("/#{db_name}/_changes")
  assert resp.status_code == 200
  assert length(resp.body["results"]) == 1
end

# function to create a doc with multiple revisions
defp create_rev_doc(db_name, id, num_revs) do
  doc = %{_id: id, count: 0}
  {:ok, resp} = create_doc(db_name, doc)
  create_rev_doc(db_name, id, num_revs, [Map.put(doc, :_rev, resp.body["rev"])])
end

# Recursive step: keeps writing updated revisions (threading the latest _rev)
# until `num_revs` snapshots have been accumulated; returns them oldest-first.
defp create_rev_doc(db_name, id, num_revs, revs) do
  if length(revs) < num_revs do
    doc = %{_id: id, _rev: Enum.at(revs, -1)[:_rev], count: length(revs)}
    {:ok, resp} = create_doc(db_name, doc)
    create_rev_doc(
      db_name,
      id,
      num_revs,
      revs ++ [Map.put(doc, :_rev, resp.body["rev"])]
    )
  else
    revs
  end
end
end
| 26.676647 | 83 | 0.60404 |
9ee9202c6c82f7afe3f982ef2be280d355d6803a | 715 | ex | Elixir | server/lib/secure_notepad_server/options.ex | thooton/secure-notepad | e780c660a9ea6e425a0e06ff1f9469f527251815 | [
"MIT"
] | null | null | null | server/lib/secure_notepad_server/options.ex | thooton/secure-notepad | e780c660a9ea6e425a0e06ff1f9469f527251815 | [
"MIT"
] | null | null | null | server/lib/secure_notepad_server/options.ex | thooton/secure-notepad | e780c660a9ea6e425a0e06ff1f9469f527251815 | [
"MIT"
] | null | null | null | defmodule SecureNotepadServer.Options do
use Agent

# Reads priv/config.json once at startup; the Poison-decoded map is held as
# the Agent's state for the life of the process.
def start_link(_) do
  state = Path.join(:code.priv_dir(:secure_notepad_server), "config.json")
  |> File.read!
  |> Poison.decode!
  Agent.start_link(fn -> state end, name: __MODULE__)
end

# Returns the full decoded configuration map.
def get do
  Agent.get(__MODULE__, & &1)
end

# Returns the configured real-IP header name (lower-cased) as a one-element
# list, or [] when "real_ip_header" is absent or not a string.
def remoteip_headers do
  header_name = get()["real_ip_header"]
  if is_binary(header_name) do
    [header_name |> String.downcase]
  else
    []
  end
end

# Rate-limit options: "max_requests_per_minute" requests per 60s window,
# keyed by client IP. NOTE(review): `try_parse_int` presumably coerces the
# JSON value to an integer — confirm its behavior for missing/non-numeric values.
def ratelimit_options do
  reqs_per_minute =
    get()["max_requests_per_minute"]
    |> SecureNotepadServer.Argon2w.try_parse_int
  [
    rate_limit: {"a_request", 60_000, reqs_per_minute},
    by: :ip
  ]
end
end
| 21.029412 | 76 | 0.655944 |
9ee94872e6bdddb0306cdd92d8a5d5613f72771a | 113 | exs | Elixir | config/config.exs | hlappa/BB | 0a57a326621ff0d7f73d71be85b14d13bfa3b168 | [
"MIT"
] | null | null | null | config/config.exs | hlappa/BB | 0a57a326621ff0d7f73d71be85b14d13bfa3b168 | [
"MIT"
] | null | null | null | config/config.exs | hlappa/BB | 0a57a326621ff0d7f73d71be85b14d13bfa3b168 | [
"MIT"
] | null | null | null | import Config
# Binance API credentials — intentionally blank in the repo; supply real
# values per environment / secret store.
config :binance,
  api_key: "",
  secret_key: ""

# Trading defaults for the :bb application (pair symbol and order quantity).
config :bb,
  symbol: "XRPEUR",
  quantity: 100
| 11.3 | 19 | 0.646018 |
9ee9726059132eb7e2610786002bbdfcfa9de2ac | 1,334 | ex | Elixir | host_core/lib/host_core/control_interface/acl.ex | adobe-platform/wasmcloud-otp | bcfcdf9814bc529e67c954eacabdc9a05c772cfa | [
"Apache-2.0"
] | null | null | null | host_core/lib/host_core/control_interface/acl.ex | adobe-platform/wasmcloud-otp | bcfcdf9814bc529e67c954eacabdc9a05c772cfa | [
"Apache-2.0"
] | null | null | null | host_core/lib/host_core/control_interface/acl.ex | adobe-platform/wasmcloud-otp | bcfcdf9814bc529e67c954eacabdc9a05c772cfa | [
"Apache-2.0"
] | null | null | null | defmodule HostCore.ControlInterface.ACL do
@moduledoc false

# Returns one map per running actor instance
# (%{id, revision, image_ref, instance_id}).
def all_actors() do
  HostCore.Actors.ActorSupervisor.all_actors()
  |> Enum.flat_map(fn {id, pids} ->
    # All instances of an actor share the same public key, so the revision
    # is looked up once per id.
    revision = get_revision(id)
    pids
    |> Enum.map(fn pid ->
      %{
        id: id,
        revision: revision,
        image_ref: HostCore.Actors.ActorModule.ociref(pid),
        instance_id: HostCore.Actors.ActorModule.instance_id(pid)
      }
    end)
  end)
end

# Returns one map per running capability provider; revision is hard-coded to
# 0 until provider revisions are available (see TODO below).
def all_providers() do
  # TODO: retrieve revision information for provider
  HostCore.Providers.ProviderSupervisor.all_providers()
  |> Enum.map(fn {pid, pk, link, _contract, instance_id} ->
    %{
      id: pk,
      link_name: link,
      revision: 0,
      image_ref: HostCore.Providers.ProviderModule.ociref(pid),
      instance_id: instance_id
    }
  end)
end

# Matches OCI references stored against the public key in the :refmap_table
# ETS table; returns the raw :ets.match result (a list of match lists).
def find_oci_for_pk(pk) do
  :ets.match(:refmap_table, {:"$1", pk})
end

# Returns [claims] for the key, or nil when absent from :claims_table.
def find_claims_for_pk(pk) do
  case :ets.lookup(:claims_table, pk) do
    [{_pk, claims}] -> [claims]
    _ -> nil
  end
end

# Revision parsed from the claims' :rev field (a decimal string, per the
# String.to_integer call); defaults to 0 when no claims are cached.
def get_revision(pk) do
  case find_claims_for_pk(pk) do
    [claims] -> String.to_integer(claims.rev)
    _ -> 0
  end
end

# First OCI reference recorded for the key, or nil when none exists.
def get_image_ref(pk) do
  case find_oci_for_pk(pk) do
    [[oci]] -> oci
    _ -> nil
  end
end
end
| 22.233333 | 67 | 0.605697 |
9ee9780971f677cc8c7610c9c1882f1499b2296d | 535 | ex | Elixir | clients/elixir/generated/lib/swaggy_jenkins/model/pipeline_run_nodeedges.ex | cliffano/jenkins-api-clients-generator | 522d02b3a130a29471df5ec1d3d22c822b3d0813 | [
"MIT"
] | null | null | null | clients/elixir/generated/lib/swaggy_jenkins/model/pipeline_run_nodeedges.ex | cliffano/jenkins-api-clients-generator | 522d02b3a130a29471df5ec1d3d22c822b3d0813 | [
"MIT"
] | null | null | null | clients/elixir/generated/lib/swaggy_jenkins/model/pipeline_run_nodeedges.ex | cliffano/jenkins-api-clients-generator | 522d02b3a130a29471df5ec1d3d22c822b3d0813 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule SwaggyJenkins.Model.PipelineRunNodeedges do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"id",
:"_class"
]
@type t :: %__MODULE__{
:"id" => String.t | nil,
:"_class" => String.t | nil
}
end
defimpl Poison.Decoder, for: SwaggyJenkins.Model.PipelineRunNodeedges do
def decode(value, _options) do
value
end
end
| 19.107143 | 91 | 0.671028 |
9ee9802963d35be52aa0b0858eda68f3d250f162 | 2,309 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/identity_aware_proxy.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/identity_aware_proxy.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/identity_aware_proxy.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AppEngine.V1.Model.IdentityAwareProxy do
@moduledoc """
Identity-Aware Proxy
## Attributes
- enabled (boolean()): Whether the serving infrastructure will authenticate and authorize all incoming requests.If true, the oauth2_client_id and oauth2_client_secret fields must be non-empty. Defaults to: `null`.
- oauth2ClientId (String.t): OAuth2 client ID to use for the authentication flow. Defaults to: `null`.
- oauth2ClientSecret (String.t): OAuth2 client secret to use for the authentication flow.For security reasons, this value cannot be retrieved via the API. Instead, the SHA-256 hash of the value is returned in the oauth2_client_secret_sha256 field.@InputOnly Defaults to: `null`.
- oauth2ClientSecretSha256 (String.t): Hex-encoded SHA-256 hash of the client secret.@OutputOnly Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:enabled => any(),
:oauth2ClientId => any(),
:oauth2ClientSecret => any(),
:oauth2ClientSecretSha256 => any()
}
field(:enabled)
field(:oauth2ClientId)
field(:oauth2ClientSecret)
field(:oauth2ClientSecretSha256)
end
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.IdentityAwareProxy do
def decode(value, options) do
GoogleApi.AppEngine.V1.Model.IdentityAwareProxy.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.IdentityAwareProxy do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.508772 | 280 | 0.750974 |
9ee9ae5269818c17890fff4ba262254d0050edcd | 122 | exs | Elixir | config/config.exs | gtronset/honeycomb | 0abe9283864aa7b7149b94fff515653da97bdf18 | [
"MIT"
] | null | null | null | config/config.exs | gtronset/honeycomb | 0abe9283864aa7b7149b94fff515653da97bdf18 | [
"MIT"
] | 1 | 2019-09-17T22:06:14.000Z | 2019-09-17T22:06:14.000Z | config/config.exs | gtronset/basalt | 0abe9283864aa7b7149b94fff515653da97bdf18 | [
"MIT"
] | null | null | null | use Mix.Config
# Configure the JUnit XML formatter only for the test environment; the
# report path is presumably where CI collects results — confirm.
if Mix.env() == :test do
  config :junit_formatter,
    report_dir: "/tmp/basalt-test-results/exunit"
end
| 17.428571 | 49 | 0.704918 |
9ee9d0a16882d9301979c8813e141ff3e8d9777b | 1,502 | ex | Elixir | lib/nerves_hub_ca/release/tasks.ex | nerves-hub/certificate_authority | 5d8c7bed3fc0b151b496592b3c1dc4422680699c | [
"Apache-2.0"
] | 10 | 2018-09-11T18:38:47.000Z | 2021-11-25T11:14:06.000Z | lib/nerves_hub_ca/release/tasks.ex | nerves-hub/certificate_authority | 5d8c7bed3fc0b151b496592b3c1dc4422680699c | [
"Apache-2.0"
] | 8 | 2018-06-12T20:56:10.000Z | 2019-08-01T16:10:49.000Z | lib/nerves_hub_ca/release/tasks.ex | nerves-hub/certificate_authority | 5d8c7bed3fc0b151b496592b3c1dc4422680699c | [
"Apache-2.0"
] | 7 | 2018-09-07T17:27:56.000Z | 2022-02-03T07:29:01.000Z | defmodule NervesHubCA.Release.Tasks do
alias Ecto.Migrator

@otp_app :nerves_hub_ca
# Applications that must be running before the repos can start.
@start_apps [:logger, :ssl, :postgrex, :ecto_sql]

# Release entry point: boots the minimal app set, runs all pending Ecto
# migrations, evaluates priv/repo/seeds.exs, then halts the VM.
def migrate_and_seed do
  init(@otp_app, @start_apps)
  run_migrations_for(@otp_app)
  run_seed_script("#{seed_path(@otp_app)}/seeds.exs")
  stop()
end

# Loads the app, starts its dependencies, and starts every configured repo
# with a small connection pool (migrations need few connections).
defp init(app, start_apps) do
  IO.puts("Loading nerves_hub_ca app for migrations...")
  Application.load(app)
  IO.puts("Starting dependencies...")
  Enum.each(start_apps, &Application.ensure_all_started/1)
  IO.puts("Starting repos...")
  app
  |> Application.get_env(:ecto_repos, [])
  |> Enum.each(& &1.start_link(pool_size: 2))
end

# Halts the VM once all tasks complete.
defp stop do
  IO.puts("Success!")
  :init.stop()
end

# Runs all :up migrations for every repo configured under :ecto_repos.
defp run_migrations_for(app) do
  IO.puts("Running migrations for #{app}")
  app
  |> Application.get_env(:ecto_repos, [])
  |> Enum.each(&Migrator.run(&1, migrations_path(app), :up, all: true))
end

# Evaluates the given seed script in-process.
def run_seed_script(seed_script) do
  IO.puts("Running seed script #{seed_script}...")
  Code.eval_file(seed_script)
end

defp migrations_path(app), do: priv_dir(app, ["repo", "migrations"])
defp seed_path(app), do: priv_dir(app, ["repo"])

# Resolves a path under the application's priv dir; raises for unknown apps.
defp priv_dir(app, path) when is_list(path) do
  case :code.priv_dir(app) do
    priv_path when is_list(priv_path) or is_binary(priv_path) ->
      Path.join([priv_path] ++ path)
    {:error, :bad_name} ->
      raise ArgumentError, "unknown application: #{inspect(app)}"
  end
end
end
| 24.225806 | 73 | 0.664447 |
9ee9ddd779386caca0ab8380d2d60cb90074d8bf | 1,585 | ex | Elixir | clients/memcache/lib/google_api/memcache/v1beta2/model/weekly_cycle.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/memcache/lib/google_api/memcache/v1beta2/model/weekly_cycle.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/memcache/lib/google_api/memcache/v1beta2/model/weekly_cycle.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Memcache.V1beta2.Model.WeeklyCycle do
@moduledoc """
Time window specified for weekly operations.
## Attributes
* `schedule` (*type:* `list(GoogleApi.Memcache.V1beta2.Model.Schedule.t)`, *default:* `nil`) - User can specify multiple windows in a week. Minimum of 1 window.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:schedule => list(GoogleApi.Memcache.V1beta2.Model.Schedule.t())
}
field(:schedule, as: GoogleApi.Memcache.V1beta2.Model.Schedule, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Memcache.V1beta2.Model.WeeklyCycle do
def decode(value, options) do
GoogleApi.Memcache.V1beta2.Model.WeeklyCycle.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Memcache.V1beta2.Model.WeeklyCycle do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.723404 | 164 | 0.745741 |
9ee9e382e043bb8ed1363847c769a86491f7eec8 | 2,125 | ex | Elixir | lib/ambry_web/views/api/book_view.ex | doughsay/ambry | c04e855bf06a6b00b8053c6eacb2eac14a56a37c | [
"MIT"
] | 12 | 2021-09-30T20:51:49.000Z | 2022-01-27T04:09:32.000Z | lib/ambry_web/views/api/book_view.ex | doughsay/ambry | c04e855bf06a6b00b8053c6eacb2eac14a56a37c | [
"MIT"
] | 76 | 2021-10-01T05:45:11.000Z | 2022-03-28T04:12:39.000Z | lib/ambry_web/views/api/book_view.ex | doughsay/ambry | c04e855bf06a6b00b8053c6eacb2eac14a56a37c | [
"MIT"
] | 2 | 2021-10-04T19:27:28.000Z | 2022-01-13T22:36:38.000Z | defmodule AmbryWeb.API.BookView do
use AmbryWeb, :view
alias Ambry.Books.Book
alias Ambry.Series.SeriesBook
alias AmbryWeb.API.BookView
def render("index.json", %{books: books, has_more?: has_more?}) do
%{
data: render_many(books, BookView, "book_index.json"),
hasMore: has_more?
}
end
def render("show.json", %{book: book}) do
%{data: render_one(book, BookView, "book_show.json")}
end
def render("book_index.json", %{book: book}) do
book_common(book)
end
def render("book_show.json", %{book: book}) do
book
|> book_common()
|> Map.merge(book_details(book))
end
  # Merges the series position (`bookNumber`) into the common payload of the
  # wrapped book.
  defp book_common(%SeriesBook{book: book, book_number: book_number}) do
    Map.merge(book_common(book), %{bookNumber: book_number})
  end

  # Builds the payload shared by every book representation: identity, cover
  # image, authors, and the (name-sorted) series memberships.
  # NOTE(review): assumes the author and `series_books`/`series` associations
  # are preloaded — confirm against the callers.
  defp book_common(%Book{} = book) do
    %{
      id: book.id,
      title: book.title,
      imagePath: book.image_path,
      authors:
        book
        |> authors()
        |> Enum.map(fn author ->
          %{
            id: author.id,
            personId: author.person_id,
            name: author.name
          }
        end),
      series:
        book.series_books
        |> Enum.sort_by(& &1.series.name)
        |> Enum.map(fn series_book ->
          %{
            id: series_book.series.id,
            name: series_book.series.name,
            bookNumber: series_book.book_number
          }
        end)
    }
  end
defp book_details(%Book{} = book) do
%{
description: book.description,
published: book.published,
media:
Enum.map(book.media, fn media ->
duration = Decimal.to_float(media.duration)
%{
id: media.id,
abridged: media.abridged,
fullCast: media.full_cast,
duration: duration,
narrators:
Enum.map(media.narrators, fn narrator ->
%{
personId: narrator.person_id,
name: narrator.name
}
end)
}
end)
}
end
defp authors(%Book{authors: authors}) when is_list(authors), do: authors
end
| 24.147727 | 74 | 0.552471 |
9eea1d70059e00bb7e1902be8f88f8e5a233a436 | 734 | ex | Elixir | lib/console_web/views/function_view.ex | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | null | null | null | lib/console_web/views/function_view.ex | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | 1 | 2021-04-03T09:29:31.000Z | 2021-04-03T09:29:31.000Z | lib/console_web/views/function_view.ex | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | null | null | null | defmodule ConsoleWeb.FunctionView do
use ConsoleWeb, :view
alias ConsoleWeb.FunctionView
def render("index.json", %{functions: functions}) do
render_many(functions, FunctionView, "function.json")
end
def render("show.json", %{function: function}) do
render_one(function, FunctionView, "function.json")
end
def render("function.json", %{function: function}) do
%{
id: function.id,
name: function.name,
type: function.type,
format: function.format,
body: function.body,
active: function.active,
}
end
def append_function(json, function) do
function_json = render_one(function, FunctionView, "show.json")
Map.put(json, :function, function_json)
end
end
| 25.310345 | 67 | 0.686649 |
9eea28b50acf9c9459ad8cc60b60540668c59d38 | 3,553 | ex | Elixir | lib/saxy/xmerl.ex | duffelhq/saxy | fb37f1d9ce919e6085a924c2483c515ee6cb997b | [
"MIT"
] | null | null | null | lib/saxy/xmerl.ex | duffelhq/saxy | fb37f1d9ce919e6085a924c2483c515ee6cb997b | [
"MIT"
] | null | null | null | lib/saxy/xmerl.ex | duffelhq/saxy | fb37f1d9ce919e6085a924c2483c515ee6cb997b | [
"MIT"
] | null | null | null | defmodule Saxy.Xmerl do
@moduledoc """
Provides functions to parse a XML document to
[xmerl format](https://github.com/erlang/otp/blob/master/lib/xmerl/include/xmerl.hrl)
data structure.
See "Types" section for more information.
"""
import Saxy.Xmerl.Records
@type position() :: integer()
@type name() :: atom()
@type expanded_name() :: charlist()
@type content() :: [text() | element()]
@type parent() :: {name(), position()}
@type namespace_info() :: {charlist(), charlist()}
@type value() :: [iolist() | atom() | integer()]
@type language() :: charlist()
@type namespace() ::
record(:xmlNamespace,
default: [],
nodes: []
)
@type text() ::
record(:xmlText,
value: value(),
pos: position(),
parents: [parent()],
language: language()
)
@type attribute() ::
record(:xmlAttribute,
name: name(),
expanded_name: expanded_name(),
nsinfo: namespace_info(),
namespace: namespace(),
pos: position(),
value: value(),
normalized: boolean()
)
@type element() ::
record(:xmlElement,
name: name(),
expanded_name: expanded_name(),
nsinfo: namespace_info(),
namespace: namespace(),
attributes: [attribute()],
pos: position(),
content: [content()],
parents: [parent()]
)
@doc """
Parses XML document into Erlang [xmerl](http://erlang.org/doc/man/xmerl.html) format.
Xmerl format requires tag and attribute names to be atoms. By default Saxy uses
`String.to_existing_atom/1` to avoid creating atoms at runtime. You could override
this behaviour by specifying `:atom_fun` option to `String.to_atom/1`.
Warning: However, `String.to_atom/1` function creates atoms dynamically and atoms are not
garbage-collected. Therefore, you should not use this if the input XML cannot be trusted,
such as input received from a socket or during a web request.
## Examples
iex> string = File.read!("./test/support/fixture/foo.xml")
iex> Saxy.Xmerl.parse_string(string)
{:ok,
{:xmlElement,
:foo,
:foo,
[],
{:xmlNamespace, [], []},
[],
1,
[{:xmlAttribute, :bar, :bar, [], [], [], 1, [], 'value', :undefined}],
[],
[],
[],
:undeclared}}
## Options
* `:atom_fun` - The function to convert string to atom. Defaults to `String.to_existing_atom/1`.
* `:expand_entity` - specifies how external entity references should be handled. Three supported strategies respectively are:
* `:keep` - keep the original binary, for example `Orange ®` will be expanded to `"Orange ®"`, this is the default strategy.
* `:skip` - skip the original binary, for example `Orange ®` will be expanded to `"Orange "`.
* `{mod, fun, args}` - take the applied result of the specified MFA.
"""
@spec parse_string(data :: binary()) :: {:ok, element()} | {:error, Saxy.ParseError.t()}
def parse_string(data, options \\ []) do
{atom_fun, options} = Keyword.pop(options, :atom_fun, &String.to_existing_atom/1)
state = %Saxy.Xmerl.State{atom_fun: atom_fun}
case Saxy.parse_string(data, __MODULE__.Handler, state, options) do
{:ok, %{stack: [document]}} ->
{:ok, document}
{:error, _reason} = error ->
error
end
end
end
| 30.110169 | 136 | 0.584858 |
9eea2ea8957ae4949f0f871b614912f38e43656c | 417 | exs | Elixir | test/exshome_web/views/error_view_test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | 2 | 2021-12-21T16:32:56.000Z | 2022-02-22T17:06:39.000Z | test/exshome_web/views/error_view_test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | null | null | null | test/exshome_web/views/error_view_test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | null | null | null | defmodule ExshomeWeb.ErrorViewTest do
use ExshomeWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(ExshomeWeb.ErrorView, "404.html", []) == "Not Found"
end
test "renders 500.html" do
assert render_to_string(ExshomeWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 27.8 | 92 | 0.733813 |
9eea34d1c7f0106b56b1661114e8ba3d568c4d71 | 1,064 | exs | Elixir | mix.exs | mrDarcyMurphy/elixir-phoenix-playground | 8ae7237609a52c9a7a3a31f354329acadfb092a9 | [
"MIT"
] | null | null | null | mix.exs | mrDarcyMurphy/elixir-phoenix-playground | 8ae7237609a52c9a7a3a31f354329acadfb092a9 | [
"MIT"
] | null | null | null | mix.exs | mrDarcyMurphy/elixir-phoenix-playground | 8ae7237609a52c9a7a3a31f354329acadfb092a9 | [
"MIT"
] | null | null | null | defmodule MyApp.Mixfile do
use Mix.Project
def project do
[app: :my_app,
version: "0.0.1",
elixir: "~> 1.0",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[mod: {MyApp, []},
applications: [:phoenix, :phoenix_html, :cowboy, :logger,
:phoenix_ecto, :postgrex]]
end
# Specifies which paths to compile per environment
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies
#
# Type `mix help deps` for examples and options
defp deps do
[{:phoenix, "~> 0.14"},
{:phoenix_ecto, "~> 0.5"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 1.1"},
{:phoenix_live_reload, "~> 0.4", only: :dev},
{:cowboy, "~> 1.0"}]
end
end
| 26.6 | 63 | 0.595865 |
9eea43416657cd089c5368908df318c1628fe3ce | 1,144 | exs | Elixir | config/config.exs | y86/getaways_backend | 47f3703c19173a5d5bd53b7bad3a5e3982edb584 | [
"MIT"
] | null | null | null | config/config.exs | y86/getaways_backend | 47f3703c19173a5d5bd53b7bad3a5e3982edb584 | [
"MIT"
] | 6 | 2020-01-31T19:44:15.000Z | 2021-09-02T04:26:49.000Z | config/config.exs | y86/getaways_backend | 47f3703c19173a5d5bd53b7bad3a5e3982edb584 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :cors_plug,
  # NOTE(review): the "*" entry admits any origin, which makes the explicit
  # host entries redundant and effectively disables origin checking — confirm
  # this is intended outside of development.
  origin: ["http://localhost:3000", "http://192.168.1.39:63467", "*"],
  max_age: 86400,
  methods: ["GET", "POST"]

config :getaways,
  ecto_repos: [Getaways.Repo]

# Configures the endpoint
# NOTE(review): secret_key_base is committed to source control here; prefer
# loading it from the environment in production configuration.
config :getaways, GetawaysWeb.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "r/KECNh6PcQMEwqy78veF/hGvvy+MAiOa9fL2tbURvl4D4K3FZiF4p8zwesH9+dW",
  render_errors: [view: GetawaysWeb.ErrorView, accepts: ~w(json)],
  pubsub: [name: Getaways.PubSub, adapter: Phoenix.PubSub.PG2]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 31.777778 | 86 | 0.749126 |
9eea5336595e539b47ae21915f66224b3228c483 | 2,688 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/region_instance_group_list.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/region_instance_group_list.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/region_instance_group_list.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.RegionInstanceGroupList do
@moduledoc """
Contains a list of InstanceGroup resources.
## Attributes
- id (String.t): [Output Only] Unique identifier for the resource; defined by the server. Defaults to: `null`.
- items ([InstanceGroup]): A list of InstanceGroup resources. Defaults to: `null`.
- kind (String.t): The resource type. Defaults to: `null`.
- nextPageToken (String.t): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. Defaults to: `null`.
- selfLink (String.t): [Output Only] Server-defined URL for this resource. Defaults to: `null`.
- warning (AcceleratorTypeAggregatedListWarning): Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:id => any(),
:items => list(GoogleApi.Compute.V1.Model.InstanceGroup.t()),
:kind => any(),
:nextPageToken => any(),
:selfLink => any(),
:warning => GoogleApi.Compute.V1.Model.AcceleratorTypeAggregatedListWarning.t()
}
field(:id)
field(:items, as: GoogleApi.Compute.V1.Model.InstanceGroup, type: :list)
field(:kind)
field(:nextPageToken)
field(:selfLink)
field(:warning, as: GoogleApi.Compute.V1.Model.AcceleratorTypeAggregatedListWarning)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.RegionInstanceGroupList do
def decode(value, options) do
GoogleApi.Compute.V1.Model.RegionInstanceGroupList.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.RegionInstanceGroupList do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.666667 | 381 | 0.736235 |
9eea5d7e81a23f10a3404cff0efb9f46172cf24b | 1,122 | exs | Elixir | mix.exs | jbrisbin/exslackbot | dcf51ca63685b93afe93f8e317f2808e32e95569 | [
"Apache-2.0"
] | 4 | 2016-10-17T20:30:32.000Z | 2016-10-21T16:24:28.000Z | mix.exs | jbrisbin/exslackbot | dcf51ca63685b93afe93f8e317f2808e32e95569 | [
"Apache-2.0"
] | null | null | null | mix.exs | jbrisbin/exslackbot | dcf51ca63685b93afe93f8e317f2808e32e95569 | [
"Apache-2.0"
] | null | null | null | defmodule ExSlackBot.Mixfile do
use Mix.Project
def project do
[
app: :exslackbot,
version: "0.1.0",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()
]
end
def description do
~S"""
ExSlackBot is a helper library for writing SlackBots using the Slack Real-Time Messaging API.
"""
end
def package do
[
maintainers: ["Jon Brisbin"],
licenses: ["Apache-2.0"],
links: %{GitHub: "https://github.com/jbrisbin/exslackbot"}
]
end
def application do
app = [applications: [:logger, :websocket_client, :slackex]]
cond do
Mix.env == :dev or Mix.env == :test ->
bots = Application.get_env(:exslackbot, :bots)
app ++ [mod: {ExSlackBot.Application, bots}]
true ->
app
end
end
defp deps do
[
{:websocket_client, "~> 1.1"},
{:slackex, "~> 0.0.1"},
{:temp, "~> 0.4"},
{:ex_spec, "~> 1.0.0", only: :test},
{:excoveralls, "~> 0.4.3", only: :test},
{:ex_doc, "~> 0.11", only: :dev}
]
end
end
| 22 | 97 | 0.543672 |
9eea879793881e12c21ca22e4ec98534a548299d | 4,861 | ex | Elixir | lib/release_tasks/migrate_schemas.ex | Yamilquery/kaufmann_ex | 5158ae8f524f8780647766ff35db88ebd761da29 | [
"MIT"
] | null | null | null | lib/release_tasks/migrate_schemas.ex | Yamilquery/kaufmann_ex | 5158ae8f524f8780647766ff35db88ebd761da29 | [
"MIT"
] | null | null | null | lib/release_tasks/migrate_schemas.ex | Yamilquery/kaufmann_ex | 5158ae8f524f8780647766ff35db88ebd761da29 | [
"MIT"
] | null | null | null | defmodule KaufmannEx.ReleaseTasks.MigrateSchemas do
@moduledoc """
Task for registering all schemas in `priv/schemas` with the schema registry.
Expects
- schemas to be defined in `priv/schemas`.
- an `event_metadata.avsc` schema should be defined and required by all events
  Can be called in a production attached console, or via a release task. Should not have any requirements beyond itself.
This script will load all required dependencies and should not need further configuration.
```
# Attempt to create or update all schemas in `priv/schemas`
KaufmannEx.ReleaseTasks.MigrateSchemas.migrate_schemas(:app_name)
# delete and recreate all schemas
KaufmannEx.ReleaseTasks.MigrateSchemas.reset_schemas(:app_name)
```
"""
alias KaufmannEx.Schemas
# @schema_path 'priv/schemas'
defp ensure_startup do
:ok = Application.ensure_started(:logger)
{:ok, _} = Application.ensure_all_started(:httpoison)
{:ok, _} = Application.ensure_all_started(:kaufmann_ex)
end
defp priv_dir(app) do
"#{:code.priv_dir(app)}"
end
@doc """
Attempts to update all schemas defined in `app/priv/schemas`.
Expects a `event_metadata.avsc` metadata scheme to be defined for all other schemas.
"""
def migrate_schemas(app \\ :kaufmann_ex)
def migrate_schemas(path) when is_binary(path) do
true = File.exists?(path)
meta_data_schema = load_metadata(path)
path
|> scan_dir()
|> Enum.map(&load_and_parse_schema/1)
|> Enum.map(&inject_metadata(&1, meta_data_schema))
|> Enum.map(®ister_schema/1)
|> Enum.map(&IO.inspect/1)
end
def migrate_schemas(app) do
ensure_startup()
IO.puts("Migrating Schemas")
meta_data_schema = load_metadata(app)
app
|> priv_dir()
|> Path.join("schemas")
|> scan_dir()
|> Enum.map(&load_and_parse_schema/1)
|> Enum.map(&inject_metadata(&1, meta_data_schema))
|> Enum.map(®ister_schema/1)
|> Enum.map(&IO.inspect/1)
end
@doc """
Attempts to delete and recreate all schemas defined in `app/priv/schemas`
Expects a `event_metadata.avsc` metadata scheme to be defined for all other schemas.
"""
def reset_schemas(app \\ :kaufmann_ex) do
ensure_startup()
IO.puts("Resetting Schemas")
meta_data_schema = load_metadata(app)
app
|> priv_dir()
|> Path.join("schemas")
|> scan_dir()
|> Enum.map(&load_and_parse_schema/1)
|> Enum.map(&inject_metadata(&1, meta_data_schema))
|> Enum.map(&reset_schema/1)
|> Enum.map(&IO.inspect/1)
end
def load_metadata(path) when is_binary(path) do
meta_data_schema =
path
|> Path.join("event_metadata.avsc")
|> load_and_parse_schema()
{:ok, _, _} = register_schema(meta_data_schema)
meta_data_schema
end
def load_metadata(app) do
app
|> priv_dir()
|> Path.join("schemas")
|> load_metadata()
end
def schema_registered({schema_name, schema}) do
case Schemas.test(schema_name, schema) do
{:ok, res} -> {:ok, res}
{:error, %{"error_code" => 40_401}} -> {:ok, %{"is_compatible" => false}}
end
rescue
exception -> {:error, exception}
end
@spec register_schema({String.t(), map}) :: {atom, String.t(), any}
def register_schema({event_name, _} = schema) do
with {:ok, status} <- update_schema(schema) do
{:ok, event_name, status}
else
{:error, error} ->
{:error, event_name, error}
end
end
defp update_schema(schema) do
case Schemas.register(schema) do
{:ok, _} ->
{:ok, "Schema updated"}
{:error, %{"error_code" => 409}} ->
{:error, "Incompatible schema"}
{:error, error} ->
{:error, error}
end
end
  # Deletes any existing versions of `event_name`, then registers the schema
  # from scratch. The delete result is intentionally ignored — the subject
  # may not exist yet; registration, however, must succeed.
  def reset_schema({event_name, _} = schema) do
    _ = Schemas.delete(event_name)
    {:ok, _} = Schemas.register(schema)
  end
@spec load_and_parse_schema(Path.t()) :: {String.t(), map}
defp load_and_parse_schema(schema_path) do
{:ok, schema} =
schema_path
|> File.read!()
|> Poison.decode()
schema_name = schema_path |> Path.basename() |> String.trim(".avsc")
{schema_name, schema}
end
defp inject_metadata({event_name, event_schema}, {_, meta_data_schema}) do
# Only inject metadata into event-type schemas
if String.match?(event_name, ~r/command\.|event\.|query\./) do
{event_name, [meta_data_schema, event_schema]}
else
{event_name, event_schema}
end
end
defp scan_dir(dir) do
files = File.ls!(dir)
child_schemas =
files
|> Enum.map(&Path.join(dir, &1))
|> Enum.filter(&File.dir?/1)
|> Enum.map(&scan_dir/1)
files
|> Enum.filter(&String.match?(&1, ~r/\.avsc/))
|> Enum.map(&Path.join(dir, &1))
|> Enum.concat(child_schemas)
|> List.flatten()
end
  # Unwraps an `{:ok, value}` tuple (raises FunctionClauseError otherwise).
  # NOTE(review): not referenced anywhere in this module — candidate for
  # removal.
  defp ok_and({:ok, right}) do
    right
  end
end
| 26.134409 | 120 | 0.651923 |
9eea9e97322ede7c8e45ec85fa23216ffa84de71 | 3,103 | exs | Elixir | test/unit/changeset/ueberauth_test.exs | pauldub/sentinel | 3230e92b68fa76e9a1f6c577bc1c271900e07c72 | [
"MIT"
] | 125 | 2016-01-29T11:46:20.000Z | 2021-06-08T09:25:38.000Z | test/unit/changeset/ueberauth_test.exs | pauldub/sentinel | 3230e92b68fa76e9a1f6c577bc1c271900e07c72 | [
"MIT"
] | 54 | 2016-02-18T01:11:58.000Z | 2017-10-19T11:25:26.000Z | test/unit/changeset/ueberauth_test.exs | britton-jb/sentinel | 3230e92b68fa76e9a1f6c577bc1c271900e07c72 | [
"MIT"
] | 29 | 2016-02-20T12:59:16.000Z | 2018-04-11T14:29:41.000Z | defmodule UeberauthTest do
use Sentinel.UnitCase
#alias Sentinel.Ueberauth
test "identity changeset validates presence of password when invitable is false" do
Mix.Config.persist([sentinel: [invitable: false]])
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{provider: :identity})
assert changeset.errors[:password] == {"can't be blank", []}
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{provider: :identity, uid: "1", password: ""})
assert changeset.errors[:password] == {"can't be blank", []}
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{provider: :identity, uid: "1", password: nil})
assert changeset.errors[:password] == {"can't be blank", []}
end
test "identity changeset does not validates presence of password when invitable is true" do
Mix.Config.persist([sentinel: [invitable: true]])
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{provider: :identity, uid: "1"})
refute changeset.errors[:password] == {"can't be blank", []}
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{provider: :identity, uid: "1", password: ""})
refute changeset.errors[:password] == {"can't be blank", []}
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{provider: :identity, uid: "1", password: nil})
refute changeset.errors[:password] == {"can't be blank", []}
end
test "identity changeset includes the hashed password if valid" do
user = Factory.insert(:user)
params = %{
provider: :identity,
credentials: %{
other: %{
password: "password",
password_confirmation: "password",
}
}
}
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{user_id: user.id, uid: to_string(user.id)}, params)
hashed_pw = Ecto.Changeset.get_change(changeset, :hashed_password)
assert Sentinel.Config.crypto_provider.checkpw(params.credentials.other.password, hashed_pw)
end
test "identity changeset does not include the hashed password if invalid" do
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{"password" => "secret"})
hashed_pw = Ecto.Changeset.get_change(changeset, :hashed_password)
assert hashed_pw == nil
end
test "identity changeset is invalid if user_id is not set" do
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{provider: :identity, uid: "1"})
assert changeset.errors[:user_id] == {"can't be blank", [validation: :required]}
end
test "non-identity changeset cannont have password reset token" do
hashed_password_reset_token =
Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{provider: :facebook})
|> Ecto.Changeset.get_field(:hashed_password_reset_token, nil)
assert is_nil(hashed_password_reset_token)
end
test "non-identity changeset must include uid" do
changeset = Sentinel.Ueberauth.changeset(%Sentinel.Ueberauth{}, %{provider: :facebook})
assert changeset.errors[:uid] == {"can't be blank", [validation: :required]}
end
end
| 41.373333 | 116 | 0.700935 |
9eea9f1fba780bae6a7b80d27063f85b7dcff3ec | 148 | ex | Elixir | mix/no_localized_controller.ex | elixir-cldr/cldr_routes | 0082500daccbc2b0860c0bc451a1c3b33f3fc8c4 | [
"Apache-2.0"
] | null | null | null | mix/no_localized_controller.ex | elixir-cldr/cldr_routes | 0082500daccbc2b0860c0bc451a1c3b33f3fc8c4 | [
"Apache-2.0"
] | null | null | null | mix/no_localized_controller.ex | elixir-cldr/cldr_routes | 0082500daccbc2b0860c0bc451a1c3b33f3fc8c4 | [
"Apache-2.0"
] | null | null | null | defmodule NotLocalizedController do
use MyAppWeb, :controller
def show(conn, _params) do
# %{"page" => "hello"} = params
conn
end
end | 18.5 | 35 | 0.668919 |
9eeacfae105388a2bd2c3cde9ddc771ec520a7c5 | 563 | exs | Elixir | 2017/elixir/day15/mix.exs | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day15/mix.exs | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day15/mix.exs | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | defmodule Day15.Mixfile do
use Mix.Project
def project do
[
app: :day15,
version: "0.1.0",
elixir: "~> 1.5",
start_permanent: Mix.env == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
]
end
end
| 19.413793 | 88 | 0.571936 |
9eeb06121f00aae6675e71abb999f1d8178c51b0 | 599 | exs | Elixir | backend/test/views/error_view_test.exs | hploscar/palike | 71618593ee6e6687e0d1cdc9e923ed8f9c2cc2cb | [
"MIT"
] | null | null | null | backend/test/views/error_view_test.exs | hploscar/palike | 71618593ee6e6687e0d1cdc9e923ed8f9c2cc2cb | [
"MIT"
] | null | null | null | backend/test/views/error_view_test.exs | hploscar/palike | 71618593ee6e6687e0d1cdc9e923ed8f9c2cc2cb | [
"MIT"
] | null | null | null | defmodule Palike.ErrorViewTest do
use Palike.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.json" do
assert render(Palike.ErrorView, "404.json", []) ==
%{errors: %{detail: "Page not found"}}
end
test "render 500.json" do
assert render(Palike.ErrorView, "500.json", []) ==
%{errors: %{detail: "Internal server error"}}
end
test "render any other" do
assert render(Palike.ErrorView, "505.json", []) ==
%{errors: %{detail: "Internal server error"}}
end
end
| 27.227273 | 66 | 0.642738 |
9eeb3912e7a17cc1eece0d98a0f8abd151d5ba9f | 1,132 | ex | Elixir | lib/arc_ecto/definition.ex | amatalai/arc_ecto | 717efd790cc344ba3f12f29127361e0a90f86ae6 | [
"Apache-2.0"
] | null | null | null | lib/arc_ecto/definition.ex | amatalai/arc_ecto | 717efd790cc344ba3f12f29127361e0a90f86ae6 | [
"Apache-2.0"
] | null | null | null | lib/arc_ecto/definition.ex | amatalai/arc_ecto | 717efd790cc344ba3f12f29127361e0a90f86ae6 | [
"Apache-2.0"
] | null | null | null | defmodule Arc.Ecto.Definition do
defmacro __using__(_options) do
definition = __CALLER__.module
quote do
defmodule Module.concat(unquote(definition), "Type") do
@behaviour Ecto.Type
def type, do: Arc.Ecto.Type.type
def cast(value), do: Arc.Ecto.Type.cast(unquote(definition), value)
def load(value), do: Arc.Ecto.Type.load(unquote(definition), value)
def dump(value), do: Arc.Ecto.Type.dump(unquote(definition), value)
end
def url({%{file_name: file_name, updated_at: updated_at}, scope}, version, options) do
url = super({file_name, scope}, version, options)
if options[:signed] do
url
else
case updated_at do
%Ecto.DateTime{} ->
stamp = :calendar.datetime_to_gregorian_seconds(Ecto.DateTime.to_erl(updated_at))
case URI.parse(url).query do
nil -> url <> "?v=#{stamp}"
_ -> url <> "&v=#{stamp}"
end
_ -> url
end
end
end
def url(f, v, options), do: super(f, v, options)
end
end
end
| 31.444444 | 95 | 0.575972 |
9eeb5430e0a2149bcf1f163b47e3c87b577e9ca3 | 648 | ex | Elixir | lib/inch_ex/setup/readme_badge.ex | AndrewDryga/inch_ex | 59997ce05b60902c9b482409b09da7697b267c17 | [
"MIT"
] | null | null | null | lib/inch_ex/setup/readme_badge.ex | AndrewDryga/inch_ex | 59997ce05b60902c9b482409b09da7697b267c17 | [
"MIT"
] | null | null | null | lib/inch_ex/setup/readme_badge.ex | AndrewDryga/inch_ex | 59997ce05b60902c9b482409b09da7697b267c17 | [
"MIT"
] | 1 | 2022-03-17T18:34:45.000Z | 2022-03-17T18:34:45.000Z | defmodule InchEx.Setup.ReadmeBadge do
@readme_filename "README.md"
def run? do
File.exists?(@readme_filename)
end
def run(output) do
if run?() do
IO.puts ""
extract_url(output) |> get_badge_url |> textify |> InchEx.Setup.print
end
end
defp textify(badge_url) do
"""
## Documentation as first-class citizen
You can now add this badge to your #{@readme_filename}:
#{badge_url}
"""
end
defp extract_url(text) do
[_, url] = Regex.run(~r/URL:\ (.+)$/, text)
url
end
defp get_badge_url(project_url) do
"[](#{project_url})"
end
end
| 19.058824 | 75 | 0.623457 |
9eeb604904bebe1f042ff1d6bbdb63a15b285c74 | 228 | exs | Elixir | config/config.exs | altjohndev/my-website | 87d7259744e4d4d0575145aa9131ab7ce4b4f270 | [
"Unlicense"
] | null | null | null | config/config.exs | altjohndev/my-website | 87d7259744e4d4d0575145aa9131ab7ce4b4f270 | [
"Unlicense"
] | 2 | 2021-03-10T08:46:52.000Z | 2021-05-11T04:14:45.000Z | config/config.exs | altjohndev/my-website | 87d7259744e4d4d0575145aa9131ab7ce4b4f270 | [
"Unlicense"
] | null | null | null | import Config
# Imports every config file matching `wildcard`, resolved relative to this
# directory.
wildcard_import = fn wildcard ->
  for config <- wildcard |> Path.expand(__DIR__) |> Path.wildcard() do
    import_config config
  end
end

# Shared configuration first, then environment-specific overrides
# (e.g. dev/, test/, prod/).
wildcard_import.("general/*.exs")
wildcard_import.("#{Mix.env()}/*.exs")
| 20.727273 | 70 | 0.697368 |
9eeb83ff637fed46bcff5e8bd9f89f7491d48e3a | 15,364 | ex | Elixir | lib/iex/lib/iex/autocomplete.ex | farhadi/elixir | 0a37064fd5b67639afb845ff321d6c8ee61e5ee5 | [
"Apache-2.0"
] | 1 | 2019-05-07T12:01:44.000Z | 2019-05-07T12:01:44.000Z | lib/iex/lib/iex/autocomplete.ex | farhadi/elixir | 0a37064fd5b67639afb845ff321d6c8ee61e5ee5 | [
"Apache-2.0"
] | null | null | null | lib/iex/lib/iex/autocomplete.ex | farhadi/elixir | 0a37064fd5b67639afb845ff321d6c8ee61e5ee5 | [
"Apache-2.0"
defmodule IEx.Autocomplete do
  @moduledoc false

  # Tab-completion engine for the IEx shell. The entry point is `expand/2`,
  # which receives the text before the cursor as a *reversed* charlist (so the
  # head of the list is the character closest to the cursor) and returns
  # `{:yes, hint, entries}` or `{:no, '', []}` — the format the Erlang line
  # editor expects. Completion candidates are built as maps with a `:kind` key
  # (:module | :function | :variable | :map_key) and formatted at the end.

  @doc """
  The expansion logic.

  Some of the expansion has to use the current shell environment, which is
  found via the broker.
  """
  def expand(expr, server \\ IEx.Broker)

  # Empty input: offer variables from the binding plus imported functions.
  def expand('', server) do
    expand_variable_or_import("", server)
  end

  def expand([h | t] = expr, server) do
    # `expr` is reversed, so `h` is the character just typed before the cursor.
    helper = get_helper(expr)

    cond do
      helper == ?t ->
        # Line starts with the `t` helper: complete type names.
        expand_custom(expr, server, &get_module_types/1)

      helper == ?b ->
        # Line starts with the `b` helper: complete callback names.
        expand_custom(expr, server, &get_module_callbacks/1)

      h == ?. and t != [] ->
        # Cursor right after a dot: complete the dotted expression.
        expand_dot(reduce(t), server)

      h == ?: and t == [] ->
        # A lone colon: complete Erlang module names.
        expand_erlang_modules()

      identifier?(h) ->
        expand_expr(reduce(expr), server)

      h == ?/ and t != [] and identifier?(hd(t)) ->
        # Completing an arity suffix, e.g. `fun/`.
        expand_expr(reduce(t), server)

      h in '([{' ->
        # Just opened a bracket: same candidates as an empty line.
        expand('')

      true ->
        no()
    end
  end

  # Detects the `t ` / `b ` helper prefix at the beginning of the line.
  # Since `expr` arrives reversed, it is reversed back before matching.
  defp get_helper(expr) do
    with [helper | rest] when helper in 'bt' <- Enum.reverse(expr),
         [space_or_paren, char | _] <- squeeze_spaces(rest),
         true <-
           space_or_paren in ' (' and
             (char in ?A..?Z or char in ?a..?z or char in ?0..?9 or char in '_:') do
      helper
    else
      _ ->
        nil
    end
  end

  # Collapses runs of spaces into a single space.
  # NOTE(review): the charlist pattern below must contain *two* spaces
  # ('  '), as in upstream Elixir — a single space would make this clause
  # match its own result and recurse forever.
  defp squeeze_spaces('  ' ++ rest), do: squeeze_spaces([?\s | rest])
  defp squeeze_spaces(rest), do: rest

  @doc false
  def exports(mod) do
    if Code.ensure_loaded?(mod) and function_exported?(mod, :__info__, 1) do
      # Elixir module: ask __info__/1, dropping __info__ itself.
      mod.__info__(:macros) ++ (mod.__info__(:functions) -- [__info__: 1])
    else
      # Erlang module: fall back to module_info, dropping module_info itself.
      mod.module_info(:exports) -- [module_info: 0, module_info: 1]
    end
  end

  # Characters that may appear in an identifier being completed.
  defp identifier?(h) do
    h in ?a..?z or h in ?A..?Z or h in ?0..?9 or h in [?_, ??, ?!]
  end

  # Completion after a trailing dot: the part before the dot can be an
  # Erlang module atom, an alias, or an expression bound to a variable.
  defp expand_dot(expr, server) do
    case Code.string_to_quoted(expr) do
      {:ok, atom} when is_atom(atom) ->
        expand_call(atom, "", server)

      {:ok, {:__aliases__, _, list}} ->
        expand_elixir_modules(list, "", server)

      {:ok, {_, _, _} = ast_node} ->
        expand_call(ast_node, "", server)

      _ ->
        no()
    end
  end

  # Completion of a partially-typed expression, dispatching on its AST shape.
  defp expand_expr(expr, server) do
    case Code.string_to_quoted(expr) do
      {:ok, atom} when is_atom(atom) ->
        # `:foo` — complete Erlang module names.
        expand_erlang_modules(Atom.to_string(atom))

      {:ok, {atom, _, nil}} when is_atom(atom) ->
        # Bare identifier — variables and imports.
        expand_variable_or_import(Atom.to_string(atom), server)

      {:ok, {:__aliases__, _, [root]}} ->
        # Single alias segment, e.g. `En`.
        expand_elixir_modules([], Atom.to_string(root), server)

      {:ok, {:__aliases__, _, [h | _] = list}} when is_atom(h) ->
        # Multi-segment alias: last segment is the hint, the rest the scope.
        hint = Atom.to_string(List.last(list))
        list = Enum.take(list, length(list) - 1)
        expand_elixir_modules(list, hint, server)

      {:ok, {{:., _, [ast_node, fun]}, _, []}} when is_atom(fun) ->
        # Dotted call like `mod.fu` — complete `fun` within `mod`.
        expand_call(ast_node, Atom.to_string(fun), server)

      _ ->
        no()
    end
  end

  # `t`/`b` helper completion after a dot (e.g. `t Enum.`): only aliases
  # make sense here, since types/callbacks live in Elixir modules.
  defp expand_custom([?. | expr], server, fun) do
    case Code.string_to_quoted(reduce(expr)) do
      {:ok, atom} when is_atom(atom) ->
        no()

      {:ok, {:__aliases__, _, [h | _] = list}} when is_atom(h) ->
        case expand_alias(list, server) do
          {:ok, alias} ->
            expand_elixir_module_custom(alias, "", fun)

          :error ->
            no()
        end

      _ ->
        no()
    end
  end

  # `t`/`b` helper completion without a trailing dot.
  defp expand_custom(expr, server, fun) do
    case Code.string_to_quoted(reduce(expr)) do
      {:ok, atom} when is_atom(atom) ->
        expand_erlang_modules(Atom.to_string(atom))

      {:ok, {:__aliases__, _, [root]}} ->
        expand_elixir_modules([], Atom.to_string(root), server)

      {:ok, {:__aliases__, _, [h | _] = list}} when is_atom(h) ->
        hint = Atom.to_string(List.last(list))
        list = Enum.take(list, length(list) - 1)
        expand_elixir_modules(list, hint, server)

      {:ok, {{:., _, [{:__aliases__, _, list}, type]}, _, []}} when is_atom(type) ->
        # `Mod.ty` — complete the type/callback name within the alias.
        case expand_alias(list, server) do
          {:ok, alias} ->
            expand_elixir_module_custom(alias, Atom.to_string(type), fun)

          :error ->
            no()
        end

      _ ->
        no()
    end
  end

  # Cuts the reversed input down to the token under the cursor (everything
  # after the last space/bracket), re-reverses it, and strips a leading
  # capture (&) or struct (%) marker.
  defp reduce(expr) do
    Enum.reduce(' ([{', expr, fn token, acc ->
      hd(:string.tokens(acc, [token]))
    end)
    |> Enum.reverse()
    |> trim_leading(?&)
    |> trim_leading(?%)
  end

  defp trim_leading([char | rest], char), do: rest
  defp trim_leading(expr, _char), do: expr

  # Successful expansion in the line editor's expected format.
  defp yes(hint, entries) do
    {:yes, String.to_charlist(hint), Enum.map(entries, &String.to_charlist/1)}
  end

  # No expansion available.
  defp no do
    {:no, '', []}
  end

  ## Formatting

  defp format_expansion([], _) do
    no()
  end

  # A single candidate: either extend the hint in-place, or list the entry.
  defp format_expansion([uniq], hint) do
    case to_hint(uniq, hint) do
      "" -> yes("", to_uniq_entries(uniq))
      hint -> yes(hint, [])
    end
  end

  # Several candidates: extend the hint by their longest common prefix, or
  # list them all when the prefix adds nothing.
  defp format_expansion([first | _] = entries, hint) do
    binary = Enum.map(entries, & &1.name)
    length = byte_size(hint)
    prefix = :binary.longest_common_prefix(binary)

    if prefix in [0, length] do
      yes("", Enum.flat_map(entries, &to_entries/1))
    else
      yes(binary_part(first.name, prefix, length - prefix), [])
    end
  end

  ## Expand calls

  # :atom.fun
  defp expand_call(mod, hint, _server) when is_atom(mod) do
    expand_require(mod, hint)
  end

  # Elixir.fun
  defp expand_call({:__aliases__, _, list}, hint, server) do
    case expand_alias(list, server) do
      {:ok, alias} -> expand_require(alias, hint)
      :error -> no()
    end
  end

  # variable.fun_or_key
  defp expand_call({_, _, _} = ast_node, hint, server) do
    # Resolve the variable in the shell binding: a module atom gives
    # function completion, a map gives key completion.
    case value_from_binding(ast_node, server) do
      {:ok, mod} when is_atom(mod) -> expand_call(mod, hint, server)
      {:ok, map} when is_map(map) -> expand_map_field_access(map, hint)
      _otherwise -> no()
    end
  end

  defp expand_call(_, _, _) do
    no()
  end

  defp expand_map_field_access(map, hint) do
    case match_map_fields(map, hint) do
      # The hint already names a complete non-map key: nothing to add.
      [%{kind: :map_key, name: ^hint, value_is_map: false}] -> no()
      map_fields when is_list(map_fields) -> format_expansion(map_fields, hint)
    end
  end

  defp expand_require(mod, hint) do
    format_expansion(match_module_funs(get_module_funs(mod), hint), hint)
  end

  # Bare identifier: variables from the binding, imports from the shell
  # environment, and Kernel.SpecialForms are all candidates.
  defp expand_variable_or_import(hint, server) do
    variables = expand_variable(hint, server)
    imports = imports_from_env(server)
    module_funs = get_module_funs(Kernel.SpecialForms)
    funs = match_module_funs(imports ++ module_funs, hint)
    format_expansion(variables ++ funs, hint)
  end

  defp expand_variable(hint, server) do
    variables_from_binding(hint, server)
    |> Enum.sort()
    |> Enum.map(&%{kind: :variable, name: &1})
  end

  ## Erlang modules

  defp expand_erlang_modules(hint \\ "") do
    format_expansion(match_erlang_modules(hint), hint)
  end

  defp match_erlang_modules(hint) do
    for mod <- match_modules(hint, true), usable_as_unquoted_module?(mod) do
      %{kind: :module, name: mod, type: :erlang}
    end
  end

  ## Elixir modules

  # No scope yet: both aliases and root modules are candidates.
  defp expand_elixir_modules([], hint, server) do
    aliases = match_aliases(hint, server)
    expand_elixir_modules_from_aliases(Elixir, hint, aliases)
  end

  defp expand_elixir_modules(list, hint, server) do
    case expand_alias(list, server) do
      {:ok, alias} -> expand_elixir_modules_from_aliases(alias, hint, [])
      :error -> no()
    end
  end

  # Nested submodules and the module's own functions are both candidates.
  defp expand_elixir_modules_from_aliases(mod, hint, aliases) do
    aliases
    |> Kernel.++(match_elixir_modules(mod, hint))
    |> Kernel.++(match_module_funs(get_module_funs(mod), hint))
    |> format_expansion(hint)
  end

  # Resolves the first alias segment against the shell's alias table,
  # e.g. `alias Foo.Bar` makes `Bar.Baz` expand to `Foo.Bar.Baz`.
  defp expand_alias([name | rest], server) when is_atom(name) do
    case Keyword.fetch(aliases_from_env(server), Module.concat(Elixir, name)) do
      {:ok, name} when rest == [] -> {:ok, name}
      {:ok, name} -> {:ok, Module.concat([name | rest])}
      :error -> {:ok, Module.concat([name | rest])}
    end
  end

  defp expand_alias([_ | _], _) do
    :error
  end

  defp match_aliases(hint, server) do
    for {alias, _mod} <- aliases_from_env(server),
        [name] = Module.split(alias),
        String.starts_with?(name, hint) do
      %{kind: :module, type: :alias, name: name}
    end
  end

  # Finds loaded modules one level beneath `module` whose next segment
  # starts with the hint and is a valid alias piece.
  defp match_elixir_modules(module, hint) do
    name = Atom.to_string(module)
    depth = length(String.split(name, ".")) + 1
    base = name <> "." <> hint

    for mod <- match_modules(base, module == Elixir),
        parts = String.split(mod, "."),
        depth <= length(parts),
        name = Enum.at(parts, depth - 1),
        valid_alias_piece?("." <> name),
        uniq: true,
        do: %{kind: :module, type: :elixir, name: name}
  end

  # An alias piece starts with a dot followed by an uppercase letter.
  defp valid_alias_piece?(<<?., char, rest::binary>>) when char in ?A..?Z,
    do: valid_alias_rest?(rest)

  defp valid_alias_piece?(_), do: false

  defp valid_alias_rest?(<<char, rest::binary>>)
       when char in ?A..?Z
       when char in ?a..?z
       when char in ?0..?9
       when char == ?_,
       do: valid_alias_rest?(rest)

  defp valid_alias_rest?(<<>>), do: true
  defp valid_alias_rest?(rest), do: valid_alias_piece?(rest)

  ## Elixir Types

  # Shared tail for the `t`/`b` helpers; `fun` fetches types or callbacks.
  defp expand_elixir_module_custom(mod, hint, fun) do
    types = match_module_funs(fun.(mod), hint)
    format_expansion(types, hint)
  end

  ## Helpers

  defp usable_as_unquoted_module?(name) do
    # Conversion to atom is not a problem because
    # it is only called with existing modules names.
    Code.Identifier.classify(String.to_atom(name)) != :other
  end

  # Sorted module names matching the hint prefix; sorting lets the prefix
  # range be selected with drop_while/take_while.
  defp match_modules(hint, root) do
    get_modules(root)
    |> Enum.sort()
    |> Enum.dedup()
    |> Enum.drop_while(&(not String.starts_with?(&1, hint)))
    |> Enum.take_while(&String.starts_with?(&1, hint))
  end

  defp get_modules(true) do
    ["Elixir.Elixir"] ++ get_modules(false)
  end

  defp get_modules(false) do
    modules = Enum.map(:code.all_loaded(), &Atom.to_string(elem(&1, 0)))

    # In interactive mode, modules may not be loaded yet; include everything
    # known to the loaded applications as well.
    case :code.get_mode() do
      :interactive -> modules ++ get_modules_from_applications()
      _otherwise -> modules
    end
  end

  defp get_modules_from_applications do
    for [app] <- loaded_applications(),
        {:ok, modules} = :application.get_key(app, :modules),
        module <- modules do
      Atom.to_string(module)
    end
  end

  defp loaded_applications do
    # If we invoke :application.loaded_applications/0,
    # it can error if we don't call safe_fixtable before.
    # Since in both cases we are reaching over the
    # application controller internals, we choose to match
    # for performance.
    :ets.match(:ac_tab, {{:loaded, :"$1"}, :_})
  end

  defp match_module_funs(funs, hint) do
    for {fun, arity} <- funs, name = Atom.to_string(fun), String.starts_with?(name, hint) do
      %{
        kind: :function,
        name: name,
        arity: arity
      }
    end
    |> Enum.sort_by(&{&1.name, &1.arity})
  end

  defp match_map_fields(map, hint) do
    for {key, value} when is_atom(key) <- Map.to_list(map),
        key = Atom.to_string(key),
        String.starts_with?(key, hint) do
      %{kind: :map_key, name: key, value_is_map: is_map(value)}
    end
    |> Enum.sort_by(& &1.name)
  end

  # Exported functions/macros of `mod`, minus hidden ones when docs exist.
  defp get_module_funs(mod) do
    cond do
      not ensure_loaded?(mod) ->
        []

      docs = get_docs(mod, [:function, :macro]) ->
        exports(mod)
        |> Kernel.--(default_arg_functions_with_doc_false(docs))
        |> Enum.reject(&hidden_fun?(&1, docs))

      true ->
        exports(mod)
    end
  end

  defp get_module_types(mod) do
    cond do
      not ensure_loaded?(mod) ->
        []

      docs = get_docs(mod, [:type]) ->
        Enum.map(docs, &extract_name_and_arity/1)

      true ->
        exports(mod)
    end
  end

  defp get_module_callbacks(mod) do
    cond do
      not ensure_loaded?(mod) ->
        []

      docs = get_docs(mod, [:callback, :macrocallback]) ->
        Enum.map(docs, &extract_name_and_arity/1)

      true ->
        exports(mod)
    end
  end

  # Docs chunk entries of the given kinds, or nil when the chunk is missing.
  defp get_docs(mod, kinds) do
    case Code.fetch_docs(mod) do
      {:docs_v1, _, _, _, _, _, docs} ->
        for {{kind, _, _}, _, _, _, _} = doc <- docs, kind in kinds, do: doc

      {:error, _} ->
        nil
    end
  end

  defp extract_name_and_arity({{_, name, arity}, _, _, _, _}), do: {name, arity}

  # Lower arities synthesized for default arguments inherit @doc false from
  # the full-arity definition and must be hidden as well.
  defp default_arg_functions_with_doc_false(docs) do
    for {{_, fun_name, arity}, _, _, :hidden, %{defaults: count}} <- docs,
        new_arity <- (arity - count)..arity,
        do: {fun_name, new_arity}
  end

  defp hidden_fun?({name, arity}, docs) do
    case Enum.find(docs, &match?({{_, ^name, ^arity}, _, _, _, _}, &1)) do
      nil -> underscored_fun?(name)
      {_, _, _, :hidden, _} -> true
      {_, _, _, :none, _} -> underscored_fun?(name)
      {_, _, _, _, _} -> false
    end
  end

  defp underscored_fun?(name), do: hd(Atom.to_charlist(name)) == ?_

  # `Elixir` itself is never offered as a loadable module.
  defp ensure_loaded?(Elixir), do: false
  defp ensure_loaded?(mod), do: Code.ensure_loaded?(mod)

  ## Ad-hoc conversions

  defp to_entries(%{kind: kind, name: name})
       when kind in [:map_key, :module, :variable] do
    [name]
  end

  defp to_entries(%{kind: :function, name: name, arity: arity}) do
    ["#{name}/#{arity}"]
  end

  defp to_uniq_entries(%{kind: kind})
       when kind in [:map_key, :module, :variable] do
    []
  end

  defp to_uniq_entries(%{kind: :function} = fun) do
    to_entries(fun)
  end

  # A fully-typed module/map-with-map-value gets a trailing dot appended so
  # the user can continue drilling in.
  defp to_hint(%{kind: :module, name: name}, hint) when name == hint do
    format_hint(name, name) <> "."
  end

  defp to_hint(%{kind: :map_key, name: name, value_is_map: true}, hint) when name == hint do
    format_hint(name, hint) <> "."
  end

  defp to_hint(%{kind: kind, name: name}, hint)
       when kind in [:function, :map_key, :module, :variable] do
    format_hint(name, hint)
  end

  # The part of `name` that extends beyond the already-typed `hint`.
  defp format_hint(name, hint) do
    hint_size = byte_size(hint)
    binary_part(name, hint_size, byte_size(name) - hint_size)
  end

  ## Evaluator interface

  defp imports_from_env(server) do
    with {evaluator, server} <- server.evaluator(),
         env_fields = IEx.Evaluator.fields_from_env(evaluator, server, [:functions, :macros]),
         %{functions: funs, macros: macros} <- env_fields do
      Enum.flat_map(funs ++ macros, &elem(&1, 1))
    else
      _ -> []
    end
  end

  defp aliases_from_env(server) do
    with {evaluator, server} <- server.evaluator(),
         %{aliases: aliases} <- IEx.Evaluator.fields_from_env(evaluator, server, [:aliases]) do
      aliases
    else
      _ -> []
    end
  end

  defp variables_from_binding(hint, server) do
    with {evaluator, server} <- server.evaluator() do
      IEx.Evaluator.variables_from_binding(evaluator, server, hint)
    else
      _ -> []
    end
  end

  defp value_from_binding(ast_node, server) do
    with {evaluator, server} <- server.evaluator(),
         {var, map_key_path} <- extract_from_ast(ast_node, []) do
      IEx.Evaluator.value_from_binding(evaluator, server, var, map_key_path)
    else
      _ -> :error
    end
  end

  # Turns `var.a.b` AST into `{var, [:a, :b]}` for the evaluator lookup.
  defp extract_from_ast(var_name, acc) when is_atom(var_name) do
    {var_name, acc}
  end

  defp extract_from_ast({var_name, _, nil}, acc) when is_atom(var_name) do
    {var_name, acc}
  end

  defp extract_from_ast({{:., _, [ast_node, fun]}, _, []}, acc) when is_atom(fun) do
    extract_from_ast(ast_node, [fun | acc])
  end

  defp extract_from_ast(_ast_node, _acc) do
    :error
  end
end
| 26.535406 | 95 | 0.609281 |
9eebb5f8bbed9fe7efba77c9e4b2c3a1a5e2eb9e | 945 | exs | Elixir | nucleotide-count/nucleotide_count.exs | RamanBut-Husaim/exercism.elixir | 683bb3b5700945dbbebcedf26d37208d4201ef49 | [
"MIT"
] | null | null | null | nucleotide-count/nucleotide_count.exs | RamanBut-Husaim/exercism.elixir | 683bb3b5700945dbbebcedf26d37208d4201ef49 | [
"MIT"
] | null | null | null | nucleotide-count/nucleotide_count.exs | RamanBut-Husaim/exercism.elixir | 683bb3b5700945dbbebcedf26d37208d4201ef49 | [
"MIT"
defmodule NucleotideCount do
  @nucleotides [?A, ?C, ?G, ?T]

  @doc """
  Counts individual nucleotides in a NucleotideCount strand.

  ## Examples

      iex> NucleotideCount.count('AATAA', ?A)
      4

      iex> NucleotideCount.count('AATAA', ?T)
      1
  """
  @spec count([char], char) :: non_neg_integer
  def count(strand, nucleotide) do
    strand
    |> histogram()
    |> Map.get(nucleotide, 0)
  end

  @doc """
  Returns a summary of counts by nucleotide.

  ## Examples

      iex> NucleotideCount.histogram('AATAA')
      %{?A => 4, ?T => 1, ?C => 0, ?G => 0}
  """
  @spec histogram([char]) :: map
  def histogram(strand) do
    # Start from a zeroed map so nucleotides absent from the strand still
    # appear with a count of 0.
    base = Map.new(@nucleotides, &{&1, 0})

    # Map.update/4 with a default of 1 also tallies characters outside
    # @nucleotides, matching the original accumulator recursion.
    Enum.reduce(strand, base, fn nucleotide, acc ->
      Map.update(acc, nucleotide, 1, &(&1 + 1))
    end)
  end
end
| 19.6875 | 60 | 0.615873 |
9eebc67d93023b27fc33f6f0629284f7febd7444 | 2,190 | ex | Elixir | lib/blue_bird/conn_logger.ex | Bleachr/blue_bird | 5ea1d1f047ce379bc73b6d7892fdd44392b09569 | [
"MIT"
] | null | null | null | lib/blue_bird/conn_logger.ex | Bleachr/blue_bird | 5ea1d1f047ce379bc73b6d7892fdd44392b09569 | [
"MIT"
] | null | null | null | lib/blue_bird/conn_logger.ex | Bleachr/blue_bird | 5ea1d1f047ce379bc73b6d7892fdd44392b09569 | [
"MIT"
defmodule BlueBird.ConnLogger do
  @moduledoc """
  Logs connections in test cases.

  `BlueBird.ConnLogger` is used to cache `%Plug.Conn` structs. To use it, you
  have to call `start/0` in `test/test_helper.exs`:

      BlueBird.start()
      ExUnit.start(formatters: [ExUnit.CLIFormatter, BlueBird.Formatter])

  You can then use `BlueBird.ConnLogger.save(conn)` in your tests.

      defmodule MyApp.Web.UserControllerTest do
        use MyApp.Web.ConnCase

        alias BlueBird.ConnLogger

        test "returns a single user", %{conn: conn} do
          user = user_fixture()

          conn = conn
          |> get(conn, user_path(conn, :index, user.id))
          |> ConnLogger.save()

          assert json_response(conn, 200)["data"] == %{name: user.name}
        end
      end
  """
  use GenServer

  ## Public Interface

  @doc """
  Starts the GenServer.

  Returns `{:ok, pid}` on success. Raises error on failure.

  ## Example

      iex> start_link()
      {:ok, #PID<0.80.0>}
  """
  @spec start_link :: {:ok, pid}
  def start_link do
    # The match asserts a successful start; any other result raises.
    {:ok, _} = GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  @doc """
  Returns the logged connections.

  ## Example

      iex> get_conns()
      [%Plug.Conn{}, ...]
  """
  @spec get_conns :: [Plug.Conn.t()]
  def get_conns, do: GenServer.call(__MODULE__, :get_conns)

  @doc """
  Resets the logged connections.

  Returns the new, empty connection list.

  ## Example

      iex> reset()
      []
  """
  # Fixed spec: the :reset call replies with the emptied list, not :ok.
  @spec reset :: []
  def reset, do: GenServer.call(__MODULE__, :reset)

  @doc """
  Saves the given connection to the list.

  Returns the connection unchanged, so the call can be used inside a
  pipeline.

  ## Example

      iex> save(conn)
      %Plug.Conn{}
  """
  # Fixed spec: save/2 takes an options list and returns the conn itself
  # (the original spec claimed save/1 returning :ok).
  @spec save(Plug.Conn.t(), keyword) :: Plug.Conn.t()
  def save(conn, opts \\ []) do
    GenServer.cast(__MODULE__, {:save, conn, opts})
    conn
  end

  ## Callbacks

  @impl true
  def init(init_arg) do
    {:ok, init_arg}
  end

  @impl true
  def handle_call(:get_conns, _from, conns), do: {:reply, conns, conns}
  def handle_call(:reset, _from, _conns), do: {:reply, [], []}

  @impl true
  def handle_cast({:save, conn, opts}, conns) do
    # Stash the optional request name on the conn before appending it.
    conn = Plug.Conn.assign(conn, :request_name, Keyword.get(opts, :request_name))
    {:noreply, conns ++ [conn]}
  end
end
| 21.262136 | 84 | 0.605023 |
9eebc87775e2bb8cd96c7d9d121becd734a02efb | 8,239 | exs | Elixir | test/zstream_test.exs | kianmeng/zstream | f8f24c8b9903ecd759517da988e11bb1c958f243 | [
"MIT"
] | null | null | null | test/zstream_test.exs | kianmeng/zstream | f8f24c8b9903ecd759517da988e11bb1c958f243 | [
"MIT"
] | null | null | null | test/zstream_test.exs | kianmeng/zstream | f8f24c8b9903ecd759517da988e11bb1c958f243 | [
"MIT"
defmodule ZstreamTest do
  require Logger
  use ExUnit.Case

  doctest Zstream

  # Archives produced by Zstream.zip/1 are checked two ways (see verify/1):
  # decoded in-memory with OTP's :zip module, and validated on disk with the
  # system `zipinfo` and `unzip` binaries.
  test "zip" do
    verify([
      Zstream.entry("kafan", file("kafan.txt")),
      Zstream.entry("kafka_uncompressed", file("kafan.txt"), coder: Zstream.Coder.Stored)
    ])

    # Non-ASCII (UTF-8) entry name.
    verify([
      Zstream.entry("कफ़न", file("kafan.txt"))
    ])

    # Empty entries, both deflated and stored.
    verify([
      Zstream.entry("empty_file", []),
      Zstream.entry("empty_file_1", [], coder: Zstream.Coder.Stored)
    ])

    # Nested directory paths.
    verify([
      Zstream.entry("moby.txt", file("moby_dick.txt"), coder: Zstream.Coder.Stored),
      Zstream.entry("deep/moby.txt", file("moby_dick.txt"), coder: Zstream.Coder.Stored),
      Zstream.entry("deep/deep/deep/deep/moby.txt", file("moby_dick.txt"),
        coder: Zstream.Coder.Stored
      )
    ])

    verify([
      Zstream.entry("empty_folder/.keep", [])
    ])
  end

  # Each fixture directory contains an archive.zip plus the expected
  # inflated files; verify_unzip/1 compares them entry by entry.
  test "unzip" do
    verify_unzip("uncompressed")
    verify_unzip("compressed-standard")
    verify_unzip("compressed-flags-set")
    verify_unzip("trail")
    verify_unzip("padding")
    verify_unzip("zip64")
    verify_unzip("zip64-1")
    verify_unzip("zip64-2")
  end

  # Archives using features Zstream.unzip/1 does not support must raise
  # with a descriptive message.
  test "unsupported unzip" do
    verify_unzip_error(
      "compressed-OSX-Finder/archive.zip",
      "Zip files with data descriptor record are not supported"
    )

    verify_unzip_error(
      "invalid/archive.zip",
      "Invalid local header"
    )

    verify_unzip_error(
      "zipbomb/42-password-42.zip",
      "Unsupported compression method 99"
    )

    verify_unzip_error(
      "zip64-dd/archive.zip",
      "Zip files with data descriptor record are not supported"
    )
  end

  # Streaming decompression must get through known zip bombs without
  # blowing up memory; Stream.run/1 discards the output.
  test "zip bomb" do
    files = [
      "zipbomb/zbsm.zip",
      "zipbomb/42-passwordless.zip",
      "zipbomb/338.zip",
      "zipbomb/droste.zip",
      "zipbomb/zip-bomb.zip"
    ]

    Enum.each(files, fn path ->
      file(path)
      |> Zstream.unzip()
      |> Stream.run()
    end)
  end

  # Traditional (ZipCrypto) encryption, checked with the system `unzip -P`.
  test "password" do
    password = Base.encode64(:crypto.strong_rand_bytes(12))

    verify_password(
      [
        Zstream.entry("kafan", file("kafan.txt"),
          encryption_coder: {Zstream.EncryptionCoder.Traditional, password: password}
        ),
        Zstream.entry("kafka_uncompressed", file("kafan.txt"),
          coder: Zstream.Coder.Stored,
          encryption_coder: {Zstream.EncryptionCoder.Traditional, password: password}
        )
      ],
      password
    )

    verify_password(
      [
        Zstream.entry("कफ़न", file("kafan.txt"),
          encryption_coder: {Zstream.EncryptionCoder.Traditional, password: password}
        )
      ],
      password
    )

    verify_password(
      [
        Zstream.entry("empty_file", [],
          encryption_coder: {Zstream.EncryptionCoder.Traditional, password: password}
        ),
        Zstream.entry("empty_file_1", [],
          coder: Zstream.Coder.Stored,
          encryption_coder: {Zstream.EncryptionCoder.Traditional, password: password}
        )
      ],
      password
    )

    verify_password(
      [
        Zstream.entry("moby.txt", file("moby_dick.txt"),
          coder: Zstream.Coder.Stored,
          encryption_coder: {Zstream.EncryptionCoder.Traditional, password: password}
        ),
        Zstream.entry("deep/moby.txt", file("moby_dick.txt"),
          coder: Zstream.Coder.Stored,
          encryption_coder: {Zstream.EncryptionCoder.Traditional, password: password}
        ),
        Zstream.entry(
          "deep/deep/deep/deep/moby.txt",
          file("moby_dick.txt"),
          coder: Zstream.Coder.Stored,
          encryption_coder: {Zstream.EncryptionCoder.Traditional, password: password}
        )
      ],
      password
    )

    verify_password(
      [
        Zstream.entry("empty_folder/.keep", [],
          encryption_coder: {Zstream.EncryptionCoder.Traditional, password: password}
        )
      ],
      password
    )
  end

  # Compressing 200 MiB of random data must stay within the memory bound
  # asserted by assert_memory/0, proving the pipeline is truly streaming.
  test "stream" do
    big_file = Stream.repeatedly(&random_bytes/0) |> Stream.take(200)
    assert_memory()

    Zstream.zip([
      Zstream.entry("big_file", big_file),
      Zstream.entry("big_file_2", big_file, coder: Zstream.Coder.Stored)
    ])
    |> Stream.run()

    assert_memory()
  end

  # Pass-through coder that reports back (via a message to the test
  # process) when close/1 is called, so cleanup can be asserted.
  defmodule MockCoder do
    @behaviour Zstream.Coder

    def init(_opts), do: nil
    def encode(chunk, nil), do: {chunk, nil}

    def close(nil) do
      send(self(), :closed)
      []
    end

    def compression_method, do: 0
  end

  # The coder must be closed even when the source stream raises mid-way
  # (the unfold below divides by zero once the counter reaches 0).
  test "resource handling" do
    stream = Stream.unfold(5, fn i -> {to_string(100 / i), i - 1} end)

    try do
      [Zstream.entry("numbers", stream, coder: MockCoder)]
      |> Zstream.zip()
      |> Stream.run()
    rescue
      ArithmeticError -> :ok
    end

    assert_received :closed
  end

  # Compresses the entries, decodes with :zip, and compares names and
  # contents; then re-checks the archive with the OS tooling.
  defp verify(entries) do
    compressed =
      Zstream.zip(entries)
      |> as_binary

    {:ok, decoded_entries} = :zip.unzip(compressed, [:memory, :verbose])

    decoded_entries =
      Enum.map(decoded_entries, fn {filename, data} ->
        # OTP 23 switched :zip filenames from charlists in latin-1 to
        # chardata; normalize either form to a binary.
        if String.to_integer(System.otp_release()) > 22 do
          {IO.chardata_to_string(filename), data}
        else
          {IO.iodata_to_binary(filename), data}
        end
      end)

    # :zip does not report directory placeholder entries.
    entries = Enum.reject(entries, fn e -> String.ends_with?(e.name, "/") end)
    assert length(entries) == length(decoded_entries)
    entries = Enum.sort_by(entries, & &1.name)
    decoded_entries = Enum.sort_by(decoded_entries, fn {name, _} -> name end)

    Enum.zip(entries, decoded_entries)
    |> Enum.each(fn {entry, {decoded_filename, decoded_data}} ->
      assert entry.name == decoded_filename
      assert as_binary(entry.stream) == decoded_data
    end)

    verify_using_os_binary(entries)
    verify_using_os_binary(entries, zip64: true)
  end

  defp verify_password(entries, password) do
    verify_password_with_options(entries, password)
    verify_password_with_options(entries, password, zip64: true)
  end

  # Writes the encrypted archive to a temp file and checks it with
  # `zipinfo` and `unzip -P <password> -t`.
  defp verify_password_with_options(entries, password, options \\ []) do
    Temp.track!()
    path = Temp.path!(%{suffix: ".zip"})

    Zstream.zip(entries, options)
    |> Stream.into(File.stream!(path))
    |> Stream.run()

    {response, exit_code} = System.cmd("zipinfo", [path])
    Logger.debug(response)
    assert exit_code == 0
    {response, exit_code} = System.cmd("unzip", ["-P", password, "-t", path])
    Logger.debug(response)
    assert exit_code == 0
    File.rm!(path)
  end

  # Same as above but for unencrypted archives (`unzip -t`).
  defp verify_using_os_binary(entries, options \\ []) do
    Temp.track!()
    path = Temp.path!(%{suffix: ".zip"})

    Zstream.zip(entries, options)
    |> Stream.into(File.stream!(path))
    |> Stream.run()

    {response, exit_code} = System.cmd("zipinfo", [path])
    Logger.debug(response)
    assert exit_code == 0
    {response, exit_code} = System.cmd("unzip", ["-t", path])
    Logger.debug(response)
    assert exit_code == 0
    File.rm!(path)
  end

  # Folds over the unzip event stream, buffering each entry's data chunks
  # and comparing the result against the expected inflated fixture on :eof.
  defp verify_unzip(path) do
    file(path <> "/archive.zip")
    |> Zstream.unzip()
    |> Enum.reduce(
      %{buffer: "", file_name: nil},
      fn
        {:entry, %Zstream.Entry{name: file_name} = entry}, state ->
          Logger.info(inspect(entry))
          state = put_in(state.file_name, file_name)
          put_in(state.buffer, "")

        {:data, :eof}, state ->
          # Directory entries (trailing "/") carry no data to compare.
          unless String.ends_with?(state.file_name, "/") do
            actual = IO.iodata_to_binary(state.buffer)

            expected =
              File.read!(Path.join([__DIR__, "fixture", path, "inflated", state.file_name]))

            assert actual == expected
          end

          state

        {:data, data}, state ->
          put_in(state.buffer, [state.buffer, data])
      end
    )
  end

  defp verify_unzip_error(path, error) do
    assert_raise Zstream.Unzip.Error, error, fn ->
      file(path)
      |> Zstream.unzip()
      |> Enum.to_list()
    end
  end

  defp as_binary(stream) do
    stream
    |> Enum.to_list()
    |> IO.iodata_to_binary()
  end

  # Streams a fixture file in 100-byte chunks to exercise chunked input.
  defp file(name) do
    File.stream!(Path.join([__DIR__, "fixture", name]), [], 100)
  end

  def random_bytes() do
    :crypto.strong_rand_bytes(1024 * 1024)
  end

  # Guards the streaming tests: total BEAM memory must stay under 150 MiB.
  def assert_memory do
    total = (:erlang.memory() |> Keyword.fetch!(:total)) / (1024 * 1024)
    Logger.debug("Total memory: #{total}")
    assert total < 150
  end
end
| 25.350769 | 92 | 0.612574 |
9eebf731830468670fd6ee229a32b3b814c0c33d | 632 | ex | Elixir | lib/aws_ex_ray/client.ex | lyokato/aws_ex_ray | f2413e9ac0d248da8960eef548418b1a2a906aff | [
"MIT"
] | 10 | 2018-06-17T03:20:59.000Z | 2022-03-01T18:34:55.000Z | lib/aws_ex_ray/client.ex | lyokato/aws_ex_ray | f2413e9ac0d248da8960eef548418b1a2a906aff | [
"MIT"
] | 9 | 2018-05-29T05:26:24.000Z | 2021-12-11T15:29:38.000Z | lib/aws_ex_ray/client.ex | lyokato/aws_ex_ray | f2413e9ac0d248da8960eef548418b1a2a906aff | [
"MIT"
defmodule AwsExRay.Client do
  @moduledoc ~S"""
  Facade for delivering trace reports to the X-Ray daemon running on
  localhost. The concrete client implementation is resolved at runtime
  through `AwsExRay.Config.client_module/0`.
  """

  defmodule Behaviour do
    @moduledoc ~S"""
    Contract implemented by the real client and by mocks, so the transport
    can be swapped out in the test environment.
    """

    @callback send(data :: binary) :: term
  end

  alias AwsExRay.Config

  @spec child_spec(any) :: Supervisor.child_spec()
  def child_spec(opts) do
    client = Config.client_module()
    client.child_spec(opts)
  end

  @spec send(binary) :: :ok
  def send(data) do
    # Delegate delivery to the configured client; always report :ok.
    client = Config.client_module()
    client.send(data)
    :ok
  end
end
| 21.066667 | 68 | 0.69462 |
9eec2c15f5af36c25a1802e5316d949a95e98a3c | 887 | exs | Elixir | mix.exs | carmaproject/json_elixir | ef88a5a8d56ef61b5e9c7346699122cf022f842f | [
"MIT"
] | null | null | null | mix.exs | carmaproject/json_elixir | ef88a5a8d56ef61b5e9c7346699122cf022f842f | [
"MIT"
] | 1 | 2021-07-16T11:53:31.000Z | 2021-07-16T11:53:31.000Z | mix.exs | carmaproject/json_elixir | ef88a5a8d56ef61b5e9c7346699122cf022f842f | [
"MIT"
] | null | null | null | defmodule JsonElixir.MixProject do
use Mix.Project
def project do
[
app: :json_elixir,
version: "0.1.2",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps(),
description: description(),
package: package(),
source_url: "https://github.com/carmaproject/json_elixir"
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:json, "~> 1.4"},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
]
end
def package do
[
name: "ex_json",
maintainers: "hassanRSiddiqi",
organization: "carmaproject",
licenses: ["MIT License"]
]
end
defp description() do
"convert raw json into html."
end
end
| 19.711111 | 63 | 0.583991 |
9eec300fd18053e2a648d1190e4aacc1855b0e2b | 987 | exs | Elixir | test/support/apps/example/config/config.exs | mitchellhenke/torch | 2d0ab68f4e2d7f3bc37fbf7edbd1298b29b36e71 | [
"MIT"
] | 528 | 2019-09-13T15:10:36.000Z | 2022-03-31T10:28:27.000Z | test/support/apps/example/config/config.exs | mitchellhenke/torch | 2d0ab68f4e2d7f3bc37fbf7edbd1298b29b36e71 | [
"MIT"
] | 133 | 2019-09-13T17:46:59.000Z | 2022-03-01T13:37:10.000Z | test/support/apps/example/config/config.exs | mitchellhenke/torch | 2d0ab68f4e2d7f3bc37fbf7edbd1298b29b36e71 | [
"MIT"
] | 38 | 2019-10-29T20:37:13.000Z | 2022-03-03T05:19:33.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :example, ecto_repos: [Example.Repo]
# Configures the endpoint
config :example, ExampleWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "xcieGgutB9vi+UOSP+9CYZ9wLdSXUYPpwxEUDGjmOA3E/f6DvKWSwv/O+f9tHZvQ",
render_errors: [view: ExampleWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Example.PubSub, adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
config :torch,
otp_app: :example,
template_format: "eex"
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 32.9 | 86 | 0.763931 |
9eec63acb03481d7c6a622f8484bac412fef9543 | 2,187 | ex | Elixir | lib/advent_of_code_2019/day16.ex | scorphus/advent-of-code-2019 | 48305ff3b13b23cac60bed02349775d8feb05a3b | [
"BSD-3-Clause"
] | null | null | null | lib/advent_of_code_2019/day16.ex | scorphus/advent-of-code-2019 | 48305ff3b13b23cac60bed02349775d8feb05a3b | [
"BSD-3-Clause"
] | null | null | null | lib/advent_of_code_2019/day16.ex | scorphus/advent-of-code-2019 | 48305ff3b13b23cac60bed02349775d8feb05a3b | [
"BSD-3-Clause"
defmodule AdventOfCode2019.FlawedFrequencyTransmission do
  @moduledoc """
  Day 16 — https://adventofcode.com/2019/day/16
  """

  @base_pattern [0, 1, 0, -1]

  # Part 1: run 100 FFT phases over the input signal and return the first
  # eight digits of the final phase as a string.
  @spec part1(Enumerable.t()) :: binary
  def part1(in_stream) do
    in_stream
    |> read_input_signal()
    |> repeat_phases(100)
    |> Stream.take(8)
    |> Enum.join()
  end

  # Part 2: the real signal is the input repeated 10_000 times; its first
  # seven digits give the offset of the eight-digit message to extract.
  @spec part2(Enumerable.t()) :: binary
  def part2(in_stream) do
    digits = read_input_signal(in_stream)

    offset =
      digits
      |> Enum.slice(0, 7)
      |> Enum.join()
      |> String.to_integer()

    digits
    |> repeat_phases(100, 10_000 * length(digits), offset)
    |> Stream.take(8)
    |> Enum.join()
  end

  # Reads the first line of the stream as a list of single-digit integers.
  defp read_input_signal(in_stream) do
    in_stream
    |> Enum.at(0)
    |> String.trim()
    |> String.graphemes()
    |> Enum.map(&String.to_integer/1)
  end

  # Full O(n^2)-per-phase transform, repeated `phases` times.
  defp repeat_phases(digits, phases) do
    Enum.reduce(1..phases, digits, &run_phases/2)
  end

  # Fast path for part 2: only the suffix from `offset` onward is needed,
  # and there the phase reduces to a running suffix sum.
  defp repeat_phases(digits, phases, size, offset) do
    digits
    |> Stream.cycle()
    |> Stream.drop(offset)
    |> Enum.take(size - offset)
    |> Stream.iterate(&run_phases/1)
    |> Enum.at(phases)
  end

  # One full phase: compute the output digit for every position.
  defp run_phases(_phase, digits) do
    1..length(digits)
    |> Stream.transform(digits, &output_signal/2)
    |> Enum.to_list()
  end

  # One suffix-sum phase (part 2 fast path).
  defp run_phases(digits) do
    {next_digits, _sum} = output_signal(digits)
    next_digits
  end

  # Output digit at `position`: |dot product with the repeating pattern| mod 10.
  defp output_signal(position, digits) do
    dot_product =
      digits
      |> Stream.zip(repeat_pattern(position))
      |> Stream.map(fn {digit, weight} -> digit * weight end)
      |> Enum.sum()

    {[abs(rem(dot_product, 10))], digits}
  end

  # Suffix-sum recursion: each digit becomes (digit + sum of digits after it)
  # mod 10, returning the rebuilt list and the running sum.
  defp output_signal([digit]), do: {[digit], digit}

  defp output_signal([digit | rest]) do
    {rest, sum} = output_signal(rest)
    {[rem(digit + sum, 10) | rest], digit + sum}
  end

  # The base pattern with every element repeated n times, cycled forever,
  # with the very first element dropped (per the puzzle definition).
  defp repeat_pattern(n) do
    @base_pattern
    |> Stream.flat_map(&List.duplicate(&1, n))
    |> Stream.cycle()
    |> Stream.drop(1)
  end
end
| 23.771739 | 77 | 0.609511 |
9eec6abde9d4a990b0e8e7ce55f341fdba322d25 | 1,966 | ex | Elixir | lib/router.ex | lee-dohm/staff-notes | 07186e8407f1955876fa2dee2dbbfd0bbac91333 | [
"MIT"
] | 1 | 2020-01-26T18:08:40.000Z | 2020-01-26T18:08:40.000Z | lib/router.ex | lee-dohm/staff-notes | 07186e8407f1955876fa2dee2dbbfd0bbac91333 | [
"MIT"
] | 36 | 2017-12-23T20:22:07.000Z | 2018-05-10T09:16:59.000Z | lib/router.ex | lee-dohm/staff-notes | 07186e8407f1955876fa2dee2dbbfd0bbac91333 | [
"MIT"
defmodule StaffNotesWeb.Router do
  @moduledoc """
  Router for the `StaffNotesWeb` application.
  """
  use StaffNotesWeb, :router

  alias Plug.Ribbon
  alias StaffNotesApi.TokenAuthentication
  alias StaffNotesWeb.SlidingSessionTimeout

  # Standard HTML pipeline: session, CSRF protection, secure headers, plus
  # session timeout, current-user assignment and a dev/test environment ribbon.
  pipeline :browser do
    plug(:accepts, ["html"])
    plug(:fetch_session)
    plug(:fetch_flash)
    plug(:protect_from_forgery)
    plug(:put_secure_browser_headers)
    plug(SlidingSessionTimeout)
    plug(:assign_current_user)
    plug(Ribbon, [:dev, :test])
  end

  # JSON API pipeline authenticated by bearer token.
  pipeline :api do
    plug(:accepts, ["json"])
    plug(TokenAuthentication)
  end

  # Browser-facing pages and nested organization resources.
  scope "/", StaffNotesWeb do
    # Use the default browser stack
    pipe_through(:browser)

    get("/", PageController, :index)
    get("/about", PageController, :about)

    resources("/users", UserController, only: [:show], param: "name")

    resources "/orgs", OrganizationController, except: [:index], param: "name" do
      resources("/members", MemberController, only: [:index, :show])
      resources("/notes", NoteController, except: [:index])
      resources("/staff", StaffController, only: [:index])
      resources("/teams", TeamController, param: "name")
    end
  end

  # Token-authenticated JSON endpoints (file upload, Markdown rendering).
  scope "/api", StaffNotesApi do
    pipe_through(:api)

    post("/files", FileController, :create)
    post("/markdown", MarkdownController, :render)
  end

  # OAuth sign-in/sign-out flow.
  scope "/auth", StaffNotesWeb do
    pipe_through(:browser)

    get("/", AuthController, :index)
    get("/callback", AuthController, :callback)
    get("/logout", AuthController, :delete)
  end

  @doc """
  Fetch the current user from the session and add it to the assigns of the `Plug.Conn`.

  This allows access to the currently signed in user in views as `@current_user`. If no user is
  logged in, `@current_user` will be `nil`.
  """
  @spec assign_current_user(Plug.Conn.t(), any) :: Plug.Conn.t()
  def assign_current_user(conn, _) do
    assign(conn, :current_user, get_session(conn, :current_user))
  end
end
| 28.085714 | 95 | 0.683113 |
9eec6fe929f491fbfcf97590a51e9474cae53044 | 737 | ex | Elixir | lib/etcetera/utils.ex | praekelt/etcetera | 24c8bc13267eee248b8117179f886f1e18658803 | [
"BSD-3-Clause"
] | null | null | null | lib/etcetera/utils.ex | praekelt/etcetera | 24c8bc13267eee248b8117179f886f1e18658803 | [
"BSD-3-Clause"
] | null | null | null | lib/etcetera/utils.ex | praekelt/etcetera | 24c8bc13267eee248b8117179f886f1e18658803 | [
"BSD-3-Clause"
] | null | null | null | defmodule Etcetera.Utils do
@moduledoc """
Miscellaneous utility functions.
"""
@doc """
Removes the leading and trailing slashes from the given string.
## Examples
iex> Etcetera.Utils.remove_slashes("////a/b/c/////")
"a/b/c"
iex> Etcetera.Utils.remove_slashes("a/b/c")
"a/b/c"
"""
def remove_slashes(input_text) do
input_text
|> String.replace_trailing("/", "")
|> String.replace_leading("/", "")
end
def get_etcd_url(path, version \\ "v2") do
host = remove_slashes(Etcetera.etcd_host)
port = Etcetera.etcd_port
prefix = remove_slashes(Etcetera.etcd_prefix)
path = remove_slashes(path)
"#{host}:#{port}/#{version}/keys/#{prefix}/#{path}"
end
end
| 24.566667 | 65 | 0.630936 |
9eeccbfbfdcdbd7ca87eabc3968ed0c1b2dfc7a6 | 8,264 | ex | Elixir | clients/vector_tile/lib/google_api/vector_tile/v1/api/featuretiles.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/vector_tile/lib/google_api/vector_tile/v1/api/featuretiles.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/vector_tile/lib/google_api/vector_tile/v1/api/featuretiles.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VectorTile.V1.Api.Featuretiles do
@moduledoc """
API calls for all endpoints tagged `Featuretiles`.
"""
alias GoogleApi.VectorTile.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets a feature tile by its tile resource name.
## Parameters
* `connection` (*type:* `GoogleApi.VectorTile.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Resource name of the tile. The tile resource name is prefixed by its collection ID `tiles/` followed by the resource ID, which encodes the tile's global x and y coordinates and zoom level as `@,,z`. For example, `tiles/@1,2,3z`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:"clientInfo.apiClient"` (*type:* `String.t`) - API client name and version. For example, the SDK calling the API. The exact format is up to the client.
* `:"clientInfo.applicationId"` (*type:* `String.t`) - Application ID, such as the package name on Android and the bundle identifier on iOS platforms.
* `:"clientInfo.applicationVersion"` (*type:* `String.t`) - Application version number, such as "1.2.3". The exact format is application-dependent.
* `:"clientInfo.deviceModel"` (*type:* `String.t`) - Device model as reported by the device. The exact format is platform-dependent.
* `:"clientInfo.operatingSystem"` (*type:* `String.t`) - Operating system name and version as reported by the OS. For example, "Mac OS X 10.10.4". The exact format is platform-dependent.
* `:"clientInfo.platform"` (*type:* `String.t`) - Platform where the application is running.
* `:"clientInfo.userId"` (*type:* `String.t`) - Required. A client-generated user ID. The ID should be generated and persisted during the first user session or whenever a pre-existing ID is not found. The exact format is up to the client. This must be non-empty in a GetFeatureTileRequest (whether via the header or GetFeatureTileRequest.client_info).
* `:clientTileVersionId` (*type:* `String.t`) - Optional version id identifying the tile that is already in the client's cache. This field should be populated with the most recent version_id value returned by the API for the requested tile. If the version id is empty the server always returns a newly rendered tile. If it is provided the server checks if the tile contents would be identical to one that's already on the client, and if so, returns a stripped-down response tile with STATUS_OK_DATA_UNCHANGED instead.
* `:enableDetailedHighwayTypes` (*type:* `boolean()`) - Flag indicating whether detailed highway types should be returned. If this is set, the CONTROLLED_ACCESS_HIGHWAY type may be returned. If not, then these highways will have the generic HIGHWAY type. This exists for backwards compatibility reasons.
* `:enableFeatureNames` (*type:* `boolean()`) - Flag indicating whether human-readable names should be returned for features. If this is set, the display_name field on the feature will be filled out.
* `:enableModeledVolumes` (*type:* `boolean()`) - Flag indicating whether 3D building models should be enabled. If this is set structures will be returned as 3D modeled volumes rather than 2.5D extruded areas where possible.
* `:enablePoliticalFeatures` (*type:* `boolean()`) - Flag indicating whether political features should be returned.
* `:enablePrivateRoads` (*type:* `boolean()`) - Flag indicating whether the returned tile will contain road features that are marked private. Private roads are indicated by the Feature.segment_info.road_info.is_private field.
* `:enableUnclippedBuildings` (*type:* `boolean()`) - Flag indicating whether unclipped buildings should be returned. If this is set, building render ops will extend beyond the tile boundary. Buildings will only be returned on the tile that contains their centroid.
* `:languageCode` (*type:* `String.t`) - Required. The BCP-47 language code corresponding to the language in which the name was requested, such as "en-US" or "sr-Latn". For more information, see http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
* `:regionCode` (*type:* `String.t`) - Required. The Unicode country/region code (CLDR) of the location from which the request is coming from, such as "US" and "419". For more information, see http://www.unicode.org/reports/tr35/#unicode_region_subtag.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.VectorTile.V1.Model.FeatureTile{}}` on success
* `{:error, info}` on failure
"""
  @spec vectortile_featuretiles_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.VectorTile.V1.Model.FeatureTile.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def vectortile_featuretiles_get(connection, name, optional_params \\ [], opts \\ []) do
    # Every supported optional parameter of this endpoint is sent in the
    # query string; the map records that placement for the request builder.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :"clientInfo.apiClient" => :query,
      :"clientInfo.applicationId" => :query,
      :"clientInfo.applicationVersion" => :query,
      :"clientInfo.deviceModel" => :query,
      :"clientInfo.operatingSystem" => :query,
      :"clientInfo.platform" => :query,
      :"clientInfo.userId" => :query,
      :clientTileVersionId => :query,
      :enableDetailedHighwayTypes => :query,
      :enableFeatureNames => :query,
      :enableModeledVolumes => :query,
      :enablePoliticalFeatures => :query,
      :enablePrivateRoads => :query,
      :enableUnclippedBuildings => :query,
      :languageCode => :query,
      :regionCode => :query
    }

    # Build the GET request; `name` is percent-encoded into the `{+name}`
    # slot of the URL template.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1/{+name}", %{
        "name" => URI.encode(name, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the JSON body into a FeatureTile struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.VectorTile.V1.Model.FeatureTile{}])
  end
end
| 69.445378 | 525 | 0.689376 |
9eecdf34c283b16aa06fbec02d77975e9dff9999 | 1,166 | ex | Elixir | quickly/web/channels/user_socket.ex | hectorip/quickly | 99a4b2720b34591c5ba19bdcf8b517ce11b4c0ad | [
"MIT"
] | null | null | null | quickly/web/channels/user_socket.ex | hectorip/quickly | 99a4b2720b34591c5ba19bdcf8b517ce11b4c0ad | [
"MIT"
] | null | null | null | quickly/web/channels/user_socket.ex | hectorip/quickly | 99a4b2720b34591c5ba19bdcf8b517ce11b4c0ad | [
"MIT"
] | null | null | null | defmodule Quickly.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", Quickly.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
  # Accepts every connection unconditionally; no authentication is performed
  # here (the commented notes above describe how token verification would be
  # added).
  def connect(_params, socket) do
    {:ok, socket}
  end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# Quickly.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
  # Returning `nil` keeps sockets anonymous, so broadcasts cannot target a
  # specific user's sockets (see the notes above).
  def id(_socket), do: nil
end
| 30.684211 | 83 | 0.701544 |
9eece710e34c28bed601f6216387669d0e9ca360 | 2,117 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_v2beta1_intent_message_simple_response.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_v2beta1_intent_message_simple_response.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_v2beta1_intent_message_simple_response.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowV2beta1IntentMessageSimpleResponse do
  @moduledoc """
  The simple response message containing speech or text.

  ## Attributes

  *   `displayText` (*type:* `String.t`, *default:* `nil`) - Optional. The text to display.
  *   `ssml` (*type:* `String.t`, *default:* `nil`) - One of text_to_speech or ssml must be provided. Structured spoken response to the user in the SSML format. Mutually exclusive with text_to_speech.
  *   `textToSpeech` (*type:* `String.t`, *default:* `nil`) - One of text_to_speech or ssml must be provided. The plain text of the speech output. Mutually exclusive with ssml.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :displayText => String.t() | nil,
          :ssml => String.t() | nil,
          :textToSpeech => String.t() | nil
        }

  # `field/1` comes from GoogleApi.Gax.ModelBase and registers each
  # attribute for JSON (de)serialization.
  field(:displayText)
  field(:ssml)
  field(:textToSpeech)
end
# Delegates JSON decoding to the generated model module above.
defimpl Poison.Decoder,
  for: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowV2beta1IntentMessageSimpleResponse do
  def decode(value, options) do
    GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowV2beta1IntentMessageSimpleResponse.decode(
      value,
      options
    )
  end
end
# JSON encoding is handled generically by the Gax model base.
defimpl Poison.Encoder,
  for: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowV2beta1IntentMessageSimpleResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.5 | 200 | 0.735002 |
9eed27662e36ff531109cc865bd4fd2db98a104c | 579 | exs | Elixir | elixir/priv/repo/migrations/20200826196533_create_transactions.exs | TreywRoberts/web-homework | d19b17dd384341d9e6e7e3174372673584289b83 | [
"MIT"
] | null | null | null | elixir/priv/repo/migrations/20200826196533_create_transactions.exs | TreywRoberts/web-homework | d19b17dd384341d9e6e7e3174372673584289b83 | [
"MIT"
] | null | null | null | elixir/priv/repo/migrations/20200826196533_create_transactions.exs | TreywRoberts/web-homework | d19b17dd384341d9e6e7e3174372673584289b83 | [
"MIT"
] | null | null | null | defmodule Homework.Repo.Migrations.CreateTransactions do
use Ecto.Migration
  # Creates the transactions table with an application-supplied UUID primary
  # key (hence `primary_key: false` plus an explicit :id column).
  def change do
    create table(:transactions, primary_key: false) do
      add(:id, :uuid, primary_key: true)
      # NOTE(review): amount is an integer — presumably a cents value; confirm.
      add(:amount, :integer)
      # credit/debit are independent flags; the schema does not enforce that
      # exactly one of them is set.
      add(:credit, :boolean, default: false, null: false)
      add(:debit, :boolean, default: false, null: false)
      add(:description, :string)
      add(:user_id, references(:users, type: :uuid, on_delete: :nothing))
      add(:merchant_id, references(:merchants, type: :uuid, on_delete: :nothing))

      timestamps()
    end
  end
end
| 32.166667 | 82 | 0.651123 |
9eed2e874ffd71c96db45f293803727d612ca77c | 9,140 | ex | Elixir | lib/livebook_web/live/home_live.ex | akash-akya/livebook | c3a0b7c01d76e8c646d4d1db0c49f1401ca80153 | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/home_live.ex | akash-akya/livebook | c3a0b7c01d76e8c646d4d1db0c49f1401ca80153 | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/home_live.ex | akash-akya/livebook | c3a0b7c01d76e8c646d4d1db0c49f1401ca80153 | [
"Apache-2.0"
] | null | null | null | defmodule LivebookWeb.HomeLive do
use LivebookWeb, :live_view
alias Livebook.{SessionSupervisor, Session, LiveMarkdown, Notebook}
@impl true
def mount(_params, _session, socket) do
if connected?(socket) do
Phoenix.PubSub.subscribe(Livebook.PubSub, "sessions")
end
session_summaries = sort_session_summaries(SessionSupervisor.get_session_summaries())
{:ok, assign(socket, path: default_path(), session_summaries: session_summaries)}
end
  # Renders the home screen: a header with the primary actions, the file
  # picker with fork/open controls, and the list of running sessions, plus
  # the modals for the :close_session and :import live actions. Elixir
  # comments cannot appear inside the ~L sigil without changing the rendered
  # output, so all notes live here.
  @impl true
  def render(assigns) do
    ~L"""
    <div class="flex flex-grow h-full">
      <div class="flex-grow px-6 py-8 overflow-y-auto">
        <div class="max-w-screen-lg w-full mx-auto p-4 pt-0 pb-8 flex flex-col items-center space-y-4">
          <div class="w-full flex flex-col space-y-2 items-center sm:flex-row sm:space-y-0 sm:justify-between sm:pb-4 pb-8 border-b border-gray-200">
            <div class="text-2xl text-gray-800 font-semibold">
              <img src="/logo-with-text.png" class="h-[50px]" alt="Livebook" />
            </div>
            <div class="flex space-x-2 pt-2">
              <span class="tooltip top" aria-label="Introduction">
                <button class="button button-outlined-gray button-square-icon"
                  phx-click="open_welcome">
                  <%= remix_icon("compass-line") %>
                </button>
              </span>
              <%= live_patch to: Routes.home_path(@socket, :import, "url"),
                    class: "button button-outlined-gray whitespace-nowrap" do %>
                Import
              <% end %>
              <button class="button button-blue"
                phx-click="new">
                New notebook
              </button>
            </div>
          </div>
          <div class="w-full h-80">
            <%= live_component @socket, LivebookWeb.PathSelectComponent,
                  id: "path_select",
                  path: @path,
                  extnames: [LiveMarkdown.extension()],
                  running_paths: paths(@session_summaries),
                  phx_target: nil,
                  phx_submit: nil do %>
              <div class="flex justify-end space-x-2">
                <%= content_tag :button,
                      class: "button button-outlined-gray whitespace-nowrap",
                      phx_click: "fork",
                      disabled: not path_forkable?(@path) do %>
                  <%= remix_icon("git-branch-line", class: "align-middle mr-1") %>
                  <span>Fork</span>
                <% end %>
                <%= if path_running?(@path, @session_summaries) do %>
                  <%= live_redirect "Join session", to: Routes.session_path(@socket, :page, session_id_by_path(@path, @session_summaries)),
                        class: "button button-blue" %>
                <% else %>
                  <%= tag :span, if(File.regular?(@path) and not file_writable?(@path),
                        do: [class: "tooltip top", aria_label: "This file is write-protected, please fork instead"],
                        else: []
                      ) %>
                    <%= content_tag :button, "Open",
                          class: "button button-blue",
                          phx_click: "open",
                          disabled: not path_openable?(@path, @session_summaries) %>
                  </span>
                <% end %>
              </div>
            <% end %>
          </div>
          <div class="w-full py-12">
            <h3 class="text-xl font-semibold text-gray-800 mb-5">
              Running sessions
            </h3>
            <%= if @session_summaries == [] do %>
              <div class="p-5 flex space-x-4 items-center border border-gray-200 rounded-lg">
                <div>
                  <%= remix_icon("windy-line", class: "text-gray-400 text-xl") %>
                </div>
                <div class="text-gray-600">
                  You do not have any running sessions.
                  <br>
                  Please create a new one by clicking <span class="font-semibold">“New notebook”</span>
                </div>
              </div>
            <% else %>
              <%= live_component @socket, LivebookWeb.HomeLive.SessionsComponent,
                    id: "sessions_list",
                    session_summaries: @session_summaries %>
            <% end %>
          </div>
        </div>
      </div>
    </div>

    <%= if @live_action == :close_session do %>
      <%= live_modal @socket, LivebookWeb.HomeLive.CloseSessionComponent,
            id: :close_session_modal,
            return_to: Routes.home_path(@socket, :page),
            session_summary: @session_summary %>
    <% end %>

    <%= if @live_action == :import do %>
      <%= live_modal @socket, LivebookWeb.HomeLive.ImportComponent,
            id: :import_modal,
            return_to: Routes.home_path(@socket, :page),
            tab: @tab %>
    <% end %>
    """
  end
@impl true
def handle_params(%{"session_id" => session_id}, _url, socket) do
session_summary = Enum.find(socket.assigns.session_summaries, &(&1.session_id == session_id))
{:noreply, assign(socket, session_summary: session_summary)}
end
def handle_params(%{"tab" => tab}, _url, socket) do
{:noreply, assign(socket, tab: tab)}
end
def handle_params(_params, _url, socket), do: {:noreply, socket}
  @impl true
  def handle_event("set_path", %{"path" => path}, socket) do
    # File picker selection changed.
    {:noreply, assign(socket, path: path)}
  end

  def handle_event("open_welcome", %{}, socket) do
    # Start a session with the bundled introduction notebook.
    create_session(socket, notebook: Livebook.Notebook.Welcome.new())
  end

  def handle_event("new", %{}, socket) do
    # Start a session with a blank notebook.
    create_session(socket)
  end

  def handle_event("fork", %{}, socket) do
    # Import the selected file and open a *copy* of it (no :path option), so
    # saving won't overwrite the original; notebook images are copied along.
    {notebook, messages} = import_notebook(socket.assigns.path)
    socket = put_import_flash_messages(socket, messages)
    notebook = Notebook.forked(notebook)
    images_dir = Session.images_dir_for_notebook(socket.assigns.path)
    create_session(socket, notebook: notebook, copy_images_from: images_dir)
  end

  def handle_event("open", %{}, socket) do
    # Import the selected file and open it in place (:path is set), so the
    # session saves back to the same file.
    {notebook, messages} = import_notebook(socket.assigns.path)
    socket = put_import_flash_messages(socket, messages)
    create_session(socket, notebook: notebook, path: socket.assigns.path)
  end

  def handle_event("fork_session", %{"id" => session_id}, socket) do
    # Fork an already-running session, copying its notebook and images.
    data = Session.get_data(session_id)
    notebook = Notebook.forked(data.notebook)
    %{images_dir: images_dir} = Session.get_summary(session_id)
    create_session(socket, notebook: notebook, copy_images_from: images_dir)
  end
  @impl true
  def handle_info({:session_created, id}, socket) do
    # Broadcast on the "sessions" topic: add the new session, keeping the
    # list sorted by notebook name.
    summary = Session.get_summary(id)
    session_summaries = sort_session_summaries([summary | socket.assigns.session_summaries])
    {:noreply, assign(socket, session_summaries: session_summaries)}
  end

  def handle_info({:session_closed, id}, socket) do
    # Broadcast on the "sessions" topic: drop the closed session from the list.
    session_summaries = Enum.reject(socket.assigns.session_summaries, &(&1.session_id == id))
    {:noreply, assign(socket, session_summaries: session_summaries)}
  end

  def handle_info({:import_content, content}, socket) do
    # Sent by the import modal component with raw Live Markdown content.
    {notebook, messages} = Livebook.LiveMarkdown.Import.notebook_from_markdown(content)
    socket = put_import_flash_messages(socket, messages)
    create_session(socket, notebook: notebook)
  end

  def handle_info(_message, socket), do: {:noreply, socket}
  # Initial path shown in the file picker; the trailing "/" denotes a directory.
  defp default_path(), do: Livebook.Config.root_path() <> "/"
defp sort_session_summaries(session_summaries) do
Enum.sort_by(session_summaries, & &1.notebook_name)
end
defp paths(session_summaries) do
Enum.map(session_summaries, & &1.path)
end
defp path_forkable?(path) do
File.regular?(path)
end
defp path_openable?(path, session_summaries) do
File.regular?(path) and not path_running?(path, session_summaries) and file_writable?(path)
end
defp path_running?(path, session_summaries) do
running_paths = paths(session_summaries)
path in running_paths
end
defp file_writable?(path) do
case File.stat(path) do
{:ok, stat} -> stat.access in [:read_write, :write]
{:error, _} -> false
end
end
  # Starts a new session process with the given options and redirects to it,
  # or surfaces the failure reason in an :error flash.
  defp create_session(socket, opts \\ []) do
    case SessionSupervisor.create_session(opts) do
      {:ok, id} ->
        {:noreply, push_redirect(socket, to: Routes.session_path(socket, :page, id))}

      {:error, reason} ->
        {:noreply, put_flash(socket, :error, "Failed to create a notebook: #{reason}")}
    end
  end
defp import_notebook(path) do
content = File.read!(path)
LiveMarkdown.Import.notebook_from_markdown(content)
end
defp put_import_flash_messages(socket, []), do: socket
defp put_import_flash_messages(socket, messages) do
list =
messages
|> Enum.map(fn message -> ["- ", message] end)
|> Enum.intersperse("\n")
flash =
IO.iodata_to_binary([
"We found problems while importing the file and tried to autofix them:\n" | list
])
put_flash(socket, :info, flash)
end
defp session_id_by_path(path, session_summaries) do
summary = Enum.find(session_summaries, &(&1.path == path))
summary.session_id
end
end
| 36.56 | 149 | 0.604923 |
9eed431eb60c6f09bc24b9d74b064d52a222215e | 2,041 | exs | Elixir | test/test_helper.exs | elpikel/fun_with_flags | 1c409e8872eb7e370f827a0d471082aeff09cf33 | [
"MIT"
] | 736 | 2017-02-16T12:55:57.000Z | 2022-03-31T19:50:43.000Z | test/test_helper.exs | elpikel/fun_with_flags | 1c409e8872eb7e370f827a0d471082aeff09cf33 | [
"MIT"
] | 86 | 2017-04-02T16:42:33.000Z | 2022-03-25T01:07:13.000Z | test/test_helper.exs | elpikel/fun_with_flags | 1c409e8872eb7e370f827a0d471082aeff09cf33 | [
"MIT"
] | 63 | 2017-04-28T23:38:15.000Z | 2022-03-28T12:49:02.000Z | # If we are not using Ecto and we're not using Phoenix.PubSub, then
# we need a Redis instance for either persistence or PubSub.
does_anything_need_redis = !(
  FunWithFlags.Config.persist_in_ecto? && FunWithFlags.Config.phoenix_pubsub?
)

if FunWithFlags.Config.phoenix_pubsub? do
  # The Phoenix PubSub application must be running before we try to start our
  # PubSub process and subscribe.
  :ok = Application.ensure_started(:phoenix_pubsub)

  # Start a Phoenix.PubSub process for the tests.
  # The `:fwf_test` connection name will be injected into this
  # library in `config/test.exs`.
  children = [
    {Phoenix.PubSub, [name: :fwf_test, adapter: Phoenix.PubSub.PG2, pool_size: 1]}
  ]

  opts = [strategy: :one_for_one, name: MyApp.Supervisor]
  {:ok, _pid} = Supervisor.start_link(children, opts)
end

# Print the effective configuration so CI logs show exactly which
# cache/persistence/pubsub combination this run exercises.
IO.puts "--------------------------------------------------------------"
IO.puts "$CACHE_ENABLED=#{System.get_env("CACHE_ENABLED")}"
IO.puts "$PERSISTENCE=#{System.get_env("PERSISTENCE")}"
IO.puts "$RDBMS=#{System.get_env("RDBMS")}"
IO.puts "$PUBSUB_BROKER=#{System.get_env("PUBSUB_BROKER")}"
IO.puts "--------------------------------------------------------------"
IO.puts "Logger level: #{inspect(Logger.level())}"
IO.puts "Cache enabled: #{inspect(FunWithFlags.Config.cache?)}"
IO.puts "Persistence adapter: #{inspect(FunWithFlags.Config.persistence_adapter())}"
IO.puts "RDBMS driver: #{inspect(if FunWithFlags.Config.persist_in_ecto?, do: FunWithFlags.Dev.EctoRepo.__adapter__(), else: nil)}"
IO.puts "Notifications adapter: #{inspect(FunWithFlags.Config.notifications_adapter())}"
IO.puts "Anything using Redis: #{inspect(does_anything_need_redis)}"
IO.puts "--------------------------------------------------------------"

# Presumably switches to a dedicated Redis test database — see
# FunWithFlags.TestUtils for the details.
if does_anything_need_redis do
  FunWithFlags.TestUtils.use_redis_test_db()
end

ExUnit.start()

# When persisting in Ecto, boot the dev repo and enable the SQL sandbox so
# each test checks out its own connection.
if FunWithFlags.Config.persist_in_ecto? do
  {:ok, _pid} = FunWithFlags.Dev.EctoRepo.start_link()
  Ecto.Adapters.SQL.Sandbox.mode(FunWithFlags.Dev.EctoRepo, :manual)
end
| 43.425532 | 140 | 0.675159 |
9eed4cdc92e6f1990d274d9156f58adb5b32867a | 10,189 | ex | Elixir | lib/axon/metrics.ex | kamidev/axon | 2db36f4142f6b1ee66e8b24f5c83fd2f95b49f5e | [
"Apache-2.0"
] | null | null | null | lib/axon/metrics.ex | kamidev/axon | 2db36f4142f6b1ee66e8b24f5c83fd2f95b49f5e | [
"Apache-2.0"
] | null | null | null | lib/axon/metrics.ex | kamidev/axon | 2db36f4142f6b1ee66e8b24f5c83fd2f95b49f5e | [
"Apache-2.0"
] | null | null | null | defmodule Axon.Metrics do
@moduledoc """
Metric functions.
Metrics are used to measure the performance and compare
performance of models in easy-to-understand terms. Often
times, neural networks use surrogate loss functions such
as negative log-likelihood to indirectly optimize a certain
performance metric. Metrics such as accuracy, also called
the 0-1 loss, do not have useful derivatives (e.g. they
are information sparse), and are often intractable even
with low input dimensions.
Despite not being able to train specifically for certain
metrics, it's still useful to track these metrics to
monitor the performance of a neural network during training.
Metrics such as accuracy provide useful feedback during
training, whereas loss can sometimes be difficult to interpret.
All of the functions in this module are implemented as
numerical functions and can be JIT or AOT compiled with
any supported `Nx` compiler.
"""
import Nx.Defn
# Standard Metrics
@doc ~S"""
Computes the accuracy of the given predictions.
If the size of the last axis is 1, it performs a binary
accuracy computation with a threshold of 0.5. Otherwise,
computes categorical accuracy.
## Argument Shapes
* `y_true` - $\(d_0, d_1, ..., d_n\)$
* `y_pred` - $\(d_0, d_1, ..., d_n\)$
## Examples
iex> Axon.Metrics.accuracy(Nx.tensor([[1], [0], [0]]), Nx.tensor([[1], [1], [1]]))
#Nx.Tensor<
f32
0.3333333432674408
>
iex> Axon.Metrics.accuracy(Nx.tensor([[0, 1], [1, 0], [1, 0]]), Nx.tensor([[0, 1], [1, 0], [0, 1]]))
#Nx.Tensor<
f32
0.6666666865348816
>
iex> Axon.Metrics.accuracy(Nx.tensor([[0, 1, 0], [1, 0, 0]]), Nx.tensor([[0, 1, 0], [0, 1, 0]]))
#Nx.Tensor<
f32
0.5
>
"""
  defn accuracy(y_true, y_pred) do
    # A trailing axis of size 1 means a single-value (binary) prediction:
    # threshold at 0.5 and compare elementwise with the targets.
    if elem(Nx.shape(y_pred), Nx.rank(y_pred) - 1) == 1 do
      y_pred
      |> Nx.greater(0.5)
      |> Nx.equal(y_true)
      |> Nx.mean()
    else
      # Otherwise treat the last axis as class scores with one-hot targets:
      # compare the argmax of targets against the argmax of predictions.
      y_true
      |> Nx.argmax(axis: -1)
      |> Nx.equal(Nx.argmax(y_pred, axis: -1))
      |> Nx.mean()
    end
  end
@doc ~S"""
Computes the precision of the given predictions with
respect to the given targets.
## Argument Shapes
* `y_true` - $\(d_0, d_1, ..., d_n\)$
* `y_pred` - $\(d_0, d_1, ..., d_n\)$
## Options
* `:threshold` - threshold for truth value of the predictions.
Defaults to `0.5`
## Examples
iex> Axon.Metrics.precision(Nx.tensor([0, 1, 1, 1]), Nx.tensor([1, 0, 1, 1]))
#Nx.Tensor<
f32
0.6666666865348816
>
"""
defn precision(y_true, y_pred, opts \\ []) do
true_positives = true_positives(y_true, y_pred, opts)
false_positives = false_positives(y_true, y_pred, opts)
true_positives
|> Nx.divide(true_positives + false_positives + 1.0e-16)
end
@doc ~S"""
Computes the recall of the given predictions with
respect to the given targets.
## Argument Shapes
* `y_true` - $\(d_0, d_1, ..., d_n\)$
* `y_pred` - $\(d_0, d_1, ..., d_n\)$
## Options
* `:threshold` - threshold for truth value of the predictions.
Defaults to `0.5`
## Examples
iex> Axon.Metrics.recall(Nx.tensor([0, 1, 1, 1]), Nx.tensor([1, 0, 1, 1]))
#Nx.Tensor<
f32
0.6666666865348816
>
"""
defn recall(y_true, y_pred, opts \\ []) do
true_positives = true_positives(y_true, y_pred, opts)
false_negatives = false_negatives(y_true, y_pred, opts)
Nx.divide(true_positives, false_negatives + true_positives + 1.0e-16)
end
@doc """
Computes the number of true positive predictions with respect
to given targets.
## Options
* `:threshold` - threshold for truth value of predictions.
Defaults to `0.5`.
## Examples
iex> y_true = Nx.tensor([1, 0, 1, 1, 0, 1, 0])
iex> y_pred = Nx.tensor([0.8, 0.6, 0.4, 0.2, 0.8, 0.2, 0.2])
iex> Axon.Metrics.true_positives(y_true, y_pred)
#Nx.Tensor<
u64
1
>
"""
defn true_positives(y_true, y_pred, opts \\ []) do
opts = keyword!(opts, threshold: 0.5)
thresholded_preds =
y_pred
|> Nx.greater(opts[:threshold])
thresholded_preds
|> Nx.equal(y_true)
|> Nx.logical_and(Nx.equal(thresholded_preds, 1))
|> Nx.sum()
end
@doc """
Computes the number of false negative predictions with respect
to given targets.
## Options
* `:threshold` - threshold for truth value of predictions.
Defaults to `0.5`.
## Examples
iex> y_true = Nx.tensor([1, 0, 1, 1, 0, 1, 0])
iex> y_pred = Nx.tensor([0.8, 0.6, 0.4, 0.2, 0.8, 0.2, 0.2])
iex> Axon.Metrics.false_negatives(y_true, y_pred)
#Nx.Tensor<
u64
3
>
"""
defn false_negatives(y_true, y_pred, opts \\ []) do
opts = keyword!(opts, threshold: 0.5)
thresholded_preds =
y_pred
|> Nx.greater(opts[:threshold])
thresholded_preds
|> Nx.not_equal(y_true)
|> Nx.logical_and(Nx.equal(thresholded_preds, 0))
|> Nx.sum()
end
@doc """
Computes the number of true negative predictions with respect
to given targets.
## Options
* `:threshold` - threshold for truth value of predictions.
Defaults to `0.5`.
## Examples
iex> y_true = Nx.tensor([1, 0, 1, 1, 0, 1, 0])
iex> y_pred = Nx.tensor([0.8, 0.6, 0.4, 0.2, 0.8, 0.2, 0.2])
iex> Axon.Metrics.true_negatives(y_true, y_pred)
#Nx.Tensor<
u64
1
>
"""
defn true_negatives(y_true, y_pred, opts \\ []) do
opts = keyword!(opts, threshold: 0.5)
thresholded_preds =
y_pred
|> Nx.greater(opts[:threshold])
thresholded_preds
|> Nx.equal(y_true)
|> Nx.logical_and(Nx.equal(thresholded_preds, 0))
|> Nx.sum()
end
@doc """
Computes the number of false positive predictions with respect
to given targets.
## Options
* `:threshold` - threshold for truth value of predictions.
Defaults to `0.5`.
## Examples
iex> y_true = Nx.tensor([1, 0, 1, 1, 0, 1, 0])
iex> y_pred = Nx.tensor([0.8, 0.6, 0.4, 0.2, 0.8, 0.2, 0.2])
iex> Axon.Metrics.false_positives(y_true, y_pred)
#Nx.Tensor<
u64
2
>
"""
defn false_positives(y_true, y_pred, opts \\ []) do
opts = keyword!(opts, threshold: 0.5)
thresholded_preds =
y_pred
|> Nx.greater(opts[:threshold])
thresholded_preds
|> Nx.not_equal(y_true)
|> Nx.logical_and(Nx.equal(thresholded_preds, 1))
|> Nx.sum()
end
@doc ~S"""
Computes the sensitivity of the given predictions
with respect to the given targets.
## Argument Shapes
* `y_true` - $\(d_0, d_1, ..., d_n\)$
* `y_pred` - $\(d_0, d_1, ..., d_n\)$
## Options
* `:threshold` - threshold for truth value of the predictions.
Defaults to `0.5`
## Examples
iex> Axon.Metrics.sensitivity(Nx.tensor([0, 1, 1, 1]), Nx.tensor([1, 0, 1, 1]))
#Nx.Tensor<
f32
0.6666666865348816
>
"""
defn sensitivity(y_true, y_pred, opts \\ []) do
opts = keyword!(opts, threshold: 0.5)
recall(y_true, y_pred, opts)
end
@doc ~S"""
Computes the specificity of the given predictions
with respect to the given targets.
## Argument Shapes
* `y_true` - $\(d_0, d_1, ..., d_n\)$
* `y_pred` - $\(d_0, d_1, ..., d_n\)$
## Options
* `:threshold` - threshold for truth value of the predictions.
Defaults to `0.5`
## Examples
iex> Axon.Metrics.specificity(Nx.tensor([0, 1, 1, 1]), Nx.tensor([1, 0, 1, 1]))
#Nx.Tensor<
f32
0.0
>
"""
defn specificity(y_true, y_pred, opts \\ []) do
opts = keyword!(opts, threshold: 0.5)
thresholded_preds = Nx.greater(y_pred, opts[:threshold])
true_negatives =
thresholded_preds
|> Nx.equal(y_true)
|> Nx.logical_and(Nx.equal(thresholded_preds, 0))
|> Nx.sum()
false_positives =
thresholded_preds
|> Nx.not_equal(y_true)
|> Nx.logical_and(Nx.equal(thresholded_preds, 1))
|> Nx.sum()
Nx.divide(true_negatives, false_positives + true_negatives + 1.0e-16)
end
@doc ~S"""
Calculates the mean absolute error of predictions
with respect to targets.
$$l_i = \sum_i |\hat{y_i} - y_i|$$
## Argument Shapes
* `y_true` - $\(d_0, d_1, ..., d_n\)$
* `y_pred` - $\(d_0, d_1, ..., d_n\)$
## Examples
iex> y_true = Nx.tensor([[0.0, 1.0], [0.0, 0.0]], type: {:f, 32})
iex> y_pred = Nx.tensor([[1.0, 1.0], [1.0, 0.0]], type: {:f, 32})
iex> Axon.Metrics.mean_absolute_error(y_true, y_pred)
#Nx.Tensor<
f32
0.5
>
"""
defn mean_absolute_error(y_true, y_pred) do
y_true
|> Nx.subtract(y_pred)
|> Nx.abs()
|> Nx.mean()
end
# Combinators
@doc """
Returns a function which computes a running average given current average,
new observation, and current iteration.
## Examples
iex> cur_avg = 0.5
iex> iteration = 1
iex> y_true = Nx.tensor([[0, 1], [1, 0], [1, 0]])
iex> y_pred = Nx.tensor([[0, 1], [1, 0], [1, 0]])
iex> avg_acc = Axon.Metrics.running_average(&Axon.Metrics.accuracy/2)
iex> avg_acc.(cur_avg, [y_true, y_pred], iteration)
#Nx.Tensor<
f32
0.75
>
"""
def running_average(metric) do
&running_average_impl(&1, apply(metric, &2), &3)
end
defnp running_average_impl(avg, obs, i) do
avg
|> Nx.multiply(i)
|> Nx.add(obs)
|> Nx.divide(Nx.add(i, 1))
end
@doc """
Returns a function which computes a running sum given current sum,
new observation, and current iteration.
## Examples
iex> cur_sum = 12
iex> iteration = 2
iex> y_true = Nx.tensor([0, 1, 0, 1])
iex> y_pred = Nx.tensor([1, 1, 0, 1])
iex> fps = Axon.Metrics.running_sum(&Axon.Metrics.false_positives/2)
iex> fps.(cur_sum, [y_true, y_pred], iteration)
#Nx.Tensor<
s64
13
>
"""
def running_sum(metric) do
&running_sum_impl(&1, apply(metric, &2), &3)
end
defnp running_sum_impl(sum, obs, _) do
Nx.add(sum, obs)
end
end
| 24.259524 | 106 | 0.595446 |
9eed6db1358f61daaa8a2a6cc9d767b5bf50e8c9 | 5,761 | exs | Elixir | test/core/sup_tree_core/tenant_executor_pools_manager_test.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | test/core/sup_tree_core/tenant_executor_pools_manager_test.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | test/core/sup_tree_core/tenant_executor_pools_manager_test.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2018 ACCESS CO., LTD. All rights reserved.
defmodule AntikytheraCore.TenantExecutorPoolsManagerTest do
  @moduledoc """
  Exercises `TenantExecutorPoolsManager`'s lifecycle handling of per-tenant
  executor pools: pools must be started/stopped/resized as the tenant setting
  file changes, and the tenant-to-gears ETS mapping must stay in sync.

  NOTE(review): these tests are timing-sensitive — they poke the manager with
  a `:check` message and wait for its one-off fetcher process to exit; the
  `:timer.sleep` calls below compensate for file-mtime granularity and
  asynchronous ETS updates.
  """
  use Croma.TestCase
  alias Antikythera.Test.ProcessHelper
  alias AntikytheraCore.ExecutorPool
  alias AntikytheraCore.ExecutorPool.Setting, as: EPoolSetting
  alias AntikytheraCore.ExecutorPool.TenantSetting
  alias AntikytheraCore.ExecutorPool.RegisteredName, as: RegName
  alias AntikytheraCore.Ets.TenantToGearsMapping
  alias AntikytheraCore.Path, as: CorePath

  # Fixed tenant under test; executor pool ID, registered supervisor name and
  # setting-file path are all derived from it.
  @tenant_id "g_12345678"
  @epool_id {:tenant, @tenant_id}
  @sup_name RegName.supervisor_unsafe(@epool_id)
  @tenant_setting_path CorePath.tenant_setting_file_path(@tenant_id)

  # Blocks until the manager's one-off fetcher process (kept under `:fetcher`
  # in its state as `{pid, ref}`) has terminated and been cleared.
  defp wait_until_one_off_fetcher_finishes() do
    case :sys.get_state(TenantExecutorPoolsManager)[:fetcher] do
      nil -> :ok
      {pid, _ref} ->
        ProcessHelper.monitor_wait(pid)
        :timer.sleep(10)
        assert :sys.get_state(TenantExecutorPoolsManager)[:fetcher] == nil
    end
  end

  # Triggers one `:check` tick on the manager and waits for it to settle.
  defp send_check_and_wait() do
    send(TenantExecutorPoolsManager, :check)
    wait_until_one_off_fetcher_finishes()
  end

  # Persists the tenant setting and lets the manager pick it up.
  defp put_tenant_setting(setting) do
    TenantSetting.put(@tenant_id, setting)
    send_check_and_wait()
  end

  # Asserts the tenant is associated with exactly `gears` in the ETS mapping.
  defp assert_record_exists(gears) do
    assert :ets.lookup(TenantToGearsMapping.table_name(), @tenant_id) == [{@tenant_id, gears}]
  end

  defp assert_record_not_exists() do
    assert :ets.lookup(TenantToGearsMapping.table_name(), @tenant_id) == []
  end

  # Shorthand for a `TenantSetting` with the given pool capacities and gears.
  defp make_setting(na, sa, sj, ws, gears) do
    %TenantSetting{n_pools_a: na, pool_size_a: sa, pool_size_j: sj, ws_max_connections: ws, gears: gears}
  end

  setup do
    File.write!(CorePath.tenant_ids_file_path(), :zlib.gzip(@tenant_id))
    on_exit(fn ->
      # Remove setting => stop and remove ETS record
      ExecutorPoolHelper.kill_and_wait(@epool_id, fn ->
        settings_before = :sys.get_state(TenantExecutorPoolsManager)[:settings]
        File.write!(CorePath.tenant_ids_file_path(), :zlib.gzip(""))
        File.rm(@tenant_setting_path)
        # First :check only receives the removal; the second actually drops it.
        send_check_and_wait()
        assert :sys.get_state(TenantExecutorPoolsManager)[:settings] == settings_before
        send_check_and_wait()
        assert :sys.get_state(TenantExecutorPoolsManager)[:settings] == %{}
      end)
    end)
  end

  test "should properly start/stop/update ExecutorPool for tenant" do
    # Setting with no gears => don't write it as a file
    put_tenant_setting(make_setting(1, 5, 1, 100, []))
    refute File.exists?(@tenant_setting_path)
    assert is_nil(Process.whereis(@sup_name))
    assert_record_not_exists()
    # Add setting with a gear => start and set ETS record
    put_tenant_setting(make_setting(1, 5, 1, 100, [:gear1]))
    assert File.exists?(@tenant_setting_path)
    assert is_pid(Process.whereis(@sup_name))
    ExecutorPoolHelper.assert_current_setting(@epool_id, 1, 5, 1, 100)
    assert_record_exists([:gear1])
    ExecutorPoolHelper.wait_until_async_job_queue_added(@epool_id)
    # Next :check without doing anything => nothing changed
    send_check_and_wait()
    assert is_pid(Process.whereis(@sup_name))
    ExecutorPoolHelper.assert_current_setting(@epool_id, 1, 5, 1, 100)
    assert_record_exists([:gear1])
    # Update setting to remove the gear => remove at the next :check
    ExecutorPoolHelper.kill_and_wait(@epool_id, fn ->
      put_tenant_setting(make_setting(1, 5, 1, 100, []))
      refute File.exists?(@tenant_setting_path)
      assert is_pid(Process.whereis(@sup_name))
      assert_record_exists([:gear1])
      :timer.sleep(1_000) # wait until file modification becomes stale
      send_check_and_wait() # receive the disassociated tenant setting; still associated
      send_check_and_wait() # actually disassociate
      assert_record_not_exists()
      assert is_nil(Process.whereis(@sup_name))
    end)
    # Update to associate gear again => start and set ETS record
    put_tenant_setting(make_setting(2, 4, 1, 200, [:gear1]))
    assert File.exists?(@tenant_setting_path)
    assert is_pid(Process.whereis(@sup_name))
    ExecutorPoolHelper.assert_current_setting(@epool_id, 2, 4, 1, 200)
    assert_record_exists([:gear1])
    ExecutorPoolHelper.wait_until_async_job_queue_added(@epool_id)
    # Update capacity => apply new capacity setting
    put_tenant_setting(make_setting(1, 3, 1, 100, [:gear1]))
    assert File.exists?(@tenant_setting_path)
    assert is_pid(Process.whereis(@sup_name))
    ExecutorPoolHelper.hurry_action_pool_multi(@epool_id)
    ExecutorPoolHelper.assert_current_setting(@epool_id, 1, 3, 1, 100)
    assert_record_exists([:gear1])
    # Update to associate one more gear => update ETS record
    put_tenant_setting(make_setting(1, 3, 1, 100, [:gear1, :gear2]))
    assert File.exists?(@tenant_setting_path)
    assert is_pid(Process.whereis(@sup_name))
    ExecutorPoolHelper.hurry_action_pool_multi(@epool_id)
    ExecutorPoolHelper.assert_current_setting(@epool_id, 1, 3, 1, 100)
    :timer.sleep(10)
    assert_record_exists([:gear1, :gear2])
  end

  test "should tolerate already running executor pools" do
    # spawned by someone (previous TenantExecutorpoolsManager) other than the current one
    setting = make_setting(1, 5, 1, 100, [:gear1])
    TenantSetting.put(@tenant_id, setting)
    ExecutorPool.start_executor_pool(@epool_id, EPoolSetting.new!(setting))
    ExecutorPoolHelper.assert_current_setting(@epool_id, 1, 5, 1, 100)
    ExecutorPoolHelper.wait_until_async_job_queue_added(@epool_id)
    # The manager must adopt (not crash on) the pre-existing pool.
    refute Map.has_key?(TenantExecutorPoolsManager.all(), @tenant_id)
    send_check_and_wait()
    assert Map.has_key?(TenantExecutorPoolsManager.all(), @tenant_id)
    ExecutorPoolHelper.assert_current_setting(@epool_id, 1, 5, 1, 100)
  end
end
| 41.446043 | 105 | 0.733553 |
9eed8f2e269ba62ccd6f76974fd1a763a2ada21e | 9,606 | ex | Elixir | lib/firestorm_web/markdown/html_renderer.ex | palindrom615/firestorm | 0690493c9dcae5c04c63c5321532a7db923e5be7 | [
"MIT"
] | null | null | null | lib/firestorm_web/markdown/html_renderer.ex | palindrom615/firestorm | 0690493c9dcae5c04c63c5321532a7db923e5be7 | [
"MIT"
] | null | null | null | lib/firestorm_web/markdown/html_renderer.ex | palindrom615/firestorm | 0690493c9dcae5c04c63c5321532a7db923e5be7 | [
"MIT"
] | null | null | null | # This is a clone of https://raw.githubusercontent.com/pragdave/earmark/ea6382092c931ab4dd6d0dac6425430c78a61a6d/lib/earmark/html_renderer.ex
#
# Justification for forking this module:
# - Earmark explicitly provides support for this, but we can't reuse the
# existing render_* functions as they are private.
# - No existing ability to customize the classes that are produced for the `pre`
# tag, which is necessary to use http://prismjs.com/plugins/line-numbers/
defmodule FirestormWeb.Markdown.HtmlRenderer do
alias Earmark.Block
alias Earmark.Context
alias Earmark.Options
import Earmark.Inline, only: [convert: 3]
import Earmark.Helpers, only: [escape: 2]
import Earmark.Helpers.HtmlHelpers
import Earmark.Message, only: [add_messages_from: 2, add_messages: 2, get_messages: 1]
import Earmark.Context, only: [append: 2, set_value: 2]
def render(blocks, context = %Context{options: %Options{mapper: mapper}}) do
messages = get_messages(context)
{contexts, html} =
mapper.(blocks, &render_block(&1, put_in(context.options.messages, []))) |> Enum.unzip()
all_messages =
contexts
|> Enum.reduce(messages, fn ctx, messages1 -> messages1 ++ get_messages(ctx) end)
{put_in(context.options.messages, all_messages), html |> IO.iodata_to_binary()}
end
#############
# Paragraph #
#############
defp render_block(%Block.Para{lnb: lnb, lines: lines, attrs: attrs}, context) do
alias FirestormWeb.Markdown.{AutoLinker, EmojiReplacer}
lines =
lines
|> convert(lnb, context)
|> transform_value(&AutoLinker.run/1)
|> transform_value(&EmojiReplacer.run/1)
add_attrs!(lines, "<p>#{lines.value}</p>\n", attrs, [], lnb)
end
########
# Html #
########
defp render_block(%Block.Html{html: html}, context) do
{context, Enum.intersperse(html, ?\n)}
end
defp render_block(%Block.HtmlOther{html: html}, context) do
{context, Enum.intersperse(html, ?\n)}
end
#########
# Ruler #
#########
defp render_block(%Block.Ruler{lnb: lnb, type: "-", attrs: attrs}, context) do
add_attrs!(context, "<hr/>\n", attrs, [{"class", ["thin"]}], lnb)
end
defp render_block(%Block.Ruler{lnb: lnb, type: "_", attrs: attrs}, context) do
add_attrs!(context, "<hr/>\n", attrs, [{"class", ["medium"]}], lnb)
end
defp render_block(%Block.Ruler{lnb: lnb, type: "*", attrs: attrs}, context) do
add_attrs!(context, "<hr/>\n", attrs, [{"class", ["thick"]}], lnb)
end
###########
# Heading #
###########
defp render_block(
%Block.Heading{lnb: lnb, level: level, content: content, attrs: attrs},
context
) do
converted = convert(content, lnb, context)
html = "<h#{level}>#{converted.value}</h#{level}>\n"
add_attrs!(converted, html, attrs, [], lnb)
end
##############
# Blockquote #
##############
defp render_block(%Block.BlockQuote{lnb: lnb, blocks: blocks, attrs: attrs}, context) do
{context1, body} = render(blocks, context)
html = "<blockquote>#{body}</blockquote>\n"
add_attrs!(context1, html, attrs, [], lnb)
end
#########
# Table #
#########
defp render_block(
%Block.Table{lnb: lnb, header: header, rows: rows, alignments: aligns, attrs: attrs},
context
) do
cols = for _align <- aligns, do: "<col>\n"
{context1, html} = add_attrs!(context, "<table>\n", attrs, [], lnb)
html = [html, "<colgroup>\n", cols, "</colgroup>\n"]
context2 = set_value(context1, html)
context3 =
if header do
append(add_trs(append(context2, "<thead>\n"), [header], "th", aligns, lnb), "</thead>\n")
else
# Maybe an error, needed append(context, html)
context2
end
context4 = add_trs(context3, rows, "td", aligns, lnb)
{context4, [context4.value, "</table>\n"]}
end
########
# Code #
########
defp render_block(
%Block.Code{lnb: lnb, language: language, attrs: attrs} = block,
context = %Context{options: options}
) do
class =
if language, do: ~s{ class="#{code_classes(language, options.code_class_prefix)}"}, else: ""
tag = ~s[<pre class="#{pre_classes()}"><code#{class}>]
lines = options.render_code.(block)
html = ~s[#{tag}#{lines}</code></pre>\n]
add_attrs!(context, html, attrs, [], lnb)
end
#########
# Lists #
#########
defp render_block(
%Block.List{lnb: lnb, type: type, blocks: items, attrs: attrs, start: start},
context
) do
{context1, content} = render(items, context)
html = "<#{type}#{start}>\n#{content}</#{type}>\n"
add_attrs!(context1, html, attrs, [], lnb)
end
# format a single paragraph list item, and remove the para tags
defp render_block(
%Block.ListItem{lnb: lnb, blocks: blocks, spaced: false, attrs: attrs},
context
)
when length(blocks) == 1 do
{context1, content} = render(blocks, context)
content = Regex.replace(~r{</?p>}, content, "")
html = "<li>#{content}</li>\n"
add_attrs!(context1, html, attrs, [], lnb)
end
# format a spaced list item
defp render_block(%Block.ListItem{lnb: lnb, blocks: blocks, attrs: attrs}, context) do
{context1, content} = render(blocks, context)
html = "<li>#{content}</li>\n"
add_attrs!(context1, html, attrs, [], lnb)
end
##################
# Footnote Block #
##################
defp render_block(%Block.FnList{blocks: footnotes}, context) do
items =
Enum.map(footnotes, fn note ->
blocks = append_footnote_link(note)
%Block.ListItem{attrs: "#fn:#{note.number}", type: :ol, blocks: blocks}
end)
{context1, html} = render_block(%Block.List{type: :ol, blocks: items}, context)
{context1, Enum.join([~s[<div class="footnotes">], "<hr>", html, "</div>"], "\n")}
end
#######################################
# Isolated IALs are rendered as paras #
#######################################
defp render_block(%Block.Ial{verbatim: verbatim}, context) do
{context, "<p>{:#{verbatim}}</p>\n"}
end
####################
# IDDef is ignored #
####################
defp render_block(%Block.IdDef{}, context), do: {context, ""}
###########
# Plugins #
###########
defp render_block(%Block.Plugin{lines: lines, handler: handler}, context) do
case handler.as_html(lines) do
html when is_list(html) -> {context, html}
{html, errors} -> {add_messages(context, errors), html}
html -> {context, [html]}
end
end
#####################################
# And here are the inline renderers #
#####################################
def br, do: "<br/>"
def codespan(text), do: ~s[<code class="inline">#{text}</code>]
def em(text), do: "<em>#{text}</em>"
def strong(text), do: "<strong>#{text}</strong>"
def strikethrough(text), do: "<del>#{text}</del>"
def link(url, text), do: ~s[<a href="#{url}">#{text}</a>]
def link(url, text, nil), do: ~s[<a href="#{url}">#{text}</a>]
def link(url, text, title), do: ~s[<a href="#{url}" title="#{title}">#{text}</a>]
def image(path, alt, nil) do
~s[<img src="#{path}" alt="#{alt}"/>]
end
def image(path, alt, title) do
~s[<img src="#{path}" alt="#{alt}" title="#{title}"/>]
end
def footnote_link(ref, backref, number),
do: ~s[<a href="##{ref}" id="#{backref}" class="footnote" title="see footnote">#{number}</a>]
# Table rows
def add_trs(context, rows, tag, aligns, lnb) do
numbered_rows =
rows
|> Enum.zip(Stream.iterate(lnb, &(&1 + 1)))
# for {row, lnb1} <- numbered_rows, do: "<tr>\n#{add_tds(context, row, tag, aligns, lnb1)}\n</tr>\n"
numbered_rows
|> Enum.reduce(context, fn {row, lnb}, ctx ->
append(add_tds(append(ctx, "<tr>\n"), row, tag, aligns, lnb), "\n</tr>\n")
end)
end
defp add_tds(context, row, tag, aligns, lnb) do
Enum.reduce(1..length(row), context, add_td_fn(row, tag, aligns, lnb))
end
defp add_td_fn(row, tag, aligns, lnb) do
fn n, ctx ->
style =
case Enum.at(aligns, n - 1, :default) do
:default -> ""
align -> " style=\"text-align: #{align}\""
end
col = Enum.at(row, n - 1)
converted = convert(col, lnb, ctx)
append(add_messages_from(ctx, converted), "<#{tag}#{style}>#{converted.value}</#{tag}>")
end
end
###############################
# Append Footnote Return Link #
###############################
def append_footnote_link(note = %Block.FnDef{}) do
fnlink =
~s[<a href="#fnref:#{note.number}" title="return to article" class="reversefootnote">↩</a>]
[last_block | blocks] = Enum.reverse(note.blocks)
last_block = append_footnote_link(last_block, fnlink)
Enum.reverse([last_block | blocks])
|> List.flatten()
end
def append_footnote_link(block = %Block.Para{lines: lines}, fnlink) do
[last_line | lines] = Enum.reverse(lines)
last_line = "#{last_line} #{fnlink}"
[put_in(block.lines, Enum.reverse([last_line | lines]))]
end
def append_footnote_link(block, fnlink) do
[block, %Block.Para{lines: fnlink}]
end
def render_code(%Block.Code{lines: lines}) do
lines |> Enum.join("\n") |> escape(true)
end
defp code_classes(language, prefix) do
["" | String.split(prefix || "")]
|> Enum.map(fn pfx -> "#{pfx}#{language}" end)
|> Enum.join(" ")
end
defp pre_classes() do
"line-numbers"
end
defp transform_value(context, transformer) do
%{context | value: transformer.(context.value)}
end
end
# SPDX-License-Identifier: Apache-2.0
| 30.788462 | 141 | 0.589423 |
9eeda97aa5048492414e67c2573b6f58292b87fd | 14,218 | ex | Elixir | lib/ex_admin/helpers.ex | landonwilkins/ex_admin | bc6fa5c5e82add16b90e92f9abf388f5b04f67ec | [
"MIT"
] | null | null | null | lib/ex_admin/helpers.ex | landonwilkins/ex_admin | bc6fa5c5e82add16b90e92f9abf388f5b04f67ec | [
"MIT"
] | 1 | 2021-07-08T17:24:01.000Z | 2021-11-02T09:49:33.000Z | lib/ex_admin/helpers.ex | landonwilkins/ex_admin | bc6fa5c5e82add16b90e92f9abf388f5b04f67ec | [
"MIT"
] | 2 | 2020-01-14T21:10:46.000Z | 2021-07-07T16:36:49.000Z | Code.ensure_compiled(ExAdmin.Utils)
defmodule ExAdmin.Helpers do
@moduledoc false
require Logger
require Integer
use Xain
import Kernel, except: [to_string: 1]
import ExAdmin.Utils
import ExAdmin.Render
  # Renders a fieldset legend for `name` using Xain markup macros.
  # A nil or empty name yields an empty list (no legend rendered).
  def build_fieldset_legend(nil), do: []
  def build_fieldset_legend(""), do: []
  def build_fieldset_legend(name) do
    [
      legend ".inputs" do
        span(name)
      end
    ]
  end
  # Wraps `contents` in a link to the index page of the association stored
  # under `field_name`, when the user is authorized to view it.
  #
  # Clause order matters: nil/"" contents short-circuit to "", and
  # `%{link: false}` opts suppress linking entirely.
  #
  # NOTE(review): the final clause's `case` only matches nil or a map value —
  # any other field value raises CaseClauseError. Also the anchor is built by
  # raw string interpolation; `contents` is assumed to already be safe HTML —
  # TODO confirm callers escape it.
  def build_link(nil, _, _, _, _), do: ""
  def build_link("", _, _, _, _), do: ""
  def build_link(contents, _conn, %{link: false}, _resource, _field_name), do: contents
  def build_link(contents, conn, _, resource, field_name) do
    case Map.get(resource, field_name) do
      nil ->
        contents
      res when is_map(res) ->
        if ExAdmin.Utils.authorized_action?(conn, :index, res.__struct__) do
          path = admin_resource_path(res, :index)
          "<a href='#{path}'>#{contents}</a>"
        else
          contents
        end
    end
  end
def model_name(%{__struct__: name}), do: model_name(name)
def model_name(resource) when is_atom(resource) do
if has_function?(resource, :model_name, 0) do
resource.model_name()
else
resource |> ExAdmin.Utils.base_name() |> Inflex.underscore()
end
end
  # Wraps already-safe (`{:safe, _}`) contents in a show-page link.
  # When the field holds an Ecto struct (detected via `__meta__`) the link
  # always points at that struct; otherwise linking is controlled by
  # `opts[:link]` and targets `resource` itself.
  def build_link_for({:safe, _} = safe_contents, conn, opts, resource, field_name) do
    case Map.get(resource, field_name) do
      nil ->
        safe_contents
      %{__meta__: _} = res ->
        build_content_link(true, conn, res, safe_contents)
      _ ->
        build_content_link(opts[:link], conn, resource, safe_contents)
    end
  end
  def build_link_for("", _, _, _, _), do: ""
  def build_link_for(nil, _, _, _, _), do: ""
  def build_link_for(contents, _, %{link: false}, _, _), do: contents
  # Catch-all: HTML-escape raw contents into a `{:safe, _}` tuple and recurse
  # into the first clause. Clause order is significant here.
  def build_link_for(contents, d, a, b, c) do
    contents
    |> Phoenix.HTML.html_escape()
    |> build_link_for(d, a, b, c)
  end
defp build_content_link(link?, conn, resource, contents) do
if link? && ExAdmin.Utils.authorized_action?(conn, :show, resource) do
path = admin_resource_path(resource, :show)
a(href: path) do
contents
end
else
contents
end
end
def build_header_field(field, fun) do
case field do
{f_name, _} -> f_name
f_name -> f_name
end
|> fun.()
end
def get_relationship(resource, field_name) do
Map.get(resource, field_name, %{})
end
def map_relationship_fields(resource, fields, separator \\ " ")
def map_relationship_fields(nil, _fields, _separator), do: ""
def map_relationship_fields(resource, fields, separator) do
Enum.map(fields, &get_resource_field(resource, &1))
|> Enum.join(separator)
end
  # Returns the fields to render for an association; defaults to `[:name]`
  # when the options map does not specify `:fields`.
  def get_association_fields(%{fields: fields}), do: fields
  def get_association_fields(%{}), do: [:name]
def get_association_owner_key(resource, association) when is_binary(association),
do: get_association_owner_key(resource, String.to_atom(association))
def get_association_owner_key(resource, association) do
resource.__struct__.__schema__(:association, association).owner_key
end
  # Returns the Ecto schema type of `field` for a persisted struct; the
  # `__meta__` key distinguishes Ecto schema structs from plain structs.
  # Anything else (plain maps/structs) has no schema type, hence nil.
  defp get_field_type(%{__struct__: resource_struct, __meta__: _}, field) do
    resource_struct.__schema__(:type, field)
  end
  defp get_field_type(_resource, _field), do: nil
@doc """
Builds a web field.
Handles parsing relationships, linking to the relationship, passing a
concatenated string of each of the given fields.
"""
def build_field(resource, conn, field_name, fun) do
case field_name do
{f_name, %{has_many: _} = map2} ->
_build_field(map2, conn, resource, f_name)
|> fun.(f_name)
{f_name, %{} = opts} ->
f_name =
case get_field_type(resource, f_name) do
nil -> f_name
type -> {type, f_name}
end
build_single_field(resource, conn, f_name, opts)
|> fun.(f_name)
{f_name, []} ->
build_single_field(resource, conn, f_name, %{})
|> fun.(f_name)
_ ->
fun.("", :none)
end
end
  @doc """
  Renders a single field of `resource`, dispatching on the options map:
  `:image`+`:fun` renders an `<img>`, `:toggle` renders yes/no toggle
  buttons, `:fun` renders custom markup, and the fallback dispatches on the
  field's Ecto schema type via `build_single_field_type/5`.
  """
  # `{type, f_name}` tuples from build_field/4: drop the type tag here;
  # type-specific dispatch happens in build_single_field_type/5.
  def build_single_field(resource, conn, {_, f_name}, opts) do
    build_single_field(resource, conn, f_name, opts)
  end
  # Image field: `fun.(resource)` yields the src; remaining opts become HTML
  # attributes of the <img> tag.
  def build_single_field(resource, conn, f_name, %{fun: fun, image: true} = opts) do
    attributes =
      opts
      |> Map.delete(:fun)
      |> Map.delete(:image)
      |> build_attributes
    img(attributes, src: fun.(resource))
    |> build_link_for(conn, opts, resource, f_name)
  end
  # `toggle: true` is shorthand for YES/NO button labels.
  def build_single_field(resource, conn, f_name, %{toggle: true}) do
    build_single_field(resource, conn, f_name, %{toggle: ~w(YES NO)})
  end
  # Boolean toggle: renders two buttons hitting the :toggle_attr route via
  # data-remote PUT; the button matching the current value is highlighted
  # (btn-primary) and disabled.
  def build_single_field(resource, _conn, f_name, %{toggle: [yes, no]}) do
    path = fn attr_value ->
      admin_resource_path(resource, :toggle_attr, [[attr_name: f_name, attr_value: attr_value]])
    end
    current_value = Map.get(resource, f_name)
    [yes_btn_css, no_btn_css] =
      case current_value do
        true ->
          ["btn-primary", "btn-default"]
        false ->
          ["btn-default", "btn-primary"]
        value ->
          # Only boolean columns may be toggled; anything else is a config error.
          raise ArgumentError.exception(
                  "`toggle` option could be used only with columns of boolean type.\nBut `#{
                    f_name
                  }` is #{inspect(IEx.Info.info(value))}\nwith value == #{inspect(value)}"
                )
      end
    markup do
      a([
        {:id, "#{f_name}_true_#{resource.id}"},
        {:class, "toggle btn btn-sm #{yes_btn_css}"},
        {:href, path.(true)},
        {"data-remote", true},
        {"data-method", "put"},
        {:disabled, !!current_value}
      ]) do
        text(yes)
      end
      a([
        {:id, "#{f_name}_false_#{resource.id}"},
        {:class, "toggle btn btn-sm #{no_btn_css}"},
        {:href, path.(false)},
        {"data-remote", true},
        {"data-method", "put"},
        {:disabled, !current_value}
      ]) do
        text(no)
      end
    end
  end
  # Custom renderer: `fun.(resource)` runs inside a nested Xain markup block.
  def build_single_field(resource, conn, f_name, %{fun: fun} = opts) do
    markup :nested do
      fun.(resource)
    end
    |> build_link_for(conn, opts, resource, f_name)
  end
  # Ecto struct: dispatch on the field's schema type.
  def build_single_field(%{__struct__: resource_struct} = resource, conn, f_name, opts) do
    resource_struct.__schema__(:type, f_name)
    |> build_single_field_type(resource, conn, f_name, opts)
  end
  # Plain map resource: treat as :array_map (string-keyed access).
  def build_single_field(%{} = resource, conn, f_name, opts) do
    build_single_field_type(:array_map, resource, conn, f_name, opts)
  end
defp build_single_field_type({:array, type}, resource, conn, f_name, opts)
when type in [:string, :integer] do
case get_resource_field(resource, f_name, opts) do
list when is_list(list) ->
Enum.map(list, &to_string(&1))
|> Enum.join(", ")
other ->
to_string(other)
end
|> build_link_for(conn, opts, resource, f_name)
end
defp build_single_field_type(:array_map, resource, conn, f_name, opts) do
Map.get(resource, to_string(f_name), "")
|> build_link_for(conn, opts, resource, f_name)
end
defp build_single_field_type(_, resource, conn, f_name, opts) do
get_resource_field(resource, f_name, opts)
|> format_contents
|> build_link_for(conn, opts, resource, f_name)
end
defp format_contents(contents) when is_list(contents) do
contents
|> Enum.map(&format_contents/1)
|> to_string
end
defp format_contents(%{__struct__: _} = contents), do: to_string(contents)
defp format_contents(%{} = contents) do
Enum.reduce(contents, [], fn {k, v}, acc ->
value = ExAdmin.Render.to_string(v)
["#{k}: #{value}" | acc]
end)
|> Enum.reverse()
|> Enum.join(", ")
end
defp format_contents(contents), do: to_string(contents)
def get_resource_model(resources) do
case resources do
[] ->
""
[resource | _] ->
get_resource_model(resource)
%{__struct__: name} ->
name |> base_name |> Inflex.underscore()
%{} ->
:map
end
end
defp _build_field(%{fields: fields} = map, conn, resource, field_name) do
get_relationship(resource, field_name)
|> map_relationship_fields(fields)
|> build_link(conn, map, resource, field_name)
end
defp _build_field(%{}, _, _resource, _field_name), do: []
  # Collection-oriented field access: returns `[]` when the field is absent
  # or the association has not been preloaded (`Ecto.Association.NotLoaded`),
  # otherwise the raw value.
  def get_resource_field2(resource, field_name) do
    case Map.get(resource, field_name) do
      nil -> []
      %Ecto.Association.NotLoaded{} -> []
      other -> other
    end
  end
def get_resource_field(resource, field, opts \\ %{}) when is_map(resource) do
opts = Enum.into(opts, %{})
case resource do
%{__struct__: struct_name} ->
cond do
field in struct_name.__schema__(:fields) ->
Map.get(resource, field)
field in struct_name.__schema__(:associations) ->
get_relationship(resource, field)
|> map_relationship_fields(get_association_fields(opts))
has_function?(struct_name, field, 1) ->
try_function(struct_name, resource, field, fn _error ->
raise ExAdmin.RuntimeError,
message: "Could not call resource function #{:field} on #{struct_name}"
end)
function_exported?(
ExAdmin.get_registered(resource.__struct__).__struct__,
:display_name,
1
) ->
apply(ExAdmin.get_registered(resource.__struct__).__struct__, :display_name, [
resource
])
function_exported?(resource.__struct__, :display_name, 1) ->
apply(resource.__struct__, :display_name, [resource])
true ->
case resource.__struct__.__schema__(:fields) do
[_, first | _] ->
Map.get(resource, first)
[id | _] ->
Map.get(resource, id)
_ ->
raise ExAdmin.RuntimeError,
message: "Could not find field #{inspect(field)} in #{inspect(resource)}"
end
end
_ ->
raise ExAdmin.RuntimeError, message: "Resource must be a struct"
end
end
def get_name_field(resource_model) do
fields = resource_model.__schema__(:fields)
name_field = fields |> Enum.find(fn field -> field == :name || field == :title end)
if name_field do
name_field
else
fields |> Enum.find(fn field -> resource_model.__schema__(:type, field) == :string end)
end
end
  # Computes the display name for a resource, trying in order: a
  # `display_name/1` callback on the registered admin definition, one on the
  # schema module itself, the definition's configured :name_column, and
  # finally the schema's name field (via get_name_column_field/1).
  # NOTE: `to_string/1` below is ExAdmin.Render.to_string/1 (Kernel's is
  # excluded by the module imports).
  def display_name(resource) do
    defn = ExAdmin.get_registered(resource.__struct__)
    cond do
      is_nil(defn) ->
        get_name_column_field(resource)
      function_exported?(defn.__struct__, :display_name, 1) ->
        apply(defn.__struct__, :display_name, [resource])
      function_exported?(resource.__struct__, :display_name, 1) ->
        apply(resource.__struct__, :display_name, [resource])
      true ->
        case defn.name_column do
          nil -> get_name_column_field(resource)
          name_field -> resource |> Map.get(name_field) |> to_string
        end
    end
  end
defp get_name_column_field(resource) do
case get_name_field(resource.__struct__) do
nil -> inspect(resource)
field -> Map.get(resource, field)
end
end
  # Returns an identifying string for a resource: the value under `field`
  # (default :name) when set; otherwise, for structs, the result of calling a
  # 1-arity function named `field` on the module, falling back to the
  # titleized module base name if that call is missing or raises. Non-map
  # input yields "".
  def resource_identity(resource, field \\ :name)
  def resource_identity(resource, field) when is_map(resource) do
    case Map.get(resource, field) do
      nil ->
        case resource do
          %{__struct__: struct_name} ->
            if {field, 1} in struct_name.__info__(:functions) do
              try do
                apply(struct_name, field, [resource])
              rescue
                # Intentional broad rescue: any failure in the custom
                # function degrades to the module-name fallback.
                _ ->
                  struct_name |> base_name |> titleize
              end
            else
              struct_name |> base_name |> titleize
            end
          _ ->
            ""
        end
      name ->
        name
    end
  end
  def resource_identity(_, _), do: ""
def has_function?(struct_name, function, arity) do
{function, arity} in struct_name.__info__(:functions)
end
def try_function(struct_name, resource, function, rescue_fun \\ nil) do
try do
apply(struct_name, function, [resource])
rescue
error ->
if rescue_fun, do: rescue_fun.(error)
end
end
def timestamp do
:os.timestamp() |> Tuple.to_list() |> Enum.join() |> String.to_integer()
end
def group_by(collection, fun) do
list =
Enum.map(collection, fun)
|> Enum.uniq_by(& &1)
|> Enum.map(&{&1, []})
Enum.reduce(collection, list, fn item, acc ->
key = fun.(item)
{_, val} = List.keyfind(acc, key, 0)
List.keyreplace(acc, key, 0, {key, val ++ [item]})
end)
end
  # Collapses a keyword list with duplicate keys into one entry per key whose
  # value is the list of that key's values in REVERSE order of appearance.
  # NOTE(review): Keyword.put/3 moves the key to the front on each update, so
  # the resulting key order reflects last-occurrence order, not input order —
  # callers appear to rely only on the key/values pairing, not key order.
  def group_reduce_by_reverse(collection) do
    # Seed every key with an empty list so `acc[k]` below is always a list.
    empty =
      Keyword.keys(collection)
      |> Enum.reduce([], &Keyword.put(&2, &1, []))
    Enum.reduce(collection, empty, fn {k, v}, acc ->
      Keyword.put(acc, k, [v | acc[k]])
    end)
  end
  # Same as group_reduce_by_reverse/1 but with each key's values in original
  # order of appearance (reverses each grouped list).
  def group_reduce_by(collection) do
    group_reduce_by_reverse(collection)
    |> Enum.reduce([], fn {k, v}, acc ->
      Keyword.put(acc, k, Enum.reverse(v))
    end)
  end
  # Builds a CSS class name from a field name (binary, atom, or
  # `{type, name}` tuple), optionally prepending a prefix string.
  def to_class(prefix, field_name), do: prefix <> to_class(field_name)
  def to_class({_, field_name}), do: to_class(field_name)
  def to_class(field_name) when is_binary(field_name),
    do: field_name_to_class(Inflex.parameterize(field_name, "_"))
  def to_class(field_name) when is_atom(field_name),
    do: field_name_to_class(Atom.to_string(field_name))
def build_attributes(%{} = opts) do
build_attributes(Map.to_list(opts))
end
def build_attributes(opts) do
Enum.reduce(opts, "", fn {k, v}, acc ->
acc <> " #{k}='#{v}'"
end)
end
  # Translates a schema field into what the admin UI should treat it as:
  # a `foo_id` field whose `foo` association exists becomes the association
  # atom; :map / {:array, :map} typed fields become `{:map, field}` /
  # `{:maps, field}` tags; everything else passes through unchanged.
  def translate_field(defn, field) do
    case Regex.scan(~r/(.+)_id$/, Atom.to_string(field)) do
      [[_, assoc]] ->
        # NOTE(review): String.to_atom/1 on the captured prefix may create a
        # new atom when no such association exists; inputs here are schema
        # field names (a bounded set), so exhaustion risk looks low — but
        # to_existing_atom would raise for unknown prefixes, so left as-is.
        assoc = String.to_atom(assoc)
        if assoc in defn.resource_model.__schema__(:associations), do: assoc, else: field
      _ ->
        case defn.resource_model.__schema__(:type, field) do
          :map -> {:map, field}
          {:array, :map} -> {:maps, field}
          _ -> field
        end
    end
  end
  # Normalizes a field-name string into a CSS-safe class: strips a trailing
  # "?" (predicate-style fields) then parameterizes the rest
  # (`parameterize/1` comes from the `import ExAdmin.Utils` above).
  def field_name_to_class(field_name) do
    parameterize(String.replace_suffix(field_name, "?", ""))
  end
end
| 27.554264 | 96 | 0.613518 |
9eee253897dc39bf77b5aa1b884a3b7611db841b | 892 | ex | Elixir | clients/composer/lib/google_api/composer/v1beta1/metadata.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/composer/lib/google_api/composer/v1beta1/metadata.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/composer/lib/google_api/composer/v1beta1/metadata.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Composer.V1beta1 do
  @moduledoc """
  API client metadata for GoogleApi.Composer.V1beta1.
  """
  # Revision date of the Google discovery document this client was generated
  # from (file is auto-generated; do not edit manually).
  @discovery_revision "20200505"
  # Exposes the discovery revision, e.g. to detect client/API version skew.
  def discovery_revision(), do: @discovery_revision
end
| 33.037037 | 74 | 0.761211 |
9eee29563801a87b1b39c25add766cf2c9f4e5f1 | 3,778 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1beta4/api/flags.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/api/flags.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/sql_admin/lib/google_api/sql_admin/v1beta4/api/flags.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Api.Flags do
  @moduledoc """
  API calls for all endpoints tagged `Flags`.
  """

  alias GoogleApi.SQLAdmin.V1beta4.Connection
  alias GoogleApi.Gax.{Request, Response}

  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  # Every optional parameter of this endpoint is sent as a query parameter.
  @query_params [
    :"$.xgafv",
    :access_token,
    :alt,
    :callback,
    :fields,
    :key,
    :oauth_token,
    :prettyPrint,
    :quotaUser,
    :uploadType,
    :upload_protocol,
    :databaseVersion
  ]

  @doc """
  List all available database flags for Cloud SQL instances.
  ## Parameters
  * `connection` (*type:* `GoogleApi.SQLAdmin.V1beta4.Connection.t`) - Connection to server
  * `optional_params` (*type:* `keyword()`) - Optional parameters
  * `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
  * `:access_token` (*type:* `String.t`) - OAuth access token.
  * `:alt` (*type:* `String.t`) - Data format for response.
  * `:callback` (*type:* `String.t`) - JSONP
  * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
  * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
  * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
  * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
  * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
  * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
  * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
  * `:databaseVersion` (*type:* `String.t`) - Database type and version you want to retrieve flags for. By default, this method returns flags for all database types and versions.
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse{}}` on success
  * `{:error, info}` on failure
  """
  @spec sql_flags_list(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def sql_flags_list(connection, optional_params \\ [], opts \\ []) do
    optional_params_config = Map.new(@query_params, fn param -> {param, :query} end)

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/sql/v1beta4/flags", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse{}])
  end
end
| 43.930233 | 196 | 0.656167 |
9eee406502b943899e9704ecd5ded38ba7d77607 | 1,389 | exs | Elixir | examples/apps/ecto_example/test/ecto_example_test.exs | backdoer/elixir_agent | b42f891ed667fa799d0b82244fb465ce54429127 | [
"Apache-2.0"
] | null | null | null | examples/apps/ecto_example/test/ecto_example_test.exs | backdoer/elixir_agent | b42f891ed667fa799d0b82244fb465ce54429127 | [
"Apache-2.0"
] | null | null | null | examples/apps/ecto_example/test/ecto_example_test.exs | backdoer/elixir_agent | b42f891ed667fa799d0b82244fb465ce54429127 | [
"Apache-2.0"
] | null | null | null | defmodule EctoExampleTest do
use ExUnit.Case
alias NewRelic.Harvest.Collector
setup_all do
# Simulate the agent fully starting up
Process.whereis(Collector.TaskSupervisor) ||
NewRelic.EnabledSupervisor.start_link(:ok)
:ok
end
test "basic HTTP request flow" do
TestHelper.restart_harvest_cycle(Collector.Metric.HarvestCycle)
{:ok, %{body: body}} = request()
assert body =~ "world"
metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
assert TestHelper.find_metric(
metrics,
"Datastore/statement/Postgres/counts/insert",
2
)
assert TestHelper.find_metric(
metrics,
"Datastore/statement/MySQL/counts/insert",
2
)
assert TestHelper.find_metric(
metrics,
{"Datastore/statement/Postgres/counts/insert", "WebTransaction/Plug/GET//hello"},
2
)
assert TestHelper.find_metric(
metrics,
{"Datastore/statement/MySQL/counts/insert", "WebTransaction/Plug/GET//hello"},
2
)
end
def request() do
http_port = Application.get_env(:ecto_example, :http_port)
{:ok, {{_, _status_code, _}, _headers, body}} =
:httpc.request('http://localhost:#{http_port}/hello')
{:ok, %{body: to_string(body)}}
end
end
| 24.803571 | 94 | 0.614111 |
9eee4af5b8a7a416920589def45c1d8d0fe8f4df | 1,388 | ex | Elixir | lib/loaded_bike/web/controllers/waypoint_controller.ex | GBH/pedal | a2d68c3561f186ee3017a21b4170127b1625e18d | [
"MIT"
] | 48 | 2017-04-25T16:02:08.000Z | 2021-01-23T01:57:29.000Z | lib/loaded_bike/web/controllers/waypoint_controller.ex | GBH/pedal | a2d68c3561f186ee3017a21b4170127b1625e18d | [
"MIT"
] | 5 | 2018-03-09T20:17:55.000Z | 2018-07-23T16:29:21.000Z | lib/loaded_bike/web/controllers/waypoint_controller.ex | GBH/pedal | a2d68c3561f186ee3017a21b4170127b1625e18d | [
"MIT"
] | 4 | 2017-05-21T14:38:38.000Z | 2017-12-29T11:09:54.000Z | defmodule LoadedBike.Web.WaypointController do
use LoadedBike.Web, :controller
use LoadedBike.Web.Controller.Helpers
alias LoadedBike.{Tour, Waypoint, Photo}
def show(conn, %{"tour_id" => tour_id, "id" => id}) do
waypoints_query = Waypoint
|> Waypoint.published
|> Waypoint.select_without_gps
|> order_by(asc: :position)
tour = Tour
|> Tour.published
|> preload([:user, waypoints: ^waypoints_query])
|> Repo.get!(id_from_param(tour_id))
photos_query = Photo
|> order_by(asc: :inserted_at)
waypoint = assoc(tour, :waypoints)
|> Waypoint.published
|> preload(photos: ^photos_query)
|> Repo.get!(id_from_param(id))
|> Map.put(:tour, tour)
prev_waypoint = waypoint
|> Waypoint.previous
|> Waypoint.published
|> Repo.one
next_waypoint = waypoint
|> Waypoint.next
|> Waypoint.published
|> Repo.one
conn
|> add_breadcrumb(name: "All Tours", url: tour_path(conn, :index))
|> add_breadcrumb(name: tour.title, url: tour_path(conn, :show, tour))
|> add_breadcrumb(name: waypoint.title)
|> add_header_title(tour.title)
|> add_header_title(waypoint.title)
|> render("show.html",
tour: tour,
waypoint: waypoint,
next_waypoint: next_waypoint,
prev_waypoint: prev_waypoint)
end
end
| 27.76 | 74 | 0.631124 |
9eee6663093e270d355918cd1ea1472f24512ab2 | 1,598 | exs | Elixir | test/groupher_server_web/mutation/cms/upvotes/works_upvote_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | test/groupher_server_web/mutation/cms/upvotes/works_upvote_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | test/groupher_server_web/mutation/cms/upvotes/works_upvote_test.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Test.Mutation.Upvotes.WorksUpvote do
@moduledoc false
use GroupherServer.TestTools
alias GroupherServer.CMS
setup do
{:ok, works} = db_insert(:works)
{:ok, user} = db_insert(:user)
guest_conn = simu_conn(:guest)
user_conn = simu_conn(:user, user)
{:ok, ~m(user_conn guest_conn works user)a}
end
describe "[works upvote]" do
@query """
mutation($id: ID!) {
upvoteWorks(id: $id) {
id
}
}
"""
test "login user can upvote a works", ~m(user_conn works)a do
variables = %{id: works.id}
created = user_conn |> mutation_result(@query, variables, "upvoteWorks")
assert created["id"] == to_string(works.id)
end
test "unauth user upvote a works fails", ~m(guest_conn works)a do
variables = %{id: works.id}
assert guest_conn
|> mutation_get_error?(@query, variables, ecode(:account_login))
end
@query """
mutation($id: ID!) {
undoUpvoteWorks(id: $id) {
id
}
}
"""
test "login user can undo upvote to a works", ~m(user_conn works user)a do
{:ok, _} = CMS.upvote_article(:works, works.id, user)
variables = %{id: works.id}
updated = user_conn |> mutation_result(@query, variables, "undoUpvoteWorks")
assert updated["id"] == to_string(works.id)
end
test "unauth user undo upvote a works fails", ~m(guest_conn works)a do
variables = %{id: works.id}
assert guest_conn
|> mutation_get_error?(@query, variables, ecode(:account_login))
end
end
end
| 24.584615 | 82 | 0.61577 |
9eee81b757766b62d316537d4ceaa1cc687bc0df | 88 | exs | Elixir | test/howhow_speak_web/views/page_view_test.exs | pastleo/howhow_speak | c298cc6624cd41da3d3f045ab92d4bcf50b84898 | [
"MIT"
] | 1 | 2020-06-19T11:23:44.000Z | 2020-06-19T11:23:44.000Z | test/howhow_speak_web/views/page_view_test.exs | pastleo/howhow_speak | c298cc6624cd41da3d3f045ab92d4bcf50b84898 | [
"MIT"
] | null | null | null | test/howhow_speak_web/views/page_view_test.exs | pastleo/howhow_speak | c298cc6624cd41da3d3f045ab92d4bcf50b84898 | [
"MIT"
] | null | null | null | defmodule HowhowSpeakWeb.PageViewTest do
use HowhowSpeakWeb.ConnCase, async: true
end
| 22 | 42 | 0.840909 |
9eee88b1c313f2a317e922962ad1e61e0266e4a2 | 1,367 | exs | Elixir | config/config.exs | bengtson/internet_qos | dae17f18aca378bf9d5e9d07500059166f0a8597 | [
"MIT"
] | null | null | null | config/config.exs | bengtson/internet_qos | dae17f18aca378bf9d5e9d07500059166f0a8597 | [
"MIT"
] | null | null | null | config/config.exs | bengtson/internet_qos | dae17f18aca378bf9d5e9d07500059166f0a8597 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :internet_qos, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:internet_qos, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
# NOTE(review): this is a machine-specific absolute path — consider moving it
# into a per-environment config file (dev.exs) or reading it from an
# environment variable instead of hard-coding a developer's home directory.
config :internet_qos,
  modem_signal_file:
    "/Users/bengm0ra/Projects/FileliF/Compendiums/Internet QOS/Modem - Signal.dat"
# Status server configuration
# Host is given as an Erlang charlist; presumably consumed by the status
# server at boot together with :port and :start — confirm in that module.
config :internet_qos, :status_server, host: '10.0.1.181', port: 21200, start: true
| 36.945946 | 82 | 0.755669 |
9eee9174fde37ab0e883f62cd168f918b3452b60 | 198 | exs | Elixir | test/gutenex/geometry_test.exs | tulinmola/gutenex | 09a17dada273e089685176b5a14820430624675f | [
"MIT"
] | 184 | 2015-01-16T23:05:42.000Z | 2018-10-25T18:51:52.000Z | test/gutenex/geometry_test.exs | nicolasva/gutenex | 69aba56001455e3ff6ec6257d95b2f70e3fcffad | [
"MIT"
] | 15 | 2015-05-29T00:28:08.000Z | 2018-10-11T18:55:35.000Z | test/gutenex/geometry_test.exs | nicolasva/gutenex | 69aba56001455e3ff6ec6257d95b2f70e3fcffad | [
"MIT"
] | 32 | 2015-02-14T07:39:48.000Z | 2018-09-12T21:01:02.000Z | defmodule Gutenex.GeometryTest do
use ExUnit.Case, async: true
alias Gutenex.Geometry
test "#move_to should move to a point" do
assert Geometry.move_to({20, 40}) == "20 40 m\n"
end
end
| 22 | 52 | 0.707071 |
9eee99d11ad9f8806d0f11404008324e27fd6384 | 2,653 | ex | Elixir | lib/ex_sdp/serializer.ex | membraneframework/membrane-protocol-sdp | 4cb3028d62a722e364196b58b73732567306a931 | [
"Apache-2.0"
] | null | null | null | lib/ex_sdp/serializer.ex | membraneframework/membrane-protocol-sdp | 4cb3028d62a722e364196b58b73732567306a931 | [
"Apache-2.0"
] | 1 | 2020-07-31T10:37:43.000Z | 2020-08-10T09:22:13.000Z | lib/ex_sdp/serializer.ex | membraneframework/membrane-protocol-sdp | 4cb3028d62a722e364196b58b73732567306a931 | [
"Apache-2.0"
] | null | null | null | defmodule ExSDP.Serializer do
@moduledoc """
Module providing helper functions for serialization.
"""
@doc """
Serializes both sdp lines (<type>=<value>) and sdp parameters (<parameter>=<value>)
"""
@spec maybe_serialize(type :: binary(), value :: term()) :: binary()
def maybe_serialize(_type, nil), do: ""
def maybe_serialize(_type, []), do: ""
def maybe_serialize(type, values) when is_list(values),
do: Enum.map_join(values, "\n", fn value -> maybe_serialize(type, value) end)
def maybe_serialize(type, {:framerate, {frames, sec}}),
do: "#{type}=framerate:#{frames}/#{sec}"
def maybe_serialize(type, {:ice_ufrag, value}), do: "#{type}=ice-ufrag:#{value}"
def maybe_serialize(type, {:ice_pwd, value}), do: "#{type}=ice-pwd:#{value}"
def maybe_serialize(type, :ice_lite), do: "#{type}=ice-lite"
def maybe_serialize(type, {:ice_options, value}),
do: "#{type}=ice-options:#{serialize_ice_options(value)}"
def maybe_serialize(type, {:fingerprint, value}),
do: "#{type}=fingerprint:#{serialize_fingerprint(value)}"
def maybe_serialize(type, {:setup, value}), do: "#{type}=setup:#{serialize_setup(value)}"
def maybe_serialize(type, {:mid, value}), do: "#{type}=mid:#{value}"
def maybe_serialize(type, :rtcp_mux), do: "#{type}=rtcp-mux"
def maybe_serialize(type, :rtcp_rsize), do: "#{type}=rtcp-rsize"
def maybe_serialize(type, true), do: "#{type}=1"
def maybe_serialize(type, false), do: "#{type}=0"
def maybe_serialize("dtmf-tones", value), do: "#{value}"
def maybe_serialize(type, {key, value}), do: "#{type}=#{key}:#{value}"
def maybe_serialize(type, value), do: "#{type}=#{value}"
@spec maybe_serialize_hex(String.t(), nil | integer) :: binary
def maybe_serialize_hex(_type, nil), do: ""
def maybe_serialize_hex(type, value),
do: "#{type}=#{Integer.to_string(value, 16) |> String.downcase()}"
@spec maybe_serialize_list([String.t()] | nil, String.t()) :: String.t()
def maybe_serialize_list([], _sep), do: ""
def maybe_serialize_list(nil, _sep), do: ""
def maybe_serialize_list(list, sep), do: Enum.map_join(list, sep, &"#{&1}")
defp serialize_ice_options(ice_options) do
Bunch.listify(ice_options) |> Enum.join(" ")
end
defp serialize_fingerprint(fingerprint) do
case fingerprint do
{:sha1, value} -> "sha-1 #{value}"
{:sha224, value} -> "sha-224 #{value}"
{:sha256, value} -> "sha-256 #{value}"
{:sha384, value} -> "sha-384 #{value}"
{:sha512, value} -> "sha-512 #{value}"
end
end
defp serialize_setup(setup) when setup in [:active, :passive, :actpass, :holdconn],
do: Atom.to_string(setup)
end
| 38.449275 | 91 | 0.655484 |
9eeec34973f23d80bc8831223a07bb602ddcecce | 2,496 | ex | Elixir | lib/ash/resource/relationships/belongs_to.ex | ChristianTovar/ash | 66435322786c5d0b90a34051da969b68dcc8a045 | [
"MIT"
] | null | null | null | lib/ash/resource/relationships/belongs_to.ex | ChristianTovar/ash | 66435322786c5d0b90a34051da969b68dcc8a045 | [
"MIT"
] | null | null | null | lib/ash/resource/relationships/belongs_to.ex | ChristianTovar/ash | 66435322786c5d0b90a34051da969b68dcc8a045 | [
"MIT"
] | null | null | null | defmodule Ash.Resource.Relationships.BelongsTo do
@moduledoc "Represents a belongs_to relationship on a resource"
defstruct [
:name,
:destination,
:primary_key?,
:define_field?,
:field_type,
:destination_field,
:private?,
:source_field,
:source,
:not_found_message,
:violation_message,
:required?,
:filter,
:writable?,
:context,
:description,
cardinality: :one,
type: :belongs_to
]
@type t :: %__MODULE__{
type: :belongs_to,
cardinality: :one,
writable?: boolean,
name: atom,
filter: Ash.Filter.t(),
source: Ash.Resource.t(),
destination: Ash.Resource.t(),
required?: boolean,
primary_key?: boolean,
define_field?: boolean,
field_type: term,
destination_field: atom,
private?: boolean,
source_field: atom | nil,
description: String.t()
}
import Ash.Resource.Relationships.SharedOptions
alias Ash.OptionsHelpers
@global_opts shared_options()
|> OptionsHelpers.set_default!(:destination_field, :id)
|> OptionsHelpers.append_doc!(:source_field, "Defaults to <name>_id")
@opt_schema Ash.OptionsHelpers.merge_schemas(
[
primary_key?: [
type: :boolean,
default: false,
doc: "Whether this field is, or is part of, the primary key of a resource."
],
required?: [
type: :boolean,
default: false,
doc:
"Whether this relationship must always be present, e.g: must be included on creation, and never removed (it can still be changed)"
],
define_field?: [
type: :boolean,
default: true,
doc:
"If set to `false` a field is not created on the resource for this relationship, and one must be manually added in `attributes`."
],
field_type: [
type: :any,
default: :uuid,
doc: "The field type of the automatically created field."
]
],
@global_opts,
"Relationship Options"
)
@doc false
def opt_schema, do: @opt_schema
end
| 30.072289 | 152 | 0.515224 |
9eeeca0ea2d286469ac3b9ab124bdc48eda01f1a | 531 | ex | Elixir | examples/bot/lib/example.ex | ryanwinchester/irc | acb16b470144dcd664fb2026afa0b9a1727f2900 | [
"MIT"
] | 147 | 2015-01-12T15:05:12.000Z | 2022-03-17T06:32:41.000Z | examples/bot/lib/example.ex | ryanwinchester/irc | acb16b470144dcd664fb2026afa0b9a1727f2900 | [
"MIT"
] | 74 | 2015-01-24T23:07:53.000Z | 2021-04-23T14:27:43.000Z | examples/bot/lib/example.ex | ryanwinchester/irc | acb16b470144dcd664fb2026afa0b9a1727f2900 | [
"MIT"
] | 51 | 2015-02-13T21:00:38.000Z | 2022-03-03T15:38:12.000Z | defmodule Example do
use Application
alias Example.Bot
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@impl true
def start(_type, _args) do
children =
Application.get_env(:exirc_example, :bots)
|> Enum.map(fn bot -> worker(Bot, [bot]) end)
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Example.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 26.55 | 61 | 0.704331 |
9eeeca5c057714005737bddb1d2aea44e3a6cd12 | 88 | exs | Elixir | test/utils_test.exs | ntsai/xadmin | 82d8be63e69483ff66472481e66f9870face355b | [
"MIT"
] | 5 | 2016-08-30T01:23:50.000Z | 2021-09-22T14:39:00.000Z | test/utils_test.exs | ntsai/xadmin | 82d8be63e69483ff66472481e66f9870face355b | [
"MIT"
] | null | null | null | test/utils_test.exs | ntsai/xadmin | 82d8be63e69483ff66472481e66f9870face355b | [
"MIT"
] | 1 | 2021-12-10T11:10:55.000Z | 2021-12-10T11:10:55.000Z | defmodule XAdmin.UtilsTest do
use ExUnit.Case, async: true
doctest XAdmin.Utils
end
| 17.6 | 30 | 0.784091 |
9eeecd16adb7adbb8087eff800c5fe26f926d51f | 493 | ex | Elixir | apps/hefty/lib/hefty/repo/trade.ex | Cinderella-Man/crypto-streamer | b1e990d375f7143c5149930be991249f0d9c3ee3 | [
"MIT"
] | 49 | 2019-10-28T22:27:28.000Z | 2021-10-11T06:40:29.000Z | apps/hefty/lib/hefty/repo/trade.ex | Cinderella-Man/crypto-streamer | b1e990d375f7143c5149930be991249f0d9c3ee3 | [
"MIT"
] | 9 | 2019-08-30T13:15:36.000Z | 2019-10-10T21:25:14.000Z | apps/hefty/lib/hefty/repo/trade.ex | Cinderella-Man/crypto-streamer | b1e990d375f7143c5149930be991249f0d9c3ee3 | [
"MIT"
] | 7 | 2019-10-31T06:19:26.000Z | 2021-09-30T04:20:58.000Z | defmodule Hefty.Repo.Trade do
@moduledoc false
use Ecto.Schema
@primary_key {:id, :integer, autogenerate: false}
schema "trades" do
field(:symbol, :string)
field(:buy_price, :string)
field(:sell_price, :string)
field(:quantity, :string)
field(:state, :string)
field(:buy_time, :integer)
field(:sell_time, :integer)
field(:fee_rate, :string)
field(:profit_base_currency, :string)
field(:profit_percentage, :string)
timestamps()
end
end
| 21.434783 | 51 | 0.6714 |
9eef3ef19e97d8c30e821f82b3792f4c26280c83 | 2,138 | ex | Elixir | clients/big_query_connection/lib/google_api/big_query_connection/v1beta1/model/set_iam_policy_request.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/big_query_connection/lib/google_api/big_query_connection/v1beta1/model/set_iam_policy_request.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/big_query_connection/lib/google_api/big_query_connection/v1beta1/model/set_iam_policy_request.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQueryConnection.V1beta1.Model.SetIamPolicyRequest do
  @moduledoc """
  Request message for `SetIamPolicy` method.
  ## Attributes
  * `policy` (*type:* `GoogleApi.BigQueryConnection.V1beta1.Model.Policy.t`, *default:* `nil`) - REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Google Cloud services (such as Projects) might reject them.
  * `updateMask` (*type:* `String.t`, *default:* `nil`) - OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only the fields in the mask will be modified. If no mask is provided, the following default mask is used: `paths: "bindings, etag"`
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :policy => GoogleApi.BigQueryConnection.V1beta1.Model.Policy.t() | nil,
          :updateMask => String.t() | nil
        }
  field(:policy, as: GoogleApi.BigQueryConnection.V1beta1.Model.Policy)
  field(:updateMask)
end
# Delegates decoding to the decode/2 presumably injected by
# `use GoogleApi.Gax.ModelBase` above, so the nested Policy field is handled.
defimpl Poison.Decoder, for: GoogleApi.BigQueryConnection.V1beta1.Model.SetIamPolicyRequest do
  def decode(value, options) do
    GoogleApi.BigQueryConnection.V1beta1.Model.SetIamPolicyRequest.decode(value, options)
  end
end
# Encoding goes through the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.BigQueryConnection.V1beta1.Model.SetIamPolicyRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.76 | 321 | 0.749298 |
9eef7e82b5d7feaf65d3bd2aefeb410ec8ab7419 | 2,385 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/day_part_targeting.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/day_part_targeting.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/day_part_targeting.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V35.Model.DayPartTargeting do
  @moduledoc """
  Day Part Targeting.
  ## Attributes
  * `daysOfWeek` (*type:* `list(String.t)`, *default:* `nil`) - Days of the week when the ad will serve. Acceptable values are: - "SUNDAY" - "MONDAY" - "TUESDAY" - "WEDNESDAY" - "THURSDAY" - "FRIDAY" - "SATURDAY"
  * `hoursOfDay` (*type:* `list(integer())`, *default:* `nil`) - Hours of the day when the ad will serve, where 0 is midnight to 1 AM and 23 is 11 PM to midnight. Can be specified with days of week, in which case the ad would serve during these hours on the specified days. For example if Monday, Wednesday, Friday are the days of week specified and 9-10am, 3-5pm (hours 9, 15, and 16) is specified, the ad would serve Monday, Wednesdays, and Fridays at 9-10am and 3-5pm. Acceptable values are 0 to 23, inclusive.
  * `userLocalTime` (*type:* `boolean()`, *default:* `nil`) - Whether or not to use the user's local time. If false, the America/New York time zone applies.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :daysOfWeek => list(String.t()) | nil,
          :hoursOfDay => list(integer()) | nil,
          :userLocalTime => boolean() | nil
        }
  field(:daysOfWeek, type: :list)
  field(:hoursOfDay, type: :list)
  field(:userLocalTime)
end
# Delegates decoding to the decode/2 presumably injected by
# `use GoogleApi.Gax.ModelBase` above.
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V35.Model.DayPartTargeting do
  def decode(value, options) do
    GoogleApi.DFAReporting.V35.Model.DayPartTargeting.decode(value, options)
  end
end
# Encoding goes through the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V35.Model.DayPartTargeting do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 45 | 515 | 0.716562 |
9eef8a0834f9772d40d351de80b4e3efc151fe4c | 4,091 | ex | Elixir | lib/legend/error_handler.ex | naramore/legend | 030cacbb761de27c8db73a321ebd2fda51152a15 | [
"MIT"
] | null | null | null | lib/legend/error_handler.ex | naramore/legend | 030cacbb761de27c8db73a321ebd2fda51152a15 | [
"MIT"
] | 16 | 2019-01-15T10:18:03.000Z | 2019-10-22T10:17:06.000Z | lib/legend/error_handler.ex | naramore/legend | 030cacbb761de27c8db73a321ebd2fda51152a15 | [
"MIT"
] | null | null | null | defmodule Legend.ErrorHandler do
@moduledoc """
"""
alias Legend.{Event, Hook, Utils}
# @stacktrace_modules_blacklist []
# @stacktrace_functions_whitelist []
@typedoc """
"""
@type accumulator :: %{
effects_so_far: Legend.effects,
reason: term,
}
@typedoc """
"""
@type error_reason ::
{:raise, Exception.t, Exception.stacktrace} |
{:exit, reason :: term} |
{:throw, value :: term}
@doc """
"""
@callback handle_error(error_reason, Legend.full_name, Legend.effects) ::
error_reason | {:ok, valid_result :: term}
@doc false
defmacro __using__(_opts) do
quote do
@behaviour Legend.ErrorHandler
@impl Legend.ErrorHandler
def handle_error(reason, _location, _effects_so_far) do
{:error, reason}
end
defoverridable [handle_error: 3]
end
end
@doc """
"""
@spec step(Legend.stage, Event.t, accumulator, BaseStage.execute_opts) :: {Event.t, accumulator} | no_return
def step(stage, event, state, opts \\ [])
def step(stage, %Event{name: [:starting, :error_handler]} = e, acc, opts) do
{originating_event, _} = e.context
event = execute_error_handler(stage, originating_event, acc, opts)
{event, %{acc | hooks_left: Hook.merge_hooks(stage, opts)}}
end
def step(stage, %Event{name: [:completed, :error_handler]} = e, state, opts) do
case e.context do
{origin, {:ok, result}} ->
{Event.update(origin, context: result),
%{state | hooks_left: Hook.merge_hooks(stage, opts), reason: nil}}
{_, {:raise, error, stacktrace}} ->
filter_and_reraise(error, stacktrace)
{_, {:throw, value}} ->
throw value
{_, {:exit, reason}} ->
exit reason
end
end
@doc false
@spec maybe_execute_error_handler(Legend.stage, Event.t, accumulator, BaseStage.execute_opts) :: Event.t
def maybe_execute_error_handler(stage, event, acc, opts) do
result = if Keyword.get(opts, :dry_run?, false) do
Keyword.get(opts, :dry_run_result, {:throw, :default})
else
execute_error_handler(stage, event, acc, opts)
end
Event.update(event, name: [:completed, :error_handler],
context: {event, result})
end
@doc """
"""
@spec execute_error_handler(Legend.stage, Event.t, accumulator, BaseStage.execute_opts) ::
error_reason | {:ok, valid_result :: term} | no_return
def execute_error_handler(stage, event, acc, opts \\ []) do
%{effects_so_far: effects_so_far, reason: reason} = acc
timeout = Keyword.get(opts, :timeout, :infinity)
case Utils.execute(get_error_handler(stage), [reason, event.stage_name, effects_so_far], timeout) do
{:error, {:raise, error, stacktrace}} -> {:raise, error, stacktrace}
{:raise, error, stacktrace} -> {:raise, error, stacktrace}
{:error, {:throw, value}} -> {:throw, value}
{:throw, value} -> {:throw, value}
{:error, {:exit, reason}} -> {:exit, reason}
{:ok, result} -> {:ok, result}
# TODO: this should return {:raise, Legend.InvalidHandlerResponseError(...), stacktrace}
#otherwise -> {:error, {:unsupported_error_handler_result_form, otherwise}}
end
end
@doc false
@spec get_error_handler(Legend.stage) :: (error_reason, Legend.full_name, Legend.effects -> error_reason | {:ok, valid_result :: term})
defp get_error_handler(stage) when is_list(stage) do
Keyword.get(stage, :error_handler)
end
defp get_error_handler(stage) when is_atom(stage) do
&stage.error_handler/3
end
# TODO: implement filtering for stacktraces...
@doc false
@spec filter_and_reraise(Exception.t, Exception.stacktrace) :: no_return
defp filter_and_reraise(exception, stacktrace) do
#stacktrace =
# Enum.reject(stacktrace, &match?({mod, fun, _, _} when mod in @stacktrace_modules_blacklist and
# fun not in @stacktrace_functions_whitelist, &1))
reraise(exception, stacktrace)
end
end
defmodule Legend.RaiseErrorHandler do
  @moduledoc """
  Error handler that never recovers: it relies on the default
  `handle_error/3` injected by `use Legend.ErrorHandler`, which returns the
  failure reason unchanged, so `Legend.ErrorHandler.step/4` re-raises,
  re-throws or exits with the original error.
  """

  use Legend.ErrorHandler
end
| 32.728 | 137 | 0.651186 |
9eefae3dd946da1b969487cb1512cd06431376ef | 5,601 | ex | Elixir | lib/jhn_elixir/gen_statem.ex | JanHenryNystrom/jhn_elixir | 4feebf6f0f24829965515cdd9a160f3a3695cb7f | [
"Apache-2.0"
] | null | null | null | lib/jhn_elixir/gen_statem.ex | JanHenryNystrom/jhn_elixir | 4feebf6f0f24829965515cdd9a160f3a3695cb7f | [
"Apache-2.0"
] | null | null | null | lib/jhn_elixir/gen_statem.ex | JanHenryNystrom/jhn_elixir | 4feebf6f0f24829965515cdd9a160f3a3695cb7f | [
"Apache-2.0"
] | null | null | null | ##==============================================================================
## Copyright 2020 Jan Henry Nystrom <[email protected]>
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##==============================================================================
defmodule JhnElixir.GenStatem do
  @moduledoc """
  A behaviour for implementing state machines, modelled on Erlang/OTP's
  `:gen_statem` and delegating the process plumbing to `JhnElixir.Gen`.

  `use JhnElixir.GenStatem` injects overridable defaults for
  `callback_mode/0` (`:state_functions`), `handle_event/4`, `terminate/2`
  and `code_change/3`, together with a `child_spec/1` for supervision.
  """

  Module.register_attribute(__MODULE__, :copyright, persist: true)
  @copyright "(C) 2020, Jan Henry Nystrom <[email protected]>"

  alias JhnElixir.Gen
  alias JhnElixir.Supervisor

  # ====================
  # API
  # ====================

  @doc """
  Starts a state machine running `module`, initialised with `init_arg`.

  Linking, naming, debugging, timeouts etc. are controlled by `options`
  (see `t:option/0`).
  """
  @spec start(module, any, options) :: on_start
  def start(module, init_arg, options \\ []) do
    Gen.start(:gen_statem, module, init_arg, options)
  end

  @doc """
  Orderly stops `server` with the given `reason`, waiting at most
  `timeout` for it to terminate.
  """
  @spec stop(server, reason :: term, timeout) :: :ok
  def stop(server, reason \\ :normal, timeout \\ :infinity) do
    Gen.stop(server, reason, timeout)
  end

  @doc """
  Makes a synchronous call to `server`, waiting up to `timeout` for the
  reply.
  """
  @spec call(server, term, timeout) :: term
  def call(server, request, timeout \\ 5000) do
    Gen.call(server, request, timeout)
  end

  @doc """
  Sends an asynchronous `message` to `server`.
  """
  @spec cast(server, term) :: :ok
  def cast(server, message) do
    Gen.cast(server, message)
  end

  @doc """
  Executes one reply action, or a list of actions from which every
  `{:reply, from, reply}` element is executed.
  """
  @spec reply(reply_action | [reply_action]) :: :ok
  def reply({:reply, from, reply}) do
    reply(from, reply)
  end
  def reply(reply_actions) do
    # Non-reply elements are intentionally skipped by the generator match.
    for {:reply, from, reply} <- reply_actions do
      reply(from, reply)
    end
    :ok
  end

  @doc """
  Replies to a client identified by `from`, as received in a
  `{:call, from}` event.
  """
  @spec reply(from, term) :: :ok
  def reply(from, reply) do
    Gen.reply(from, reply)
  end

  # ====================
  # Callbacks
  # ====================

  @callback callback_mode() :: callback_mode | [callback_mode | :state_enter]

  @callback init(init_arg :: term) ::
    {:ok, state, data}
    | {:ok, state, data, actions :: action | [action]}
    | :ignore
    | {:stop, reason :: any}
    when state: atom, data: any

  @callback terminate(reason, state :: term) :: term
    when reason: :normal | :shutdown | {:shutdown, term} | term

  @callback code_change(old_vsn, state :: term, extra :: term) ::
    {:ok, new_state :: term}
    | {:error, reason :: term}
    when old_vsn: term | {:down, term}

  @callback format_status(reason, pdict_and_state :: list) :: term
    when reason: :normal | :terminate

  @optional_callbacks callback_mode: 0,
    code_change: 3,
    terminate: 2,
    format_status: 2

  # ====================
  # Types
  # ====================

  @type on_start :: {:ok, pid} |
                    :ignore |
                    {:error, {:already_started, pid} | term}
  @type name :: atom | {:global, term} | {:via, module, term}
  @type link :: :link | :nolink
  @type options :: [option]
  @type option ::
          {:name, name} |
          {:link, link} |
          {:debug, debug} |
          {:timeout, timeout} |
          {:spawn_opt, Process.spawn_opt()} |
          {:hibernate_after, timeout}
  @type debug :: [:trace | :log | :statistics | {:log_to_file, Path.t()}]
  @type server :: pid | name | {atom, node}
  @type from :: {pid, tag :: term}
  @type reply_action :: {:reply, from, term}
  @type callback_mode :: :state_functions | :handle_event_function
  @type action :: :postpone | {:postpone, boolean} |
                  {:next_event, event_type, term} |
                  :hibernate | {:hibernate, boolean} |
                  timeout_action | reply_action
  @type event_type :: external_event_type | timeout_event_type | :internal
  @type external_event_type :: {:call, from} | :cast | :info
  @type timeout_event_type :: :timeout | {:timeout, term} | :state_timeout
  @type timeout_action :: term

  # ====================
  # Macros
  # ====================

  defmacro __using__(opts) do
    quote location: :keep, bind_quoted: [opts: opts] do
      @behaviour JhnElixir.GenStatem

      def child_spec(init_arg) do
        default = %{id: __MODULE__,
                    start: {__MODULE__, :start, [init_arg]}}
        Supervisor.child_spec(default, unquote(Macro.escape(opts)))
      end

      # TODO: Remove this on v2.0
      @before_compile JhnElixir.GenStatem

      @doc false
      def callback_mode() do
        :state_functions
      end

      # Default catch-all for unexpected events: log and keep state.
      # Unused arguments are underscored so using modules compile without
      # "variable is unused" warnings.
      @doc false
      def handle_event(event, _content, _state, _data) do
        Gen.unexpected(__MODULE__, :event, event)
        :keep_state_and_data
      end

      @doc false
      def terminate(_, _) do
        :ok
      end

      @doc false
      def code_change(_, state, _) do
        {:ok, state}
      end

      defoverridable child_spec: 1,
                     callback_mode: 0,
                     handle_event: 4,
                     code_change: 3,
                     terminate: 2
    end
  end

  # No-op before_compile hook kept for backwards compatibility; scheduled
  # for removal in v2.0 (see the TODO in __using__/1).
  defmacro __before_compile__(_) do
    :ok
  end

  # ====================
  # Internal functions
  # ====================
end
9eefdd95b0c6c0c5355a81b6b6e63f97dd63bff5 | 114 | exs | Elixir | test/leader_test.exs | atlas-forks/swarm_demo | 30337113eed7f307d31373ff7081c71a8a30a950 | [
"MIT"
] | 8 | 2017-05-17T23:34:23.000Z | 2020-05-03T10:21:17.000Z | test/leader_test.exs | atlas-forks/swarm_demo | 30337113eed7f307d31373ff7081c71a8a30a950 | [
"MIT"
] | null | null | null | test/leader_test.exs | atlas-forks/swarm_demo | 30337113eed7f307d31373ff7081c71a8a30a950 | [
"MIT"
] | 2 | 2018-01-15T15:55:59.000Z | 2019-01-21T03:16:23.000Z | defmodule LeaderTest do
use ExUnit.Case
doctest Leader
test "the truth" do
assert 1 + 1 == 2
end
end
| 12.666667 | 23 | 0.666667 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.