hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
93f2fa5be3f1cc0d00a0bddc39d5f224ebb662c7 | 2,454 | exs | Elixir | config/prod.exs | mbuala/WhatChat | 44851a8b3afe0a8825b5e53e327c2f777065a60c | [
"MIT"
] | 2 | 2019-02-18T10:07:25.000Z | 2019-02-18T10:07:41.000Z | config/prod.exs | mbuala/WhatChat | 44851a8b3afe0a8825b5e53e327c2f777065a60c | [
"MIT"
] | null | null | null | config/prod.exs | mbuala/WhatChat | 44851a8b3afe0a8825b5e53e327c2f777065a60c | [
"MIT"
] | null | null | null | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :whatChat, WhatChatWeb.Endpoint,
  # NOTE(review): System.get_env/1 returns a string; confirm the HTTP
  # adapter accepts a string port here, or convert with String.to_integer/1.
  http: [:inet6, port: System.get_env("PORT") || 4000],
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :whatChat, WhatChatWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# :inet6,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :whatChat, WhatChatWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases (distillery)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :whatChat, WhatChatWeb.Endpoint, server: true
#
# Note you can't rely on `System.get_env/1` when using releases.
# See the releases documentation accordingly.
# Finally import the config/prod.secret.exs which should be versioned
# separately.
import_config "prod.secret.exs"
| 34.083333 | 69 | 0.713936 |
93f32469616d5619b932aecdb51ff49d1e82202e | 3,431 | ex | Elixir | lib/elixir/lib/range.ex | lytedev/elixir | dc25bb8e1484e2328eef819402d268dec7bb908a | [
"Apache-2.0"
] | 1 | 2018-08-08T12:15:48.000Z | 2018-08-08T12:15:48.000Z | lib/elixir/lib/range.ex | lytedev/elixir | dc25bb8e1484e2328eef819402d268dec7bb908a | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/range.ex | lytedev/elixir | dc25bb8e1484e2328eef819402d268dec7bb908a | [
"Apache-2.0"
] | 1 | 2021-09-30T01:21:02.000Z | 2021-09-30T01:21:02.000Z | defmodule Range do
@moduledoc """
Defines a range.
A range represents a discrete number of values where
the first and last values are integers.
Ranges can be either increasing (`first <= last`) or
decreasing (`first > last`). Ranges are also always
inclusive.
A range is represented internally as a struct. However,
the most common form of creating and matching on ranges
is via the `../2` macro, auto-imported from `Kernel`:
iex> range = 1..3
1..3
iex> first..last = range
iex> first
1
iex> last
3
A range implements the `Enumerable` protocol, which means
functions in the `Enum` module can be used to work with
ranges:
iex> range = 1..10
1..10
iex> Enum.reduce(range, 0, fn i, acc -> i * i + acc end)
385
iex> Enum.count(range)
10
iex> Enum.member?(range, 11)
false
iex> Enum.member?(range, 8)
true
"""
# Internal representation of a range; prefer the `first..last` syntax
# (or `Range.new/2`) over building the struct directly.
defstruct first: nil, last: nil

@type t :: %Range{first: integer, last: integer}
@type t(first, last) :: %Range{first: first, last: last}
@doc """
Creates a new range.
"""
@spec new(integer, integer) :: t
def new(first, last) when is_integer(first) and is_integer(last) do
%Range{first: first, last: last}
end
def new(first, last) do
raise ArgumentError,
"ranges (first..last) expect both sides to be integers, " <>
"got: #{inspect(first)}..#{inspect(last)}"
end
# TODO: Remove by 2.0
# Deprecated membership-style check kept only for backwards compatibility;
# hidden from docs and flagged by the compiler when used.
@doc false
@deprecated "Pattern match on first..last instead"
def range?(term)
def range?(first..last) when is_integer(first) and is_integer(last), do: true
def range?(_), do: false
end
# Enumerable implementation for ranges. reduce/3 walks one integer at a
# time in the direction chosen by `up?` (ascending when last >= first).
defimpl Enumerable, for: Range do
  def reduce(first..last, acc, fun) do
    reduce(first, last, acc, fun, _up? = last >= first)
  end

  # Consumer halted the reduction: stop immediately.
  defp reduce(_first, _last, {:halt, acc}, _fun, _up?) do
    {:halted, acc}
  end

  # Suspension: return a continuation that resumes from the same position.
  defp reduce(first, last, {:suspend, acc}, fun, up?) do
    {:suspended, acc, &reduce(first, last, &1, fun, up?)}
  end

  # Ascending step while still within bounds.
  defp reduce(first, last, {:cont, acc}, fun, _up? = true) when first <= last do
    reduce(first + 1, last, fun.(first, acc), fun, _up? = true)
  end

  # Descending step while still within bounds.
  defp reduce(first, last, {:cont, acc}, fun, _up? = false) when first >= last do
    reduce(first - 1, last, fun.(first, acc), fun, _up? = false)
  end

  # Walked past the final endpoint: enumeration is complete.
  defp reduce(_, _, {:cont, acc}, _fun, _up) do
    {:done, acc}
  end

  # Constant-time bounds check; only integers can be members.
  def member?(first..last, value) when is_integer(value) do
    if first <= last do
      {:ok, first <= value and value <= last}
    else
      {:ok, last <= value and value <= first}
    end
  end

  def member?(_.._, _value) do
    {:ok, false}
  end

  # Ranges are inclusive on both ends, hence the +1.
  def count(first..last) do
    if first <= last do
      {:ok, last - first + 1}
    else
      {:ok, first - last + 1}
    end
  end

  # Random-access slicing contract: {:ok, size, fetcher} where the
  # fetcher builds `length` elements starting at offset `start`.
  def slice(first..last) do
    if first <= last do
      {:ok, last - first + 1, &slice_asc(first + &1, &2)}
    else
      {:ok, first - last + 1, &slice_desc(first - &1, &2)}
    end
  end

  defp slice_asc(current, 1), do: [current]
  defp slice_asc(current, remaining), do: [current | slice_asc(current + 1, remaining - 1)]

  defp slice_desc(current, 1), do: [current]
  defp slice_desc(current, remaining), do: [current | slice_desc(current - 1, remaining - 1)]
end
defimpl Inspect, for: Range do
  import Inspect.Algebra

  # Render as `first..last`, delegating endpoint formatting to the
  # configured inspect options.
  def inspect(first..last, opts) do
    first
    |> to_doc(opts)
    |> concat("..")
    |> concat(to_doc(last, opts))
  end
end
| 25.414815 | 93 | 0.613232 |
93f32c496487209e522a965cda58b6b607e58810 | 4,009 | ex | Elixir | lib/models/comment.ex | heikkari/dashium | d3f58033efac81e7f747091f2810caf2ce8f45ab | [
"MIT"
] | 2 | 2021-09-05T23:23:00.000Z | 2021-09-11T16:26:11.000Z | lib/models/comment.ex | heikkari/dashium | d3f58033efac81e7f747091f2810caf2ce8f45ab | [
"MIT"
] | null | null | null | lib/models/comment.ex | heikkari/dashium | d3f58033efac81e7f747091f2810caf2ce8f45ab | [
"MIT"
] | null | null | null | defmodule Models.Comment do
defstruct [
:_id,
:author,
:likes,
:dislikes,
:body,
:type, # 0 - level, 1 - profile
:level,
:percent,
]
@spec to_string(Models.Comment) :: {:ok | :error, binary}
def to_string(%__MODULE__{ type: type } = comment) do
user = Models.User.get(comment.author)
values = case type do
0 -> case user do
-1 -> { :error, "Couldn't get user" }
user -> { :ok,
[
%{
1 => comment.level,
2 => comment.body |> Base.encode64,
3 => comment.author,
4 => comment.likes - comment.dislikes,
5 => comment.dislikes,
6 => comment._id,
7 => (
if comment.likes / (
case comment.dislikes do
0 -> 1
n -> n
end
) < Application.get_env(:app, :spam_ratio),
do: 0,
else: 1
),
8 => comment.author,
9 => Utils.age(comment),
10 => comment.percent,
11 => user.mod_level,
12 => Application.get_env(:app, :mod_colors)[user.mod_level]
|> Enum.map(fn value -> Integer.to_string(value, 16) end)
|> Enum.join(",")
},
%{
1 => user.username,
9 => user.icon_id,
10 => user.primary_color,
11 => user.secondary_color,
14 => user.icon_type,
15 => user.glow_id,
16 => user._id
}
]
}
end
1 -> {
:ok,
[ %{
2 => comment.body |> Base.encode64,
4 => comment.likes - comment.dislikes,
9 => Utils.age(comment),
6 => comment._id
} ]
}
_ -> throw "Invalid comment type"
end
with { :ok, list } <- values do
{ :ok, list
|> Enum.map(fn elem ->
Enum.map(elem, fn { key, value } -> Enum.join([ key, value ], "~") end)
|> Enum.join("~")
end)
|> Enum.join(":") }
end
end
@spec get(map, keyword) :: { :error, any } | { :ok, list }
def get(query, opts \\ []) when is_map(query) do
case Mongo.find(:mongo, "comments", query, opts) do
{ :error, e } -> { :error, e }
cursor -> { :ok, cursor |> Enum.map(fn doc -> new(doc) end) }
end
end
@spec by_id(integer) :: { :error, any } | { :ok, __MODULE__ }
def by_id(id) when is_integer(id) do
with { :ok, [ comment | _ ] } <- get(%{ _id: id }) do
{ :ok, comment }
end
end
@spec delete(integer, integer) :: { :ok | :error, binary | Mongo.DeleteResult.t | any }
def delete(user_id, comment_id)
when is_integer(user_id) and is_integer(comment_id)
do
with { :ok, comment } <- by_id(comment_id) do
can_delete = if comment.author !== user_id do
mod_level = Application.get_env(:app, :delete_comments)
user = Models.User.get(user_id)
user.mod_level >= mod_level
else
true
end
if can_delete do
Mongo.delete_one(:mongo, "comments", %{ _id: comment_id })
else
{ :error, "not allowed" }
end
end
end
@spec post(integer, binary, integer | nil, atom) :: { :ok | :error, Mongo.InsertOneResult.t() }
def post(user_id, body, level_id \\ nil, percent \\ nil, type \\ :profile)
when is_integer(user_id)
and is_nil(level_id) or is_integer(level_id)
and is_binary(body)
and is_atom(type)
do
# TODO: Check if `level_id` exists
Mongo.insert_one(:mongo, "comments", %{
_id: Utils.gen_id(),
author: user_id,
likes: 0,
dislikes: 0,
body: body,
type: case type do
:level -> 0
:profile -> 1
opt -> throw "Invalid option #{opt}"
end,
level_id: level_id,
percent: percent
})
end
use ExConstructor
end
| 27.272109 | 97 | 0.488152 |
93f37842d283521d4fba778e8cced814fbc513bf | 1,605 | ex | Elixir | clients/service_usage/lib/google_api/service_usage/v1/model/source_context.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/service_usage/lib/google_api/service_usage/v1/model/source_context.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/service_usage/lib/google_api/service_usage/v1/model/source_context.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceUsage.V1.Model.SourceContext do
  @moduledoc """
  `SourceContext` represents information about the source of a protobuf element, like the file in which it is defined.

  ## Attributes

  *   `fileName` (*type:* `String.t`, *default:* `nil`) - The path-qualified name of the .proto file that contained the associated protobuf element. For example: `"google/protobuf/source_context.proto"`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :fileName => String.t() | nil
        }

  # Declares the JSON field mapping used by ModelBase for (de)serialization.
  field(:fileName)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceUsage.V1.Model.SourceContext do
  # Delegate decoding to the generated model helper.
  def decode(value, options),
    do: GoogleApi.ServiceUsage.V1.Model.SourceContext.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ServiceUsage.V1.Model.SourceContext do
  # Delegate encoding to the shared ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.148936 | 203 | 0.744548 |
93f37988af00a5f3d1fdfd5169c3dbabe1623665 | 141 | ex | Elixir | lib/code_corps/random_icon_color/generator.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | 275 | 2015-06-23T00:20:51.000Z | 2021-08-19T16:17:37.000Z | lib/code_corps/random_icon_color/generator.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | 1,304 | 2015-06-26T02:11:54.000Z | 2019-12-12T21:08:00.000Z | lib/code_corps/random_icon_color/generator.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | 140 | 2016-01-01T18:19:47.000Z | 2020-11-22T06:24:47.000Z | defmodule CodeCorps.RandomIconColor.Generator do
def generate do
~w(blue green light_blue pink purple yellow) |> Enum.random
end
end
| 23.5 | 63 | 0.77305 |
93f37dbcddc07d90b87ce26ff515e1fe6fc43fca | 179 | exs | Elixir | test/faker/lorem/shakespeare_test.exs | joshillian/faker | eeede9d7c35c543dcf6abe72dc476e755c80415b | [
"MIT"
] | 540 | 2015-01-05T16:31:49.000Z | 2019-09-25T00:40:27.000Z | test/faker/lorem/shakespeare_test.exs | echenim/faker | 15172b7d9c2b7711173a5faf3e45bfc4e45d6a97 | [
"MIT"
] | 172 | 2015-01-06T03:55:17.000Z | 2019-10-03T12:58:02.000Z | test/faker/lorem/shakespeare_test.exs | echenim/faker | 15172b7d9c2b7711173a5faf3e45bfc4e45d6a97 | [
"MIT"
] | 163 | 2015-01-05T21:24:54.000Z | 2019-10-03T07:59:42.000Z | defmodule Faker.ShakespeareTest do
use ExUnit.Case, async: true
doctest Faker.Lorem.Shakespeare
doctest Faker.Lorem.Shakespeare.En
doctest Faker.Lorem.Shakespeare.Ru
end
| 22.375 | 36 | 0.804469 |
93f3974a4acdf8f15e82d9cf775cb3e3f8ecdb4d | 5,186 | exs | Elixir | mix.exs | Zurga/cldr | 1aefe0cb27332b124db73ad3c1d03acb38a83365 | [
"Apache-2.0"
] | null | null | null | mix.exs | Zurga/cldr | 1aefe0cb27332b124db73ad3c1d03acb38a83365 | [
"Apache-2.0"
] | null | null | null | mix.exs | Zurga/cldr | 1aefe0cb27332b124db73ad3c1d03acb38a83365 | [
"Apache-2.0"
] | null | null | null | defmodule Cldr.Mixfile do
use Mix.Project
@version "2.18.0"
# Mix project definition for the ex_cldr library: package coordinates,
# docs, test coverage, and dialyzer configuration.
def project do
  [
    app: :ex_cldr,
    version: @version,
    elixir: "~> 1.6",
    name: "Cldr",
    source_url: "https://github.com/elixir-cldr/cldr",
    docs: docs(),
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    description: description(),
    package: package(),
    test_coverage: [tool: ExCoveralls],
    aliases: aliases(),
    elixirc_paths: elixirc_paths(Mix.env()),
    preferred_cli_env: preferred_cli_env(),
    dialyzer: [
      ignore_warnings: ".dialyzer_ignore_warnings",
      plt_add_apps: ~w(gettext inets jason mix plug sweet_xml ratio)a
    ],
    compilers: Mix.compilers()
  ]
end
# Hex package description shown on hex.pm.
defp description do
  """
  Common Locale Data Repository (CLDR) functions for Elixir to localize and format numbers,
  dates, lists and units with support for over 500 locales for internationalized (i18n) and
  localized (L10N) applications.
  """
end

# OTP application configuration; the extra applications are needed at
# runtime (e.g. inets/ssl for HTTP downloads, eex for templating).
def application do
  [
    extra_applications: [:logger, :inets, :ssl, :eex, :ex_unit]
  ]
end
# Dependency list. Most entries are optional integrations (JSON, gettext,
# plug, telemetry); dev/test-only tooling is scoped with :only.
defp deps do
  [
    {:cldr_utils, "~> 2.12"},
    {:decimal, "~> 1.6 or ~> 2.0"},
    {:castore, "~> 0.1", optional: true},
    {:certifi, "~> 2.5", optional: true},
    {:jason, "~> 1.0", optional: true},
    {:ex_doc, "~> 0.18", only: [:release, :dev]},
    {:nimble_parsec, "~> 0.5 or ~> 1.0"},
    {:gettext, "~> 0.13", optional: true},
    {:stream_data, "~> 0.4", only: :test},
    {:dialyxir, "~> 1.0", only: [:dev], runtime: false, optional: true},
    {:plug, "~> 1.9", optional: true},
    {:sweet_xml, "~> 0.6", only: [:dev, :test], optional: true},
    {:benchee, "~> 1.0", only: :dev, runtime: false, optional: true},
    {:telemetry, "~> 0.4.0", optional: true},
    {:ratio, "~> 2.0", only: [:dev, :test], optional: true}
  ]
end
# Hex package metadata. The :files list pins exactly which source files
# and CLDR data assets ship with the published package.
defp package do
  [
    maintainers: ["Kip Cole"],
    # NOTE(review): hex.pm expects SPDX license identifiers; the SPDX
    # form is "Apache-2.0" (hyphenated) — confirm before changing.
    licenses: ["Apache 2.0"],
    links: links(),
    files: [
      "lib",
      "src/plural_rules_lexer.xrl",
      "src/plural_rules_parser.yrl",
      "config",
      "mix.exs",
      "README*",
      "CHANGELOG*",
      "LICENSE*",
      "priv/cldr/locales/en.json",
      "priv/cldr/locales/en-001.json",
      "priv/cldr/locales/root.json",
      "priv/cldr/available_locales.json",
      "priv/cldr/number_systems.json",
      "priv/cldr/plural_rules.json",
      "priv/cldr/version.json",
      "priv/cldr/currencies.json",
      "priv/cldr/territory_currencies.json",
      "priv/cldr/weeks.json",
      "priv/cldr/calendars.json",
      "priv/cldr/calendar_preferences.json",
      "priv/cldr/day_periods.json",
      "priv/cldr/likely_subtags.json",
      "priv/cldr/aliases.json",
      "priv/cldr/territory_containers.json",
      "priv/cldr/territory_containment.json",
      "priv/cldr/territories.json",
      "priv/cldr/territory_subdivisions.json",
      "priv/cldr/territory_subdivision_containment.json",
      "priv/cldr/plural_ranges.json",
      "priv/cldr/timezones.json",
      "priv/cldr/measurement_systems.json",
      "priv/cldr/units.json",
      "priv/cldr/time_preferences.json",
      "priv/cldr/language_tags.ebin",
      "priv/cldr/deprecated/measurement_system.json",
      "priv/cldr/deprecated/unit_preference.json"
    ]
  ]
end
# Project links rendered on hex.pm; README/CHANGELOG links are pinned to
# this release's tag via @version.
def links do
  %{
    "GitHub" => "https://github.com/elixir-cldr/cldr",
    "Readme" => "https://github.com/elixir-cldr/cldr/blob/v#{@version}/README.md",
    "Changelog" => "https://github.com/elixir-cldr/cldr/blob/v#{@version}/CHANGELOG.md"
  }
end

# ExDoc configuration: extra pages, related hexdocs packages for
# cross-linking, and sidebar module grouping.
def docs do
  [
    source_ref: "v#{@version}",
    main: "readme",
    logo: "logo.png",
    extras: [
      "README.md",
      "LICENSE.md",
      "CHANGELOG.md"
    ],
    deps: [
      ex_cldr_numbers: "https://hexdocs.pm/ex_cldr_numbers",
      ex_cldr_dates_times: "https://hexdocs.pm/ex_cldr_dates_times",
      ex_cldr_units: "https://hexdocs.pm/ex_cldr_units",
      ex_cldr_lists: "https://hexdocs.pm/ex_cldr_lists"
    ],
    groups_for_modules: groups_for_modules(),
    skip_undefined_reference_warnings_on: ["changelog", "CHANGELOG.md"]
  ]
end

# Preferred CLI Environment details
#
# Defines the preferred environment for Mix tasks
defp preferred_cli_env() do
  [
    "cldr.generate_language_tags": :test
  ]
end

# No custom Mix task aliases are defined.
def aliases do
  []
end
# Sidebar grouping for ExDoc; regexes match module names, the Helpers
# group lists its modules explicitly.
defp groups_for_modules do
  [
    "Language Tag": ~r/^Cldr.LanguageTag.?/,
    "Plural Rules": ~r/^Cldr.Number.?/,
    Plugs: ~r/^Cldr.Plug.?/,
    Gettext: ~r/^Cldr.Gettext.?/,
    Helpers: [
      Cldr.Calendar.Conversion,
      Cldr.Digits,
      Cldr.Helpers,
      Cldr.Locale.Cache,
      Cldr.Macros,
      Cldr.Map,
      Cldr.Math,
      Cldr.String
    ],
    "Example Backend": ~r/^MyApp.?/
  ]
end
# Extra compile paths per Mix environment (support modules and tasks in
# dev/test; only the library sources otherwise).
defp elixirc_paths(env) do
  case env do
    :test -> ["lib", "src", "mix/support/units", "mix/tasks", "test"]
    :dev -> ["lib", "mix", "src", "bench"]
    _ -> ["lib", "src"]
  end
end
end
| 29.299435 | 93 | 0.579059 |
93f397e69c534b44306b04ce6fb9af55514443e3 | 4,874 | ex | Elixir | lib/changelog_web/helpers/admin_helpers.ex | mlineen/changelog.com | fb661d0cf3a4db731d46ef7f1cec44a5d1f4581a | [
"MIT"
] | 1 | 2021-12-07T11:32:34.000Z | 2021-12-07T11:32:34.000Z | lib/changelog_web/helpers/admin_helpers.ex | grouville/changelog.com | bd32ea5da444f209722dd4a23d4e4b701b4e2253 | [
"MIT"
] | null | null | null | lib/changelog_web/helpers/admin_helpers.ex | grouville/changelog.com | bd32ea5da444f209722dd4a23d4e4b701b4e2253 | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.Helpers.AdminHelpers do
use Phoenix.HTML
alias Changelog.Repo
alias ChangelogWeb.TimeView
# CSS class helpers: mark a form field as errored / conditionally hidden.
def error_class(form, field), do: if(form.errors[field], do: "error", else: "")

def hidden_class(condition), do: if(condition, do: "hidden", else: "")

# Renders the first changeset error for `field` as a Semantic UI label,
# or the empty string when the field has no error.
def error_message(form, field) do
  case form.errors[field] do
    {message, _} ->
      content_tag :div, class: "ui pointing red basic label" do
        message
      end

    nil ->
      ""
  end
end

# Paired "(use url)" / "(use file)" toggle links for upload fields; the
# file variant starts hidden and client-side JS swaps which is visible.
def file_toggle_buttons() do
  content_tag(:span) do
    [
      content_tag(:a, "(use url)", href: "javascript:void(0);", class: "field-action use-url"),
      content_tag(:a, "(use file)",
        href: "javascript:void(0);",
        class: "field-action use-file",
        style: "display: none;"
      )
    ]
  end
end
# Renders a Semantic UI dropdown from `options` ({value, label} pairs),
# pre-selecting the option equal to `filter`. A binary filter is first
# normalized to an atom.
# NOTE(review): String.to_atom/1 on request-derived input can leak atoms;
# consider String.to_existing_atom/1 if filter values are unbounded.
def filter_select(filter, options) when is_binary(filter),
  do: filter_select(String.to_atom(filter), options)

def filter_select(filter, options) do
  content_tag(:select, class: "ui fluid selection dropdown js-filter") do
    Enum.map(options, fn {value, label} ->
      args =
        if filter == value do
          [value: value, selected: true]
        else
          [value: value]
        end

      content_tag(:option, label, args)
    end)
  end
end
# Popup help icon; `classes` appends extra CSS classes to the icon.
def help_icon(help_text, classes \\ "") do
  ~e"""
  <i class="help circle icon fluid <%= classes %>" data-popup="true" data-variation="wide" data-content="<%= help_text %>"></i>
  """
end

# Popup info icon with fixed styling.
def info_icon(info_text) do
  ~e"""
  <i class="info circle icon fluid" data-popup="true" data-variation="wide" data-content="<%= info_text %>"></i>
  """
end

# Link styled as a dropdown menu item (class is forced to "item").
def dropdown_link(text, options) do
  options = Keyword.put(options, :class, "item")
  link(text, options)
end

# Icon-only button link (class is forced to "ui icon button").
def icon_link(icon_name, options) do
  options = Keyword.put(options, :class, "ui icon button")
  link(content_tag(:i, "", class: "#{icon_name} icon"), options)
end
# Dropdown item that opens a modal rendered from the view's
# `_<modal_name>_modal.html` partial; element ids are suffixed with `id`
# so multiple modals of the same kind can coexist on one page.
def modal_dropdown_link(view_module, text, modal_name, assigns, id) do
  modal_id = "#{modal_name}-modal-#{id}"
  form_id = "#{modal_name}-form-#{id}"
  assigns = Map.merge(assigns, %{modal_id: modal_id, form_id: form_id})
  modal = Phoenix.View.render(view_module, "_#{modal_name}_modal.html", assigns)

  ~e"""
  <a href="javascript:void(0);" class="item js-modal" data-modal="#<%= modal_id %>"><%= text %></a>
  <%= modal %>
  """
end

# Icon-button variant of modal_dropdown_link/5.
def modal_icon_button(view_module, icon_name, title, modal_name, assigns, id) do
  modal_id = "#{modal_name}-modal-#{id}"
  form_id = "#{modal_name}-form-#{id}"
  assigns = Map.merge(assigns, %{modal_id: modal_id, form_id: form_id})
  modal = Phoenix.View.render(view_module, "_#{modal_name}_modal.html", assigns)

  ~e"""
  <button
    type="button"
    data-modal="#<%= modal_id %>"
    class="ui icon button js-modal"
    title="<%= title %>">
    <i class="<%= icon_name %> icon"></i>
  </button>
  <%= modal %>
  """
end
# True when the struct has been saved (an integer primary key assigned).
def is_persisted(struct), do: is_integer(struct.id)

# True only when an Ecto association has actually been loaded.
def is_loaded(nil), do: false
def is_loaded(%Ecto.Association.NotLoaded{}), do: false
def is_loaded(_association), do: true

# Field label with a "(clear)" action link wired up by client-side JS.
def label_with_clear(attr, text) do
  content_tag(:label, for: attr) do
    [
      content_tag(:span, text),
      content_tag(:a, "(clear)", href: "javascript:void(0);", class: "field-action js-clear")
    ]
  end
end

# Attempts to load an associated record on a form. Starts with direct
# relationship on form data, then tries querying Repo.
def load_from_form(form, module, relationship) do
  form_data = Map.get(form.data, relationship)
  foreign_key = "#{relationship}_id"

  # The id may live on the form data (schema field) or in raw params.
  record_id =
    Map.get(form.data, String.to_existing_atom(foreign_key)) || form.params[foreign_key]

  cond do
    is_loaded(form_data) -> form_data
    is_nil(record_id) -> nil
    true -> Repo.get(module, record_id)
  end
end
# The "next" query param (post-submit redirect target) with an optional default.
def next_param(conn, default \\ nil), do: Map.get(conn.params, "next", default)

# Semantic UI calendar widget: a visible text input (blank name/id so it
# is not submitted) backed by a hidden input carrying the real value.
def semantic_calendar_field(form, field) do
  ~e"""
  <div class="ui calendar">
    <div class="ui input left icon">
      <i class="calendar icon"></i>
      <%= text_input(form, field, name: "", id: "") %>
      <%= hidden_input(form, field) %>
    </div>
  </div>
  """
end

# Submit button that also posts a "next" redirect target as its value.
def submit_button(type, text, next \\ "") do
  content_tag(:button, text,
    class: "ui #{type} fluid basic button",
    type: "submit",
    name: "next",
    value: next
  )
end

# Timestamp formatting, delegated to TimeView.
def ts(ts), do: TimeView.ts(ts)

# See time.js for supported styles
def ts(ts, style), do: TimeView.ts(ts, style)
# "green" when the first value is greater than the second, "red"
# otherwise (including equality). Both arguments must be integers.
def up_or_down_class(a, b) when is_integer(a) and is_integer(b) do
  if a > b, do: "green", else: "red"
end
# Label/value pairs for yes/no select inputs.
def yes_no_options, do: [{"Yes", true}, {"No", false}]
end
| 27.693182 | 129 | 0.609971 |
93f3a1b3102a85e846496ad24027aeb09d899b95 | 1,852 | ex | Elixir | lib/graphql/resolvers/version.ex | samuelnygaard/accent | db753badab1d885397b48a42ac3fb43024345467 | [
"BSD-3-Clause"
] | 1 | 2020-07-01T16:08:34.000Z | 2020-07-01T16:08:34.000Z | lib/graphql/resolvers/version.ex | samuelnygaard/accent | db753badab1d885397b48a42ac3fb43024345467 | [
"BSD-3-Clause"
] | 2 | 2021-09-28T05:37:00.000Z | 2022-02-26T10:10:15.000Z | lib/graphql/resolvers/version.ex | samuelnygaard/accent | db753badab1d885397b48a42ac3fb43024345467 | [
"BSD-3-Clause"
] | null | null | null | defmodule Accent.GraphQL.Resolvers.Version do
require Ecto.Query
alias Accent.{
GraphQL.Paginated,
Plugs.GraphQLContext,
Project,
Repo,
Version
}
alias Movement.Builders.NewVersion, as: NewVersionBuilder
alias Movement.Context
alias Movement.Persisters.NewVersion, as: NewVersionPersister
# All mutations resolve to {:ok, payload} so GraphQL errors are carried
# inside the payload's :errors list rather than as resolver failures.
@typep version_operation :: {:ok, %{version: Version.t() | nil, errors: [String.t()] | nil}}

# Builds and persists a new project version through the Movement
# pipeline; the acting user id is taken from the GraphQL connection.
@spec create(Project.t(), %{name: String.t(), tag: String.t()}, GraphQLContext.t()) :: version_operation
def create(project, %{name: name, tag: tag}, info) do
  %Context{}
  |> Context.assign(:project, project)
  |> Context.assign(:name, name)
  |> Context.assign(:tag, tag)
  |> Context.assign(:user_id, info.context[:conn].assigns[:current_user].id)
  |> NewVersionBuilder.build()
  |> NewVersionPersister.persist()
  |> case do
    {:ok, {%{assigns: %{version: version}}, _}} ->
      {:ok, %{version: version, errors: nil}}

    # Any persistence failure collapses into a generic error payload.
    {:error, _reason} ->
      {:ok, %{version: nil, errors: ["unprocessable_entity"]}}
  end
end

# Renames and/or retags an existing version.
@spec update(Version.t(), %{name: String.t(), tag: String.t()}, GraphQLContext.t()) :: version_operation
def update(version, args, _info) do
  version
  |> Version.changeset(%{name: args[:name], tag: args[:tag]})
  |> Repo.update()
  |> case do
    {:ok, version} ->
      {:ok, %{version: version, errors: nil}}

    {:error, _reason} ->
      {:ok, %{version: nil, errors: ["unprocessable_entity"]}}
  end
end
# Paginated list of a project's versions, newest first.
@spec list_project(Project.t(), %{page: number()}, GraphQLContext.t()) :: {:ok, Paginated.t(Version.t())}
def list_project(project, args, _) do
  page =
    project
    |> Ecto.assoc(:versions)
    |> Ecto.Query.order_by(desc: :inserted_at)
    |> Repo.paginate(page: args[:page])
    |> Paginated.format()

  {:ok, page}
end
| 30.866667 | 107 | 0.62041 |
93f3b9641afd02cd45e561bdf98084cd28955294 | 1,835 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_proposals_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_proposals_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_proposals_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.ListProposalsResponse do
  @moduledoc """
  Response message for listing proposals.

  ## Attributes

  *   `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Continuation token for fetching the next page of results.
  *   `proposals` (*type:* `list(GoogleApi.AdExchangeBuyer.V2beta1.Model.Proposal.t)`, *default:* `nil`) - The list of proposals.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :nextPageToken => String.t(),
          :proposals => list(GoogleApi.AdExchangeBuyer.V2beta1.Model.Proposal.t())
        }

  field(:nextPageToken)
  # Nested list of Proposal models, decoded element-wise by ModelBase.
  field(:proposals, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.Proposal, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ListProposalsResponse do
  # Delegate decoding to the generated model helper.
  def decode(value, options),
    do: GoogleApi.AdExchangeBuyer.V2beta1.Model.ListProposalsResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ListProposalsResponse do
  # Delegate encoding to the shared ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 36.7 | 129 | 0.748229 |
93f418c7f8fefe911761304fdae52d43bcb8b8a3 | 1,566 | exs | Elixir | config/prod.exs | urfolomeus/data_noms | 144ecaf4e12ee0fef43736955e27dfe82779524c | [
"MIT"
] | null | null | null | config/prod.exs | urfolomeus/data_noms | 144ecaf4e12ee0fef43736955e27dfe82779524c | [
"MIT"
] | null | null | null | config/prod.exs | urfolomeus/data_noms | 144ecaf4e12ee0fef43736955e27dfe82779524c | [
"MIT"
] | null | null | null | use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :data_noms, DataNoms.Endpoint,
  # {:system, "PORT"} defers the port lookup to runtime.
  http: [port: {:system, "PORT"}],
  url: [host: "example.com"],
  cache_static_manifest: "priv/static/manifest.json"
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section:
#
# config :data_noms, DataNoms.Endpoint,
# ...
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables point to a file on
# disk for the key and cert.
# Do not print debug messages in production
config :logger, level: :info
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :data_noms, DataNoms.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 30.115385 | 64 | 0.728608 |
93f436fc84d421bf4ba0374a180b5d971a89015c | 1,239 | ex | Elixir | mia_server/lib/mia_server/udp.ex | SteffenBauer/mia_elixir | 569388b1f9ddd09f8e21a4d9275c42a81d469857 | [
"MIT"
] | null | null | null | mia_server/lib/mia_server/udp.ex | SteffenBauer/mia_elixir | 569388b1f9ddd09f8e21a4d9275c42a81d469857 | [
"MIT"
] | null | null | null | mia_server/lib/mia_server/udp.ex | SteffenBauer/mia_elixir | 569388b1f9ddd09f8e21a4d9275c42a81d469857 | [
"MIT"
] | null | null | null | defmodule MiaServer.UDP do
use GenServer
require Logger
# Starts the UDP listener, registered under the module name.
def start_link() do
  GenServer.start_link(__MODULE__, :ok, [name: __MODULE__])
end

## API

# No-op (log only) when the parser produced no response for a datagram.
def reply(ip, port, nil) do
  Logger.debug("No response to #{inspect ip}:#{inspect port}")
end

# Queues `message` to be sent back to the given peer.
def reply(ip, port, message) do
  GenServer.cast(__MODULE__, {ip, port, message})
end
## GenServer Callbacks
def init(:ok) do
Process.flag(:trap_exit, true)
port = Application.get_env(:mia_server, :port)
{:ok, socket} = :gen_udp.open(port, [:binary, active: true])
Logger.info "Listening on udp port #{port}"
{:ok, socket}
end
def handle_info({:udp, _socket, ip, port, data}, socket) do
Logger.debug("Received #{inspect data} from #{inspect ip}:#{inspect port}")
MiaServer.Parser.parse_packet(ip, port, String.trim(data))
{:noreply, socket}
end
def handle_cast({ip, port, message}, socket) do
Logger.debug("Send #{inspect message} to #{inspect ip}:#{inspect port}")
:gen_udp.send(socket, ip, port, message <> "\n")
{:noreply, socket}
end
def terminate(reason, socket) do
Logger.debug("Shutting down MIA UDP server for reason #{reason}, socket is #{inspect socket}")
:gen_udp.close(socket)
end
end
| 26.361702 | 98 | 0.66586 |
93f43c500464986b4495c4e04bc7923c07f5782f | 571 | ex | Elixir | lib/swagger/client/model/text_message_variable.ex | CordBoard/ostip-exari | a7a40e438c9a2ab358c28c164cec445a0345d15e | [
"Apache-2.0"
] | null | null | null | lib/swagger/client/model/text_message_variable.ex | CordBoard/ostip-exari | a7a40e438c9a2ab358c28c164cec445a0345d15e | [
"Apache-2.0"
] | null | null | null | lib/swagger/client/model/text_message_variable.ex | CordBoard/ostip-exari | a7a40e438c9a2ab358c28c164cec445a0345d15e | [
"Apache-2.0"
] | 1 | 2018-10-27T07:10:02.000Z | 2018-10-27T07:10:02.000Z | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Swagger.Client.Model.TextMessageVariable do
@moduledoc """
A key/value pair variable in a text message.
"""
@derive [Poison.Encoder]
defstruct [
:"key",
:"value"
]
@type t :: %__MODULE__{
:"key" => String.t,
:"value" => String.t
}
end
defimpl Poison.Decoder, for: Swagger.Client.Model.TextMessageVariable do
def decode(value, _options) do
value
end
end
| 20.392857 | 75 | 0.684764 |
93f4441c1726b8ab1baf3ec357bd4a562722cfd7 | 8,703 | ex | Elixir | lib/hex/state.ex | IvanIvanoff/hex | 90aa44fa8a1e59f2ae65f490edb984e4d6c853d1 | [
"Apache-2.0"
] | null | null | null | lib/hex/state.ex | IvanIvanoff/hex | 90aa44fa8a1e59f2ae65f490edb984e4d6c853d1 | [
"Apache-2.0"
] | null | null | null | lib/hex/state.ex | IvanIvanoff/hex | 90aa44fa8a1e59f2ae65f490edb984e4d6c853d1 | [
"Apache-2.0"
] | null | null | null | defmodule Hex.State do
@moduledoc false
@name __MODULE__
@api_url "https://hex.pm/api"
@home "~/.hex"
@pbkdf2_iters 32_768
def default_api_url(), do: @api_url
@config %{
api_key_read: %{
config: [:"$read_key"]
},
api_key_write: %{
config: [:"$write_key", :"$encrypted_key"]
},
api_key_write_unencrypted: %{
env: ["HEX_API_KEY"],
config: [:api_key]
},
api_url: %{
env: ["HEX_API_URL", "HEX_API"],
config: [:api_url],
default: @api_url,
fun: {__MODULE__, :trim_slash}
},
cache_home: %{
env_path_join: [{"HEX_HOME", ""}, {"XDG_CACHE_HOME", "hex"}],
default: @home,
fun: {__MODULE__, :path_expand}
},
config_home: %{
env_path_join: [{"HEX_HOME", ""}, {"XDG_CONFIG_HOME", "hex"}],
default: @home,
fun: {__MODULE__, :path_expand}
},
unsafe_https: %{
env: ["HEX_UNSAFE_HTTPS"],
config: [:unsafe_https],
default: false,
fun: {__MODULE__, :to_boolean}
},
unsafe_registry: %{
env: ["HEX_UNSAFE_REGISTRY"],
config: [:unsafe_registry],
default: false,
fun: {__MODULE__, :to_boolean}
},
no_verify_repo_origin: %{
env: ["HEX_NO_VERIFY_REPO_ORIGIN"],
config: [:no_verify_repo_origin],
default: false,
fun: {__MODULE__, :to_boolean}
},
http_concurrency: %{
env: ["HEX_HTTP_CONCURRENCY"],
config: [:http_concurrency],
default: 8,
fun: {__MODULE__, :to_integer}
},
http_proxy: %{
env: ["http_proxy", "HTTP_PROXY"],
config: [:http_proxy]
},
https_proxy: %{
env: ["https_proxy", "HTTPS_PROXY"],
config: [:https_proxy]
},
no_proxy: %{
env: ["no_proxy", "NO_PROXY"],
config: [:no_proxy]
},
http_timeout: %{
env: ["HEX_HTTP_TIMEOUT"],
config: [:http_timeout],
fun: {__MODULE__, :to_integer}
},
data_home: %{
env_path_join: [{"HEX_HOME", ""}, {"XDG_DATA_HOME", "hex"}],
default: @home,
fun: {__MODULE__, :path_expand}
},
mirror_url: %{
env: ["HEX_MIRROR_URL", "HEX_MIRROR"],
config: [:mirror_url],
fun: {__MODULE__, :trim_slash}
},
offline: %{
env: ["HEX_OFFLINE"],
config: [:offline],
default: false,
fun: {__MODULE__, :to_boolean}
},
resolve_verbose: %{
env: ["HEX_RESOLVE_VERBOSE"],
default: false,
fun: {__MODULE__, :to_boolean}
},
repos_key: %{
env: ["HEX_REPOS_KEY"],
config: [:repos_key]
},
diff_command: %{
env: ["HEX_DIFF_COMMAND"],
config: [:diff_command],
default: Mix.Tasks.Hex.Package.default_diff_command()
},
cacerts_path: %{
env: ["HEX_CACERTS_PATH"],
default: nil,
config: [:cacerts_path]
}
}
def start_link([]) do
global_config = Hex.Config.read()
Agent.start_link(__MODULE__, :init, [global_config], name: @name)
end
def child_spec(arg) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [arg]}
}
end
def stop() do
Agent.stop(@name)
end
def init(global_config) do
project_config = Keyword.get(Mix.Project.config(), :hex, [])
state =
Enum.into(@config, %{}, fn {key, spec} ->
{key, load_config_value(global_config, project_config, spec)}
end)
{_source, repos_key} = Map.fetch!(state, :repos_key)
Map.merge(state, %{
clean_pass: {:computed, true},
httpc_profile: {:computed, :hex},
pbkdf2_iters: {:computed, @pbkdf2_iters},
repos: {:computed, Hex.Config.read_repos(global_config, repos_key)},
repos_key: {:computed, repos_key},
ssl_version: {:computed, ssl_version()},
shell_process: {:computed, nil}
})
end
def refresh() do
Agent.update(@name, fn _ ->
init(Hex.Config.read())
end)
end
def fetch!(key) do
Agent.get(@name, fn state ->
case Map.fetch(state, key) do
{:ok, {_source, value}} ->
value
:error ->
raise KeyError, key: key, term: Hex.State
end
end)
end
def fetch!(key, transform) do
key
|> fetch!()
|> transform.()
end
def fetch_source!(key) do
Agent.get(@name, fn state ->
case Map.fetch(state, key) do
{:ok, {source, _value}} ->
source
:error ->
raise KeyError, key: key, term: Hex.State
end
end)
end
def put(key, value) do
Agent.update(@name, Map, :put, [key, {:computed, value}])
end
def update!(key, fun) do
Agent.update(@name, fn state ->
Map.update!(state, key, fn {source, value} ->
{source, fun.(value)}
end)
end)
end
def get_all() do
Agent.get(@name, & &1)
end
def put_all(map) do
Agent.update(@name, fn _ -> map end)
end
defp load_config_value(global_config, project_config, spec) do
result =
load_env(spec[:env]) ||
load_env_path_join(spec[:env_path_join]) ||
load_project_config(project_config, spec[:config]) ||
load_global_config(global_config, spec[:config])
{module, func} = spec[:fun] || {__MODULE__, :ok_wrap}
case result do
nil ->
{:ok, value} = apply(module, func, [spec[:default]])
{:default, value}
{source, value} ->
case apply(module, func, [value]) do
{:ok, value} ->
{source, value}
:error ->
print_invalid_config_error(value, source)
{:ok, value} = apply(module, func, [spec[:default]])
{:default, value}
end
end
end
defp print_invalid_config_error(value, source) do
value = inspect(value, pretty: true)
message = "Invalid Hex config, falling back to default. Source: #{source(source)} #{value}"
Hex.Shell.error(message)
end
defp source({:env, env_var}), do: "environment variable #{env_var}="
defp source({:project_config, key}), do: "mix.exs config #{key}: "
defp source({:global_config, key}), do: "Hex config (location: #{config_path()}) #{key}: "
defp config_path() do
:config_home
|> Hex.State.fetch!()
|> Path.join("hex.config")
end
defp load_env(keys) do
Enum.find_value(keys || [], fn key ->
if value = System.get_env(key) do
{{:env, key}, value}
else
nil
end
end)
end
defp load_env_path_join(keys) do
Enum.find_value(keys || [], fn {key, prefix} ->
if value = System.get_env(key) do
{{:env_path_join, {key, prefix}}, Path.join(value, prefix)}
else
nil
end
end)
end
defp load_global_config(config, keys) do
Enum.find_value(keys || [], fn key ->
if value = Keyword.get(config, key) do
{{:global_config, key}, value}
end
end)
end
defp load_project_config(config, keys) do
Enum.find_value(keys || [], fn key ->
if value = Keyword.get(config, key) do
{{:project_config, key}, value}
end
end)
end
def to_boolean(nil), do: {:ok, nil}
def to_boolean(false), do: {:ok, false}
def to_boolean(true), do: {:ok, true}
def to_boolean("0"), do: {:ok, false}
def to_boolean("1"), do: {:ok, true}
def to_boolean("false"), do: {:ok, false}
def to_boolean("true"), do: {:ok, true}
def to_boolean("FALSE"), do: {:ok, false}
def to_boolean("TRUE"), do: {:ok, true}
def to_boolean(_), do: :error
def to_integer(nil), do: {:ok, nil}
def to_integer(""), do: {:ok, nil}
def to_integer(integer) when is_integer(integer), do: {:ok, integer}
def to_integer(string) when is_binary(string) do
{int, _} = Integer.parse(string)
{:ok, int}
end
def to_integer(_), do: :error
def default(nil, value), do: value
def default(value, _), do: value
def trim_slash(nil), do: {:ok, nil}
def trim_slash(string) when is_binary(string),
do: {:ok, Hex.Stdlib.string_trim_leading(string, "/")}
def trim_slash(_), do: :error
def ssl_version() do
{:ok, version} = :application.get_key(:ssl, :vsn)
parse_ssl_version(version)
end
defp parse_ssl_version(version) do
version
|> List.to_string()
|> String.split(".")
|> Enum.take(3)
|> Enum.map(&to_integer/1)
|> version_pad()
|> List.to_tuple()
end
defp version_pad([major]), do: [major, 0, 0]
defp version_pad([major, minor]), do: [major, minor, 0]
defp version_pad([major, minor, patch]), do: [major, minor, patch]
defp version_pad([major, minor, patch | _]), do: [major, minor, patch]
def path_expand(path) when is_binary(path) do
{:ok, Path.expand(path)}
end
def path_expand(_), do: :error
def ok_wrap(arg), do: {:ok, arg}
def config, do: @config
end
| 24.865714 | 95 | 0.582443 |
93f4444d09abb0973c2c05882120eea82c4278f8 | 3,936 | exs | Elixir | test/unit/elsa/consumer/worker_test.exs | fxn/elsa | 3d2b5eaccebab48799f8e9b68f78120cd1c3c0c1 | [
"Apache-2.0"
] | null | null | null | test/unit/elsa/consumer/worker_test.exs | fxn/elsa | 3d2b5eaccebab48799f8e9b68f78120cd1c3c0c1 | [
"Apache-2.0"
] | null | null | null | test/unit/elsa/consumer/worker_test.exs | fxn/elsa | 3d2b5eaccebab48799f8e9b68f78120cd1c3c0c1 | [
"Apache-2.0"
] | null | null | null | defmodule Elsa.Consumer.WorkerTest do
use ExUnit.Case
use Placebo
import Checkov
import Elsa.Consumer.Worker, only: [kafka_message_set: 1]
import Elsa.Message, only: [kafka_message: 1]
describe "handle_info/2" do
setup do
Elsa.Registry.start_link(keys: :unique, name: Elsa.Supervisor.registry(:test_name))
allow(Elsa.Group.Manager.ack(any(), any(), any(), any(), any()), return: :ok)
allow(Elsa.Consumer.ack(any(), any(), any(), any()),
return: :ok,
meck_options: [:passthrough]
)
allow(:brod.subscribe(any(), any(), any(), any(), any()),
return: {:ok, self()},
meck_options: [:passthrough]
)
on_exit(fn ->
pid = Process.whereis(__MODULE__)
if pid != nil do
ref = Process.monitor(pid)
Process.exit(pid, :normal)
assert_receive {:DOWN, ^ref, _, _, _}
end
end)
init_args = [
connection: :test_name,
topic: "test-topic",
partition: 0,
generation_id: 5,
begin_offset: 13,
handler: Elsa.Consumer.WorkerTest.Handler,
handler_init_args: [],
config: []
]
Elsa.Consumer.Worker.start_link(init_args)
messages =
kafka_message_set(
topic: "test-topic",
partition: 0,
messages: [
kafka_message(offset: 13, key: "key1", value: "value1"),
kafka_message(offset: 14, key: "key2", value: "value2")
]
)
[messages: messages, state: create_state(init_args)]
end
data_test "handler can specifiy offset to ack", %{messages: messages, state: state} do
set_handler(fn messages ->
offset = messages |> List.first() |> Map.get(:offset)
{ack, offset}
end)
Elsa.Consumer.Worker.handle_info({:some_pid, messages}, state)
assert_called(Elsa.Group.Manager.ack(:test_name, "test-topic", 0, 5, 13))
where(ack: [:ack, :acknowledge])
end
data_test "handler can say #{response}", %{messages: messages, state: state} do
set_handler(fn _messags -> response end)
Elsa.Consumer.Worker.handle_info({:some_pid, messages}, state)
refute_called(Elsa.Group.Manager.ack(:test_name, "test-topic", 0, any(), any()))
refute_called(:brod.consume_ack(:test_name, "test-topic", 0, any()))
where(response: [:no_ack, :noop])
end
test "handler can say to continue to consume the ack but not ack consumer group", %{
messages: messages,
state: state
} do
set_handler(fn _messages -> :continue end)
Elsa.Consumer.Worker.handle_info({:some_pid, messages}, state)
refute_called(Elsa.Group.Manager.ack(:test_name, "test-topic", 0, any(), any()))
assert_called(Elsa.Consumer.ack(:test_name, "test-topic", 0, any()))
end
data_test "acking without a generation_id continues to consume messages", %{
messages: messages,
state: state
} do
set_handler(fn msgs ->
offset = msgs |> List.first() |> Map.get(:offset)
{ack, offset}
end)
Elsa.Consumer.Worker.handle_info({:some_pid, messages}, Map.put(state, :generation_id, nil))
refute_called Elsa.Group.Manager.ack(:test_name, "test-topic", 0, any(), any())
assert_called Elsa.Consumer.ack(:test_name, "test-topic", 0, any())
where ack: [:ack, :acknowledge]
end
end
defp create_state(init_args) do
state =
init_args
|> Enum.into(%{})
|> Map.delete(:begin_offset)
|> Map.put(:offset, 13)
struct(Elsa.Consumer.Worker.State, state)
end
defp set_handler(handler) do
Agent.start_link(fn -> handler end, name: __MODULE__)
end
end
defmodule Elsa.Consumer.WorkerTest.Handler do
use Elsa.Consumer.MessageHandler
def handle_messages(messages) do
function = Agent.get(Elsa.Consumer.WorkerTest, fn s -> s end)
function.(messages)
end
end
| 29.373134 | 98 | 0.617378 |
93f458b720e597e8b5cdd46314886cce1adffa4d | 3,296 | ex | Elixir | lib/planga/event/reducer.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 37 | 2018-07-13T14:08:16.000Z | 2021-04-09T15:00:22.000Z | lib/planga/event/reducer.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 9 | 2018-07-16T15:24:39.000Z | 2021-09-01T14:21:20.000Z | lib/planga/event/reducer.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 3 | 2018-10-05T20:19:25.000Z | 2019-12-05T00:30:01.000Z | defmodule Planga.Event.Reducer do
@moduledoc """
This module knows/decides how to change the application state based on incooming events.
"""
alias TeaVent.Event
@doc """
Receives the current subject (the 'logical context') for this event as first parameter, and the event itself as the second parameter.
Supposed to return `{:ok, updated_subject} | {:error, problem}`
"""
@spec reducer(structure, event :: TeaVent.Event.t()) :: {:ok, structure} | {:error, any}
when structure: any
def reducer(structure, event)
def reducer(_, %Event{
topic: [:apps, _app_id, :conversations, _conversation_id, :messages],
name: :new_message,
meta: %{creator: conversation_user},
data: data
}) do
if Planga.Chat.ConversationUser.banned?(conversation_user) do
{:error, "You are not allowed to perform this action"}
else
Planga.Chat.Message.new(%{
content: data.message,
conversation_id: conversation_user.conversation_id,
sender_id: conversation_user.user_id,
conversation_user_id: conversation_user.id
})
end
end
def reducer(message = %Planga.Chat.Message{}, %Event{
topic: [:apps, _app_id, :conversations, _conversation_id, :messages, _message_id],
name: name,
meta: %{creator: conversation_user, started_at: started_at}
})
when name in [:hide_message, :show_message] do
case is_nil(conversation_user) ||
Planga.Chat.ConversationUser.is_moderator?(conversation_user) do
false ->
{:error, "You are not allowed to perform this action"}
true ->
case name do
:hide_message ->
{:ok, Planga.Chat.Message.hide_message(message, started_at)}
:show_message ->
{:ok, Planga.Chat.Message.show_message(message)}
end
end
end
def reducer(subject = %Planga.Chat.ConversationUser{}, %Event{
topic: [
:apps,
_app_id,
:conversations,
_conversation_id,
:conversation_users,
_remote_user_id
],
name: name,
meta: %{creator: conversation_user, started_at: started_at},
data: data
})
when name in [:ban, :unban] do
case is_nil(conversation_user) ||
Planga.Chat.ConversationUser.is_moderator?(conversation_user) do
false ->
{:error, "You are not allowed to perform this action"}
true ->
case name do
:ban ->
{:ok, Planga.Chat.ConversationUser.ban(subject, data.duration_minutes, started_at)}
:unban ->
{:ok, Planga.Chat.ConversationUser.unban(subject)}
end
end
end
def reducer(subject = %Planga.Chat.ConversationUser{}, %Event{
topic: [:apps, _app_id, :conversations, _conversation_id, :users, _remote_user_id],
name: :set_role,
meta: %{creator: creator},
data: data
}) do
case creator == nil || Planga.Chat.ConversationUser.is_moderator?(creator) do
false ->
{:error, "You are not allowed to perform this action"}
true ->
Planga.Chat.ConversationUser.set_role(subject, data.role)
end
end
def reducer(input, %Event{name: :noop}) do
{:ok, input}
end
end
| 31.390476 | 136 | 0.624697 |
93f4873c36627783d393bcfab11c422b8e7b12c9 | 701 | ex | Elixir | debian_nonhdfs/preinst.ex | sreedishps/scribe | 033b701f665b195229b8b93505ccb4cd2f8ac0f7 | [
"Apache-2.0"
] | 7 | 2015-01-29T14:41:28.000Z | 2021-03-09T01:29:58.000Z | debian_nonhdfs/preinst.ex | sreedishps/scribe | 033b701f665b195229b8b93505ccb4cd2f8ac0f7 | [
"Apache-2.0"
] | 2 | 2015-03-23T03:59:12.000Z | 2015-03-27T07:12:41.000Z | debian_nonhdfs/preinst.ex | sreedishps/scribe | 033b701f665b195229b8b93505ccb4cd2f8ac0f7 | [
"Apache-2.0"
] | 5 | 2015-03-03T06:54:00.000Z | 2018-10-16T21:08:02.000Z | #!/bin/sh
# preinst script for scribe-server-orig
#
# see: dh_installdeb(1)
set -e
# summary of how this script can be called:
# * <new-preinst> `install'
# * <new-preinst> `install' <old-version>
# * <new-preinst> `upgrade' <old-version>
# * <old-preinst> `abort-upgrade' <new-version>
# for details, see http://www.debian.org/doc/debian-policy/ or
# the debian-policy package
case "$1" in
install|upgrade)
;;
abort-upgrade)
;;
*)
echo "preinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
# dh_installdeb will replace this with shell code automatically
# generated by other debhelper scripts.
#DEBHELPER#
exit 0
| 19.472222 | 63 | 0.630528 |
93f49024f97066f619afaf0bc1929f7881fb47d7 | 12,544 | exs | Elixir | test/wavex/chunks/bae_test.exs | basdirks/wavex | b465c374d4b8a1668187d6c056b1d299fe3a9ffe | [
"Apache-2.0"
] | null | null | null | test/wavex/chunks/bae_test.exs | basdirks/wavex | b465c374d4b8a1668187d6c056b1d299fe3a9ffe | [
"Apache-2.0"
] | null | null | null | test/wavex/chunks/bae_test.exs | basdirks/wavex | b465c374d4b8a1668187d6c056b1d299fe3a9ffe | [
"Apache-2.0"
] | null | null | null | defmodule Wavex.Chunk.BAETest do
@moduledoc false
use ExUnit.Case, async: true
use ExUnitProperties
alias Wavex.Chunk.BAE
@max_16_signed 2
|> :math.pow(15)
|> round()
@max_16_unsigned 2
|> :math.pow(16)
|> round()
@max_32_unsigned 2
|> :math.pow(32)
|> round()
@range_16_signed -@max_16_signed..(@max_16_signed - 1)
@range_16_unsigned 0..@max_16_unsigned
@range_32_unsigned 0..@max_32_unsigned
defp ascii_padded(max_length) do
ExUnitProperties.gen all length <- StreamData.integer(0..max_length),
binary <- StreamData.string(:ascii, length: length) do
binary <> String.duplicate(<<0>>, max_length - length)
end
end
defp binary_padded(max_length) do
ExUnitProperties.gen all length <- StreamData.integer(0..max_length),
binary <- StreamData.binary(length: length) do
binary <> String.duplicate(<<0>>, max_length - length)
end
end
defp zero_padded_integer(integer, count) do
integer
|> Integer.to_string()
|> String.pad_leading(count, "0")
end
defp date_time_sep, do: StreamData.member_of([?-, ?_, ?:, ?\s, ?.])
defp date_time do
ExUnitProperties.gen all unix_time <- StreamData.integer(0..250_000_000_000),
sep1 <- date_time_sep(),
sep2 <- date_time_sep(),
sep3 <- date_time_sep(),
sep4 <- date_time_sep() do
%DateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second
} = DateTime.from_unix!(unix_time)
zero_padded_integer(year, 4) <>
<<sep1>> <>
zero_padded_integer(month, 2) <>
<<sep2>> <>
zero_padded_integer(day, 2) <>
zero_padded_integer(hour, 2) <>
<<sep3>> <> zero_padded_integer(minute, 2) <> <<sep4>> <> zero_padded_integer(second, 2)
end
end
defp binary_v0 do
ExUnitProperties.gen all description <- ascii_padded(256),
originator <- ascii_padded(32),
originator_reference <- ascii_padded(32),
origination_date_time <- date_time(),
time_reference_low <- StreamData.integer(@range_32_unsigned),
time_reference_high <- StreamData.integer(@range_32_unsigned),
coding_history <- StreamData.binary() do
"bext" <>
<<602 + byte_size(coding_history)::32-little>> <>
description <>
originator <>
originator_reference <>
origination_date_time <>
<<
time_reference_low::32-little,
time_reference_high::32-little,
0x0000::16-little,
0x00::8*254
>> <> coding_history
end
end
defp binary_v1 do
ExUnitProperties.gen all description <- ascii_padded(256),
originator <- ascii_padded(32),
originator_reference <- ascii_padded(32),
origination_date_time <- date_time(),
time_reference_low <- StreamData.integer(@range_32_unsigned),
time_reference_high <- StreamData.integer(@range_32_unsigned),
umid <- binary_padded(64),
coding_history <- StreamData.binary() do
"bext" <>
<<602 + byte_size(coding_history)::32-little>> <>
description <>
originator <>
originator_reference <>
origination_date_time <>
<<
time_reference_low::32-little,
time_reference_high::32-little,
0x0001::16-little
>> <> umid <> <<0x00::8*190>> <> coding_history
end
end
defp binary_v2 do
ExUnitProperties.gen all description <- ascii_padded(256),
originator <- ascii_padded(32),
originator_reference <- ascii_padded(32),
origination_date_time <- date_time(),
time_reference_low <- StreamData.integer(@range_32_unsigned),
time_reference_high <- StreamData.integer(@range_32_unsigned),
umid <- binary_padded(64),
loudness_value <- StreamData.integer(@range_16_signed),
loudness_range <- StreamData.integer(@range_16_signed),
max_true_peak_level <- StreamData.integer(@range_16_signed),
max_momentary_loudness <- StreamData.integer(@range_16_signed),
max_short_term_loudness <- StreamData.integer(@range_16_signed),
coding_history <- StreamData.binary() do
"bext" <>
<<602 + byte_size(coding_history)::32-little>> <>
description <>
originator <>
originator_reference <>
origination_date_time <>
<<
time_reference_low::32-little,
time_reference_high::32-little,
0x0002::16-little
>> <>
umid <>
<<
loudness_value::16-signed-little,
loudness_range::16-signed-little,
max_true_peak_level::16-signed-little,
max_momentary_loudness::16-signed-little,
max_short_term_loudness::16-signed-little,
0x00::8*180
>> <> coding_history
end
end
defp binary_v2_corrupt(position, part) do
size = byte_size(part)
ExUnitProperties.gen all binary <- binary_v2(),
<<
pre::binary-size(position),
_::binary-size(size),
post::binary
>> = binary do
<<
pre::binary,
part::binary,
post::binary
>>
end
end
test "the associated FourCC" do
assert BAE.four_cc() == "bext"
end
describe "reading a binary" do
property "containing a valid chunk, version 0" do
check all binary <- binary_v0() do
{:ok, chunk, ""} = BAE.read(binary)
with %BAE{
version: version,
description: description,
originator: originator,
originator_reference: originator_reference,
origination_time: origination_time,
origination_date: origination_date,
time_reference_low: time_reference_low,
time_reference_high: time_reference_high,
umid: umid,
loudness_value: loudness_value,
loudness_range: loudness_range,
max_true_peak_level: max_true_peak_level,
max_momentary_loudness: max_momentary_loudness,
max_short_term_loudness: max_short_term_loudness
} <- chunk do
assert version == 0x0000
assert is_binary(description)
assert byte_size(description) in 0..256
assert is_binary(originator)
assert byte_size(originator) in 0..32
assert is_binary(originator_reference)
assert byte_size(originator_reference) in 0..32
assert match?(%Date{}, origination_date)
assert match?(%Time{}, origination_time)
assert is_integer(time_reference_low)
assert time_reference_low in @range_32_unsigned
assert is_integer(time_reference_high)
assert time_reference_high in @range_32_unsigned
assert is_nil(umid)
assert is_nil(loudness_value)
assert is_nil(loudness_range)
assert is_nil(max_true_peak_level)
assert is_nil(max_momentary_loudness)
assert is_nil(max_short_term_loudness)
end
end
end
property "containing a valid chunk, version 1" do
check all binary <- binary_v1() do
{:ok, chunk, ""} = BAE.read(binary)
with %BAE{
version: version,
description: description,
originator: originator,
originator_reference: originator_reference,
origination_time: origination_time,
origination_date: origination_date,
time_reference_low: time_reference_low,
time_reference_high: time_reference_high,
umid: umid,
loudness_value: loudness_value,
loudness_range: loudness_range,
max_true_peak_level: max_true_peak_level,
max_momentary_loudness: max_momentary_loudness,
max_short_term_loudness: max_short_term_loudness
} <- chunk do
assert version == 0x0001
assert is_binary(description)
assert byte_size(description) in 0..256
assert is_binary(originator)
assert byte_size(originator) in 0..32
assert is_binary(originator_reference)
assert byte_size(originator_reference) in 0..32
assert match?(%Date{}, origination_date)
assert match?(%Time{}, origination_time)
assert is_integer(time_reference_low)
assert time_reference_low in @range_32_unsigned
assert is_integer(time_reference_high)
assert time_reference_high in @range_32_unsigned
assert is_binary(umid)
assert byte_size(umid)
assert is_nil(loudness_value)
assert is_nil(loudness_range)
assert is_nil(max_true_peak_level)
assert is_nil(max_momentary_loudness)
assert is_nil(max_short_term_loudness)
end
end
end
property "containing a valid chunk, version 2" do
check all binary <- binary_v2() do
{:ok,
%BAE{
version: version,
description: description,
originator: originator,
originator_reference: originator_reference,
origination_time: origination_time,
origination_date: origination_date,
time_reference_low: time_reference_low,
time_reference_high: time_reference_high,
umid: umid,
loudness_value: loudness_value,
loudness_range: loudness_range,
max_true_peak_level: max_true_peak_level,
max_momentary_loudness: max_momentary_loudness,
max_short_term_loudness: max_short_term_loudness
}, ""} = BAE.read(binary)
assert version == 0x0002
assert is_binary(description)
assert byte_size(description) in 0..256
assert is_binary(originator)
assert byte_size(originator) in 0..32
assert is_binary(originator_reference)
assert byte_size(originator_reference) in 0..32
assert match?(%Date{}, origination_date)
assert match?(%Time{}, origination_time)
assert time_reference_low in @range_32_unsigned
assert time_reference_high in @range_32_unsigned
assert is_binary(umid)
assert byte_size(umid)
assert is_integer(loudness_value)
assert loudness_value in @range_16_signed
assert is_integer(loudness_range)
assert loudness_range in @range_16_signed
assert is_integer(max_true_peak_level)
assert max_true_peak_level in @range_16_signed
assert is_integer(max_momentary_loudness)
assert max_momentary_loudness in @range_16_signed
assert is_integer(max_short_term_loudness)
assert max_short_term_loudness in @range_16_signed
end
end
property "containing chunk with an unexpected FourCC" do
check all actual <- StreamData.string(:ascii, length: 4),
expected = BAE.four_cc(),
actual != expected,
binary <- binary_v2_corrupt(0, actual) do
assert match?(
{:error, {:unexpected_four_cc, %{expected: ^expected, actual: ^actual}}},
BAE.read(binary)
)
end
end
property "containing a chunk with an unknown version" do
check all actual <- StreamData.integer(@range_16_unsigned),
not (actual in 0x0000..0x0002),
binary <- binary_v2_corrupt(354, <<actual::16-little>>) do
assert match?({:error, {:unsupported_bae_version, ^actual}}, BAE.read(binary))
end
end
end
end
| 37.66967 | 96 | 0.585858 |
93f4938f9daee582600a23ed7c1d347b29c42860 | 307 | ex | Elixir | lib/event_socket_outbound/call_mgmt.ex | xadhoom/freeswitch-outboundsocket | c0eb6fd43b7fc4f9e78afb3587057b90873bf3be | [
"MIT"
] | 12 | 2018-03-21T11:45:15.000Z | 2021-12-23T08:48:06.000Z | lib/event_socket_outbound/call_mgmt.ex | xadhoom/freeswitch-outboundsocket | c0eb6fd43b7fc4f9e78afb3587057b90873bf3be | [
"MIT"
] | 6 | 2018-08-29T10:58:10.000Z | 2021-03-02T20:23:03.000Z | lib/event_socket_outbound/call_mgmt.ex | xadhoom/freeswitch-outboundsocket | c0eb6fd43b7fc4f9e78afb3587057b90873bf3be | [
"MIT"
] | 4 | 2019-01-04T11:22:19.000Z | 2021-12-21T08:41:12.000Z | defmodule EventSocketOutbound.CallMgmt do
@moduledoc """
Callbacks for module will define call routing.
"""
@callback start_link(pid()) ::
{:ok, pid()}
| :ignore
| {:error, {:already_started, pid()} | term()}
@callback onEvent(pid(), map()) :: term()
end
| 27.909091 | 60 | 0.563518 |
93f498363659ab12c6d9a3a64187eea839e19b18 | 770 | ex | Elixir | day5/part1.ex | michaljanocko/aoc | fe3af43536013af1787e0c0685aec5bbb096bd87 | [
"WTFPL"
] | 1 | 2021-12-06T14:40:38.000Z | 2021-12-06T14:40:38.000Z | day5/part1.ex | michaljanocko/aoc | fe3af43536013af1787e0c0685aec5bbb096bd87 | [
"WTFPL"
] | null | null | null | day5/part1.ex | michaljanocko/aoc | fe3af43536013af1787e0c0685aec5bbb096bd87 | [
"WTFPL"
] | null | null | null | File.stream!("input")
|> Enum.map(&String.trim/1)
|> Enum.map(&String.split(&1, [" -> ", ","]))
|> List.flatten()
|> Enum.map(&String.to_integer/1)
|> Enum.chunk_every(2)
|> Enum.chunk_every(2)
|> Enum.reduce(%{}, fn [[x, y], [a, b]], acc ->
cond do
x == a ->
Map.merge(
Enum.reduce(y..b, acc, &Map.update(&2, "#{x}:#{&1}", 1, fn n -> n + 1 end)),
Enum.reduce(b..y, acc, &Map.update(&2, "#{x}:#{&1}", 1, fn n -> n + 1 end))
)
y == b ->
Map.merge(
Enum.reduce(x..a, acc, &Map.update(&2, "#{&1}:#{y}", 1, fn n -> n + 1 end)),
Enum.reduce(a..x, acc, &Map.update(&2, "#{&1}:#{y}", 1, fn n -> n + 1 end))
)
true ->
acc
end
end)
|> Map.to_list()
|> Enum.count(&(elem(&1, 1) > 1))
|> IO.inspect()
| 26.551724 | 84 | 0.462338 |
93f4a817eec30e0712074e5301139bccc6053083 | 1,093 | ex | Elixir | test/support/conn_case.ex | barnaba/archery-competition | cd5d302431429218aeb72c71fa96981667d8d95c | [
"MIT"
] | null | null | null | test/support/conn_case.ex | barnaba/archery-competition | cd5d302431429218aeb72c71fa96981667d8d95c | [
"MIT"
] | 6 | 2018-07-11T21:01:51.000Z | 2018-07-11T21:06:07.000Z | test/support/conn_case.ex | barnaba/archery-competition | cd5d302431429218aeb72c71fa96981667d8d95c | [
"MIT"
] | null | null | null | defmodule ArcheryCompetitionWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
import ArcheryCompetitionWeb.Router.Helpers
# The default endpoint for testing
@endpoint ArcheryCompetitionWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(ArcheryCompetition.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(ArcheryCompetition.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 28.025641 | 80 | 0.73376 |
93f4c9dd9051657739c6f1e039d09b6733e6e54e | 437 | exs | Elixir | test/chess/square_test.exs | 7hoenix/Chess | 8b57b2d6dd4235e2ee67622f6521f83cd237417b | [
"MIT"
] | 8 | 2018-11-12T13:45:42.000Z | 2022-03-15T14:45:26.000Z | test/chess/square_test.exs | 7hoenix/Chess | 8b57b2d6dd4235e2ee67622f6521f83cd237417b | [
"MIT"
] | 1 | 2021-08-30T08:58:53.000Z | 2021-10-13T09:15:36.000Z | test/chess/square_test.exs | 7hoenix/Chess | 8b57b2d6dd4235e2ee67622f6521f83cd237417b | [
"MIT"
] | 3 | 2020-12-08T22:32:37.000Z | 2022-01-27T17:54:55.000Z | defmodule Chess.SquareTest do
use ExUnit.Case
alias Chess.{Square, Position}
test "create squares for starting game" do
  # A fresh game yields exactly the 32 occupied squares.
  squares = Square.prepare_for_new_game()
  assert 32 == length(squares)
end
test "create squares from existed position" do
  # Building squares from the FEN starting position yields the same 32 squares.
  fen = "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR"
  squares = Square.prepare_from_position(%Position{position: fen})
  assert 32 == length(squares)
end
end
| 23 | 81 | 0.7254 |
93f4e82b7cd7373b8979a3e2d906d2b0d645f7d6 | 5,975 | ex | Elixir | lib/content_gateway.ex | emerleite/content-gateway-elixir | c3dec2476002d2f78436b09f5d3b682161af0348 | [
"Apache-2.0"
] | 6 | 2016-12-26T19:34:11.000Z | 2019-04-02T22:04:51.000Z | lib/content_gateway.ex | emerleite/content-gateway-elixir | c3dec2476002d2f78436b09f5d3b682161af0348 | [
"Apache-2.0"
] | 1 | 2018-01-23T16:50:46.000Z | 2018-01-23T16:53:23.000Z | lib/content_gateway.ex | emerleite/content-gateway-elixir | c3dec2476002d2f78436b09f5d3b682161af0348 | [
"Apache-2.0"
] | 4 | 2017-04-20T19:09:01.000Z | 2018-01-23T16:49:08.000Z | defmodule ContentGateway do
defmacro __using__(_opts) do
  quote do
    # alias :exometer, as: Exometer
    require Logger

    # Configuration callbacks: `connection_timeout/0` and `request_timeout/0`
    # must be overridden by the using module; `user_agent/0` may be
    # overridden to customize the User-Agent header.
    def connection_timeout do
      raise "not implemented"
    end

    def request_timeout do
      raise "not implemented"
    end

    def user_agent do
      "Elixir (Content Gateway)"
    end

    defoverridable [connection_timeout: 0, request_timeout: 0, user_agent: 0]

    #API

    # Convenience heads that fill in the missing option groups with defaults.
    def get(url, [cache_options: cache_options]), do: get(url, headers: %{}, options: %{}, cache_options: cache_options)
    def get(url, [headers: headers, cache_options: cache_options]), do: get(url, headers: headers, options: %{}, cache_options: cache_options)
    def get(url, [options: options, cache_options: cache_options]), do: get(url, headers: %{}, options: options, cache_options: cache_options)
    def get(url, [headers: headers, options: options]), do: get(url, headers: headers, options: options, cache_options: %{})
    def get(url, [headers: headers]), do: get(url, headers: headers, options: %{})
    def get(url, [options: options]), do: get(url, headers: %{}, options: options)

    # BUGFIX: this clause must come BEFORE the general `cache_options` clause
    # below. It was previously declared after it, so it could never match and
    # `cache_options: %{skip: true}` never actually bypassed the cache.
    def get(url, [headers: headers, options: options, cache_options: %{skip: true}]) do
      request(url, headers, options)
    end

    def get(url, [headers: headers, options: options, cache_options: cache_options]) do
      # Serve from cache when present, otherwise fetch and cache the result.
      case Cachex.get(:content_gateway_cache, url) do
        {:ok, value} ->
          Logger.debug "[HIT] #{url}"
          {:ok, value}

        {:missing, nil} ->
          url
          |> request(headers, options)
          |> process_response(url, cache_options[:expires_in], cache_options[:stale_expires_in])
      end
    end

    def get(url) do
      request(url)
    end

    # Drops both the fresh and the stale cache entries for `url`.
    def clear_cache(url) do
      Cachex.del(:content_gateway_cache, url)
      Cachex.del(:content_gateway_cache, "stale:#{url}")
    end

    # Performs the HTTP GET and tags the result by status class.
    defp request(url, headers \\ %{}, options \\ %{}) do
      before_time = :os.timestamp()

      response =
        case HTTPoison.get(url, headers |> merge_request_headers, options |> merge_request_options) do
          {:ok, %HTTPoison.Response{status_code: 200, body: body}} -> {:ok, body}
          {:ok, %HTTPoison.Response{status_code: 400, body: body}} -> {:bad_request, body}
          {:ok, %HTTPoison.Response{status_code: 401, body: body}} -> {:unauthorized, body}
          {:ok, %HTTPoison.Response{status_code: 403, body: body}} -> {:forbidden, body}
          {:ok, %HTTPoison.Response{status_code: 404, body: body}} -> {:not_found, body}
          {:ok, %HTTPoison.Response{status_code: status, body: _body}} -> {:error, "Request failed [url:#{url}] [status:#{status}]"}
          {:error, %HTTPoison.Error{reason: reason}} -> {:error, "Request Error [url:#{url}] - [#{reason}]"}
        end

      # Timing is still computed so the Exometer reporting below can be
      # re-enabled; underscore prefixes silence unused-variable warnings.
      after_time = :os.timestamp()
      _diff = :timer.now_diff(after_time, before_time)
      _host = URI.parse(url).host
      # app_name = Application.get_env(:config_scope, :app_name)
      # Exometer.update [app_name, :external, "resp_time", host], diff
      # Exometer.update [app_name, :external, "resp_count", host], 1
      response
    end

    defp merge_request_headers(headers) do
      headers
      |> Map.merge(%{"User-Agent" => user_agent()})
    end

    # Caller-supplied options win over the configured timeouts.
    defp merge_request_options(options) do
      %{timeout: connection_timeout(), recv_timeout: request_timeout()}
      |> Map.merge(options)
      |> Map.to_list()
    end

    # Decodes a JSON body; returns :parse_error (and logs) on failure.
    defp parse_data(body) do
      case Poison.Parser.parse(body) do
        {:ok, json_data} ->
          json_data

        {:error, reason} ->
          Logger.error "Error parsing json data:#{body} - reason:#{reason}"
          :parse_error
      end
    end

    # Client (4xx) responses are logged and mapped to error atoms; response
    # bodies are intentionally discarded (`_body`).
    defp process_response({:bad_request, _body}, url, _expires_in, _stale_expires_in) do
      Logger.info "Bad Request [url:#{url}]"
      {:error, :bad_request}
    end

    defp process_response({:unauthorized, _body}, url, _expires_in, _stale_expires_in) do
      Logger.info "Unauthorized [url:#{url}]"
      {:error, :unauthorized}
    end

    defp process_response({:forbidden, _body}, url, _expires_in, _stale_expires_in) do
      Logger.info "Forbidden [url:#{url}]"
      {:error, :forbidden}
    end

    defp process_response({:not_found, _body}, url, _expires_in, _stale_expires_in) do
      Logger.info "Resource Not Found [url:#{url}]"
      {:error, :not_found}
    end

    defp process_response({:error, message}, url, _expires_in, _stale_expires_in) do
      # On request failure, fall back to the stale cache copy if one exists.
      case Cachex.get(:content_gateway_cache, "stale:#{url}") do
        {:ok, value} ->
          Logger.info "[STALE] #{url}"
          {:ok, value}

        {:missing, nil} ->
          Logger.warn message
          {:error, :no_stale}
      end
    end

    defp process_response({:ok, body}, url, expires_in, stale_expires_in) do
      Logger.info "[MISS] #{url}"

      body
      |> parse_data
      |> store_on_cache(url, expires_in, stale_expires_in)
      |> make_response
    end

    defp make_response(:parse_error), do: {:error, :parse_error}
    defp make_response(data), do: {:ok, data}

    # store_on_cache/4: when no stale TTL is given, only the fresh entry is
    # written; otherwise a "stale:" copy is stored with its own TTL.
    defp store_on_cache(data, key, expires_in, nil), do: store_on_cache(data, key, expires_in)

    defp store_on_cache(data, key, expires_in, stale_expires_in) do
      data
      |> store_on_cache(key, expires_in)
      |> store_on_cache("stale:#{key}", stale_expires_in)
    end

    # store_on_cache/3: never cache parse failures; a function TTL is
    # evaluated against the data being cached.
    defp store_on_cache(:parse_error, _key, _expires_in), do: :parse_error

    defp store_on_cache(data, key, expires_in) when is_function(expires_in) do
      Cachex.set(:content_gateway_cache, key, data, [ttl: expires_in.(data)])
      data
    end

    defp store_on_cache(data, key, expires_in) do
      Cachex.set(:content_gateway_cache, key, data, [ttl: expires_in])
      data
    end
  end
end
end
| 39.569536 | 144 | 0.611548 |
93f51db81a2764186d14c775f3aef749dce7cfeb | 3,304 | ex | Elixir | clients/firebase_dynamic_links/lib/google_api/firebase_dynamic_links/v1/model/get_ios_reopen_attribution_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/firebase_dynamic_links/lib/google_api/firebase_dynamic_links/v1/model/get_ios_reopen_attribution_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/firebase_dynamic_links/lib/google_api/firebase_dynamic_links/v1/model/get_ios_reopen_attribution_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
# Auto-generated API model. `use GoogleApi.Gax.ModelBase` provides the
# `field/1` macro below, which registers each attribute for JSON
# (de)serialization; all attributes default to `nil`.
defmodule GoogleApi.FirebaseDynamicLinks.V1.Model.GetIosReopenAttributionResponse do
@moduledoc """
Response for iSDK to get reopen attribution for app universal link open
deeplinking. This endpoint is meant for only iOS requests.
## Attributes
* `deepLink` (*type:* `String.t`, *default:* `nil`) - The deep-link attributed the app universal link open. For both regular
FDL links and invite FDL links.
* `invitationId` (*type:* `String.t`, *default:* `nil`) - Optional invitation ID, for only invite typed requested FDL links.
* `iosMinAppVersion` (*type:* `String.t`, *default:* `nil`) - FDL input value of the "&imv=" parameter, minimum app version to be
returned to Google Firebase SDK running on iOS-9.
* `resolvedLink` (*type:* `String.t`, *default:* `nil`) - The entire FDL, expanded from a short link. It is the same as the
requested_link, if it is long.
* `utmCampaign` (*type:* `String.t`, *default:* `nil`) - Scion campaign value to be propagated by iSDK to Scion at app-reopen.
* `utmContent` (*type:* `String.t`, *default:* `nil`) - Scion content value to be propagated by iSDK to Scion at app-reopen.
* `utmMedium` (*type:* `String.t`, *default:* `nil`) - Scion medium value to be propagated by iSDK to Scion at app-reopen.
* `utmSource` (*type:* `String.t`, *default:* `nil`) - Scion source value to be propagated by iSDK to Scion at app-reopen.
* `utmTerm` (*type:* `String.t`, *default:* `nil`) - Scion term value to be propagated by iSDK to Scion at app-reopen.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:deepLink => String.t(),
:invitationId => String.t(),
:iosMinAppVersion => String.t(),
:resolvedLink => String.t(),
:utmCampaign => String.t(),
:utmContent => String.t(),
:utmMedium => String.t(),
:utmSource => String.t(),
:utmTerm => String.t()
}
field(:deepLink)
field(:invitationId)
field(:iosMinAppVersion)
field(:resolvedLink)
field(:utmCampaign)
field(:utmContent)
field(:utmMedium)
field(:utmSource)
field(:utmTerm)
end
# Poison protocol implementations delegating to the generated model helpers:
# decoding goes through the model module itself, encoding through the shared
# GoogleApi.Gax.ModelBase encoder.
defimpl Poison.Decoder,
for: GoogleApi.FirebaseDynamicLinks.V1.Model.GetIosReopenAttributionResponse do
def decode(value, options) do
GoogleApi.FirebaseDynamicLinks.V1.Model.GetIosReopenAttributionResponse.decode(value, options)
end
end
defimpl Poison.Encoder,
for: GoogleApi.FirebaseDynamicLinks.V1.Model.GetIosReopenAttributionResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.909091 | 133 | 0.697337 |
93f580570a6b9cf638ea7d348912d41541e81c7e | 692 | exs | Elixir | Elixir/benchmark/mix.exs | kkirstein/proglang-playground | d00be09ba2bb2351c6f5287cc4d93fcaf21f75fd | [
"MIT"
] | null | null | null | Elixir/benchmark/mix.exs | kkirstein/proglang-playground | d00be09ba2bb2351c6f5287cc4d93fcaf21f75fd | [
"MIT"
] | null | null | null | Elixir/benchmark/mix.exs | kkirstein/proglang-playground | d00be09ba2bb2351c6f5287cc4d93fcaf21f75fd | [
"MIT"
] | null | null | null | defmodule Benchmark.Mixfile do
use Mix.Project
# Mix project definition for the :benchmark app.
def project do
  [
    app: :benchmark,
    version: "0.0.1",
    elixir: "~> 1.7",
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    deps: deps()
  ]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
# Only the :logger OTP application is started alongside this one.
[applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type `mix help deps` for more examples and options
defp deps do
# :pipe — pipeline helper macros; presumably used by the benchmark code (TODO confirm).
[{:pipe, "~> 0.0.2"}]
end
end
| 20.969697 | 77 | 0.602601 |
93f585717307122b9b3538ceb44d3cd6c03db100 | 329 | ex | Elixir | lib/data_processor_web/controllers/handler_controller.ex | DylanGuedes/batch_processor | 2f3122a5f1a31557a39fac67aa62e297c39e8bf5 | [
"Apache-2.0"
] | null | null | null | lib/data_processor_web/controllers/handler_controller.ex | DylanGuedes/batch_processor | 2f3122a5f1a31557a39fac67aa62e297c39e8bf5 | [
"Apache-2.0"
] | 3 | 2018-08-17T13:42:45.000Z | 2018-08-17T17:05:11.000Z | lib/data_processor_web/controllers/handler_controller.ex | DylanGuedes/batch_processor | 2f3122a5f1a31557a39fac67aa62e297c39e8bf5 | [
"Apache-2.0"
] | null | null | null | defmodule DataProcessorWeb.HandlerController do
use DataProcessorWeb, :controller
# Handler modules listed on the index page; assumed to share a common
# handler contract — confirm against the DataProcessor.Handlers modules.
@handlers [
DataProcessor.Handlers.LinearRegression,
DataProcessor.Handlers.StatisticalDescribe,
DataProcessor.Handlers.KMeans
]
# Renders the list of available handlers.
def index(conn, _params) do
  render(conn, "index.html", handlers: @handlers)
end
end
| 21.933333 | 48 | 0.75076 |
93f5af5bc6bc5f8dc8f9230734f60fa30306735a | 14,332 | ex | Elixir | lib/livebook/evaluator.ex | chriskdon/livebook | b1b573b5f4b49b2e90585e8f511e7b261dcae550 | [
"Apache-2.0"
] | null | null | null | lib/livebook/evaluator.ex | chriskdon/livebook | b1b573b5f4b49b2e90585e8f511e7b261dcae550 | [
"Apache-2.0"
] | null | null | null | lib/livebook/evaluator.ex | chriskdon/livebook | b1b573b5f4b49b2e90585e8f511e7b261dcae550 | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Evaluator do
@moduledoc false
# A process responsible for evaluating notebook code.
#
# The process receives evaluation requests and synchronously
# evaluates the given code within itself (rather than spawning a separate process).
# It stores the resulting binding and env as part of the state.
#
# It's important to store the binding in the same process
# where the evaluation happens, as otherwise we would have to
# send them between processes, effectively copying potentially large data.
#
# Note that this process is intentionally not a GenServer,
# because during evaluation we may receive arbitrary
# messages and we don't want to consume them from the inbox,
# as GenServer does.
require Logger
alias Livebook.Evaluator
# A handle to an evaluator: its pid plus a unique ref used to tag messages.
@type t :: %{pid: pid(), ref: reference()}
@type state :: %{
ref: reference(),
formatter: module(),
io_proxy: pid(),
object_tracker: pid(),
contexts: %{ref() => context()},
initial_context: context()
}
@typedoc """
An evaluation context.
"""
@type context :: %{binding: Code.binding(), env: Macro.Env.t(), id: binary()}
@typedoc """
A term used to identify evaluation.
"""
@type ref :: term()
@typedoc """
Either {:ok, result} for successful evaluation
or {:error, kind, error, stacktrace} for a failed one.
"""
@type evaluation_response ::
{:ok, any()} | {:error, Exception.kind(), any(), Exception.stacktrace()}
# We store evaluation envs in process dictionary, so that we can
# build intellisense context without asking the evaluator
@env_key :evaluation_env
@initial_env_key :initial_env
## API
@doc """
Starts the evaluator.
Options:
* `object_tracker` - a PID of `Livebook.Evaluator.ObjectTracker`, required
* `formatter` - a module implementing the `Livebook.Evaluator.Formatter` behaviour,
used for transforming evaluation response before it's sent to the client
"""
@spec start_link(keyword()) :: {:ok, pid(), t()} | {:error, term()}
def start_link(opts \\ []) do
# :proc_lib is used instead of GenServer so the process keeps full control
# over its inbox (see the module comment); init/1 acks with the handle map.
case :proc_lib.start_link(__MODULE__, :init, [opts]) do
{:error, error} -> {:error, error}
evaluator -> {:ok, evaluator.pid, evaluator}
end
end
@doc """
Computes the memory usage from this evaluator node.
"""
@spec memory :: Livebook.Runtime.runtime_memory()
def memory do
  # :erlang.memory/0 returns a keyword list; :other is whatever remains of
  # :total after the explicitly reported categories are subtracted.
  mem = Map.new(:erlang.memory())
  accounted = mem.processes + mem.atom + mem.binary + mem.code + mem.ets

  %{
    total: mem.total,
    processes: mem.processes,
    atom: mem.atom,
    binary: mem.binary,
    code: mem.code,
    ets: mem.ets,
    other: mem.total - accounted
  }
end
@doc """
Asynchronously parses and evaluates the given code.
Any exceptions are captured, in which case this method returns an error.
The evaluator stores the resulting binding and environment under `ref`.
Any subsequent calls may specify `prev_ref` pointing to a previous evaluation,
in which case the corresponding binding and environment are used during evaluation.
Evaluation response is sent to the process identified by `send_to` as `{:evaluation_response, ref, response, metadata}`.
Note that response is transformed with the configured formatter (identity by default).
## Options
* `:file` - file to which the evaluated code belongs. Most importantly,
this has an impact on the value of `__DIR__`.
"""
@spec evaluate_code(t(), pid(), String.t(), ref(), ref() | nil, keyword()) :: :ok
def evaluate_code(evaluator, send_to, code, ref, prev_ref \\ nil, opts \\ []) when ref != nil do
cast(evaluator, {:evaluate_code, send_to, code, ref, prev_ref, opts})
end
@doc """
Fetches evaluation context (binding and environment) by evaluation reference.
## Options
* `cached_id` - id of context that the sender may already have,
if it matches the fetched context the `{:error, :not_modified}`
tuple is returned instead
"""
@spec fetch_evaluation_context(t(), ref(), keyword()) ::
{:ok, context()} | {:error, :not_modified}
def fetch_evaluation_context(evaluator, ref, opts \\ []) do
cached_id = opts[:cached_id]
call(evaluator, {:fetch_evaluation_context, ref, cached_id})
end
@doc """
Fetches an evaluation context from another `Evaluator` process
and configures it as the initial context for this evaluator.
The process dictionary is also copied to match the given evaluator.
"""
@spec initialize_from(t(), t(), ref()) :: :ok
def initialize_from(evaluator, source_evaluator, source_evaluation_ref) do
call(evaluator, {:initialize_from, source_evaluator, source_evaluation_ref})
end
@doc """
Removes the evaluation identified by `ref` from history,
so that further evaluations cannot use it.
"""
@spec forget_evaluation(t(), ref()) :: :ok
def forget_evaluation(evaluator, ref) do
# Fire-and-forget: handled asynchronously by the evaluator loop.
cast(evaluator, {:forget_evaluation, ref})
end
@doc """
Returns an empty intellisense context.
"""
@spec intellisense_context() :: Livebook.Intellisense.intellisense_context()
def intellisense_context() do
  # TODO: Use Code.env_for_eval and eval_quoted_with_env on Elixir v1.14+
  # The map_binding callback simply applies the function to an empty binding.
  %{env: :elixir.env_for_eval([]), map_binding: fn fun -> fun.([]) end}
end
@doc """
Builds intellisense context from the given evaluation.
"""
@spec intellisense_context(t(), ref()) :: Livebook.Intellisense.intellisense_context()
def intellisense_context(evaluator, ref) do
# Reading the env straight out of the evaluator's process dictionary
# avoids a synchronous round-trip to a possibly busy evaluator.
{:dictionary, dictionary} = Process.info(evaluator.pid, :dictionary)
env =
find_in_dictionary(dictionary, {@env_key, ref}) ||
find_in_dictionary(dictionary, @initial_env_key)
map_binding = fn fun -> map_binding(evaluator, ref, fun) end
%{env: env, map_binding: map_binding}
end
# Looks up `key` in a [{key, value}] dictionary dump. Note: find_value
# keeps searching past entries whose value is nil/false (truthiness-based).
defp find_in_dictionary(dictionary, key) do
Enum.find_value(dictionary, fn
{^key, value} -> value
_pair -> nil
end)
end
# Applies the given function to evaluation binding
defp map_binding(evaluator, ref, fun) do
call(evaluator, {:map_binding, ref, fun})
end
# Hand-rolled cast/call (this is not a GenServer); messages are tagged with
# the evaluator ref so the loop only consumes messages addressed to it.
defp cast(evaluator, message) do
send(evaluator.pid, {:cast, evaluator.ref, message})
:ok
end
defp call(evaluator, message) do
# Monitor so the caller exits instead of hanging if the evaluator dies.
call_ref = Process.monitor(evaluator.pid)
send(evaluator.pid, {:call, evaluator.ref, self(), call_ref, message})
receive do
{^call_ref, reply} ->
reply
{:DOWN, ^call_ref, _, _, reason} ->
exit({reason, {__MODULE__, :call, [evaluator, message]}})
end
end
## Callbacks
# :temporary — a crashed evaluator is not restarted by the supervisor.
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :worker,
restart: :temporary
}
end
# :proc_lib entry point: sets up the IO proxy, acks the starter with the
# evaluator handle, then enters the receive loop.
def init(opts) do
object_tracker = Keyword.fetch!(opts, :object_tracker)
formatter = Keyword.get(opts, :formatter, Evaluator.IdentityFormatter)
{:ok, io_proxy} = Evaluator.IOProxy.start_link(self(), object_tracker)
# Use the dedicated IO device as the group leader, so that
# it intercepts all :stdio requests and also handles Livebook
# specific ones
Process.group_leader(self(), io_proxy)
evaluator_ref = make_ref()
state = initial_state(evaluator_ref, formatter, io_proxy, object_tracker)
evaluator = %{pid: self(), ref: evaluator_ref}
:proc_lib.init_ack(evaluator)
loop(state)
end
defp initial_state(evaluator_ref, formatter, io_proxy, object_tracker) do
context = initial_context()
# Mirror the initial env into the process dictionary for intellisense.
Process.put(@initial_env_key, context.env)
%{
evaluator_ref: evaluator_ref,
formatter: formatter,
io_proxy: io_proxy,
object_tracker: object_tracker,
contexts: %{},
initial_context: context
}
end
# Main receive loop; only consumes messages tagged with our evaluator ref,
# leaving all other messages in the inbox for evaluated code to receive.
defp loop(%{evaluator_ref: evaluator_ref} = state) do
receive do
{:call, ^evaluator_ref, pid, ref, message} ->
{:reply, reply, state} = handle_call(message, pid, state)
send(pid, {ref, reply})
loop(state)
{:cast, ^evaluator_ref, message} ->
{:noreply, state} = handle_cast(message, state)
loop(state)
end
end
defp initial_context() do
# TODO: Use Code.env_for_eval and eval_quoted_with_env on Elixir v1.14+
env = :elixir.env_for_eval([])
%{binding: [], env: env, id: random_id()}
end
# Evaluates `code` against the context stored under `prev_ref`, stores the
# resulting context under `ref` and sends the formatted response to `send_to`.
defp handle_cast({:evaluate_code, send_to, code, ref, prev_ref, opts}, state) do
Evaluator.IOProxy.configure(state.io_proxy, send_to, ref)
Evaluator.ObjectTracker.remove_reference(state.object_tracker, {self(), ref})
context = get_context(state, prev_ref)
file = Keyword.get(opts, :file, "nofile")
context = put_in(context.env.file, file)
start_time = System.monotonic_time()
{result_context, response, code_error} =
case eval(code, context.binding, context.env) do
{:ok, result, binding, env} ->
result_context = %{binding: binding, env: env, id: random_id()}
response = {:ok, result}
{result_context, response, nil}
{:error, kind, error, stacktrace, code_error} ->
# On failure the previous context is kept under `ref`.
response = {:error, kind, error, stacktrace}
{context, response, code_error}
end
evaluation_time_ms = get_execution_time_delta(start_time)
state = put_context(state, ref, result_context)
Evaluator.IOProxy.flush(state.io_proxy)
Evaluator.IOProxy.clear_input_cache(state.io_proxy)
output = state.formatter.format_response(response)
metadata = %{
evaluation_time_ms: evaluation_time_ms,
memory_usage: memory(),
code_error: code_error
}
send(send_to, {:evaluation_response, ref, output, metadata})
:erlang.garbage_collect(self())
{:noreply, state}
end
# Drops the context stored under `ref` and releases tracked objects.
defp handle_cast({:forget_evaluation, ref}, state) do
state = delete_context(state, ref)
Evaluator.ObjectTracker.remove_reference(state.object_tracker, {self(), ref})
:erlang.garbage_collect(self())
{:noreply, state}
end
# Returns the stored context, or :not_modified when the caller's cached
# copy (identified by id) is already up to date.
defp handle_call({:fetch_evaluation_context, ref, cached_id}, _from, state) do
context = get_context(state, ref)
reply =
if context.id == cached_id do
{:error, :not_modified}
else
{:ok, context}
end
{:reply, reply, state}
end
# Adopts another evaluator's context as our initial context (no-op if our
# cached copy is already current).
defp handle_call({:initialize_from, source_evaluator, source_evaluation_ref}, _from, state) do
state =
case Evaluator.fetch_evaluation_context(
source_evaluator,
source_evaluation_ref,
cached_id: state.initial_context.id
) do
{:ok, context} ->
# If the context changed, mirror the process dictionary again
copy_process_dictionary_from(source_evaluator)
Process.put(@initial_env_key, context.env)
put_in(state.initial_context, context)
{:error, :not_modified} ->
state
end
{:reply, :ok, state}
end
# Runs `fun` against the binding of the given evaluation and replies with
# the result.
defp handle_call({:map_binding, ref, fun}, _from, state) do
context = get_context(state, ref)
result = fun.(context.binding)
{:reply, result, state}
end
# Context storage keeps the env mirrored in the process dictionary so
# intellisense can read it without messaging this process.
defp put_context(state, ref, context) do
Process.put({@env_key, ref}, context.env)
put_in(state.contexts[ref], context)
end
defp delete_context(state, ref) do
Process.delete({@env_key, ref})
{_, state} = pop_in(state.contexts[ref])
state
end
# Falls back to the initial context for unknown (or nil) refs.
defp get_context(state, ref) do
Map.get_lazy(state.contexts, ref, fn -> state.initial_context end)
end
# Parses and evaluates `code`; any thrown/raised error is captured and
# returned together with a pruned stacktrace and optional code_error info.
defp eval(code, binding, env) do
try do
quoted = Code.string_to_quoted!(code, file: env.file)
# TODO: Use Code.eval_quoted_with_env/3 on Elixir v1.14
{result, binding, env} = :elixir.eval_quoted(quoted, binding, env)
# TODO: Remove this line on Elixir v1.14 as binding propagates to env correctly
{_, binding, env} = :elixir.eval_forms(:ok, binding, env)
{:ok, result, binding, env}
catch
kind, error ->
stacktrace = prune_stacktrace(__STACKTRACE__)
code_error =
if code_error?(error) and (error.file == env.file and error.file != "nofile") do
%{line: error.line, description: error.description}
else
nil
end
{:error, kind, error, stacktrace, code_error}
end
end
# True for exceptions that carry file/line/description (syntax-level errors).
defp code_error?(%SyntaxError{}), do: true
defp code_error?(%TokenMissingError{}), do: true
defp code_error?(%CompileError{}), do: true
defp code_error?(_error), do: false
# Adapted from https://github.com/elixir-lang/elixir/blob/1c1654c88adfdbef38ff07fc30f6fbd34a542c07/lib/iex/lib/iex/evaluator.ex#L355-L372
@elixir_internals [:elixir, :elixir_expand, :elixir_compiler, :elixir_module] ++
[:elixir_clauses, :elixir_lexical, :elixir_def, :elixir_map] ++
[:elixir_erl, :elixir_erl_clauses, :elixir_erl_pass]
# Strips evaluator/compiler frames so the stacktrace shown to the user
# only contains their own code.
defp prune_stacktrace(stacktrace) do
# The order in which each drop_while is listed is important.
# For example, the user may call Code.eval_string/2 in their code
# and if there is an error we should not remove erl_eval
# and eval_bits information from the user stacktrace.
stacktrace
|> Enum.reverse()
|> Enum.drop_while(&(elem(&1, 0) == :proc_lib))
|> Enum.drop_while(&(elem(&1, 0) == :gen_server))
|> Enum.drop_while(&(elem(&1, 0) == __MODULE__))
|> Enum.drop_while(&(elem(&1, 0) == :elixir))
|> Enum.drop_while(&(elem(&1, 0) in [:erl_eval, :eval_bits]))
|> Enum.reverse()
|> Enum.reject(&(elem(&1, 0) in @elixir_internals))
end
# 160 random bits, base32-encoded — used as context ids.
defp random_id() do
:crypto.strong_rand_bytes(20) |> Base.encode32(case: :lower)
end
# Copies user-owned process dictionary entries from the source evaluator,
# skipping OTP-internal ("$"-prefixed) and our own bookkeeping keys.
defp copy_process_dictionary_from(source_evaluator) do
{:dictionary, dictionary} = Process.info(source_evaluator.pid, :dictionary)
for {key, value} <- dictionary, not internal_dictionary_key?(key) do
Process.put(key, value)
end
end
defp internal_dictionary_key?("$" <> _), do: true
defp internal_dictionary_key?({@env_key, _ref}), do: true
defp internal_dictionary_key?(@initial_env_key), do: true
defp internal_dictionary_key?(_), do: false
# Elapsed time since `started_at` (a monotonic timestamp), in milliseconds.
defp get_execution_time_delta(started_at) do
System.monotonic_time()
|> Kernel.-(started_at)
|> System.convert_time_unit(:native, :millisecond)
end
end
| 31.021645 | 139 | 0.667178 |
93f6073ffda986ffca10a51b38c449b1eb32d751 | 1,267 | ex | Elixir | debian_hdfs/emacsen-install.ex | sreedishps/scribe | 033b701f665b195229b8b93505ccb4cd2f8ac0f7 | [
"Apache-2.0"
] | 7 | 2015-01-29T14:41:28.000Z | 2021-03-09T01:29:58.000Z | debian_hdfs/emacsen-install.ex | sreedishps/scribe | 033b701f665b195229b8b93505ccb4cd2f8ac0f7 | [
"Apache-2.0"
] | 2 | 2015-03-23T03:59:12.000Z | 2015-03-27T07:12:41.000Z | debian_hdfs/emacsen-install.ex | sreedishps/scribe | 033b701f665b195229b8b93505ccb4cd2f8ac0f7 | [
"Apache-2.0"
] | 5 | 2015-03-03T06:54:00.000Z | 2018-10-16T21:08:02.000Z | #! /bin/sh -e
# /usr/lib/emacsen-common/packages/install/scribe-server-hdfs-orig
# Written by Jim Van Zandt <[email protected]>, borrowing heavily
# from the install scripts for gettext by Santiago Vila
# <[email protected]> and octave by Dirk Eddelbuettel <[email protected]>.
# Debian emacsen-common install hook: byte-compiles this package's elisp
# files for the emacs flavor given as $1.
FLAVOR=$1
PACKAGE=scribe-server-hdfs-orig
# Nothing to do for the generic "emacs" pseudo-flavor.
if [ ${FLAVOR} = emacs ]; then exit 0; fi
echo install/${PACKAGE}: Handling install for emacsen flavor ${FLAVOR}
#FLAVORTEST=`echo $FLAVOR | cut -c-6`
#if [ ${FLAVORTEST} = xemacs ] ; then
# SITEFLAG="-no-site-file"
#else
# SITEFLAG="--no-site-file"
#fi
FLAGS="${SITEFLAG} -q -batch -l path.el -f batch-byte-compile"
# Source .el files live under the package dir; compiled .elc files go into
# the flavor-specific site-lisp directory.
ELDIR=/usr/share/emacs/site-lisp/${PACKAGE}
ELCDIR=/usr/share/${FLAVOR}/site-lisp/${PACKAGE}
# Install-info-altdir does not actually exist.
# Maybe somebody will write it.
if test -x /usr/sbin/install-info-altdir; then
echo install/${PACKAGE}: install Info links for ${FLAVOR}
install-info-altdir --quiet --section "" "" --dirname=${FLAVOR} /usr/share/info/${PACKAGE}.info.gz
fi
install -m 755 -d ${ELCDIR}
cd ${ELDIR}
FILES=`echo *.el`
cp ${FILES} ${ELCDIR}
cd ${ELCDIR}
# path.el adds the current dir to load-path and silences compile warnings;
# it is fed to the flavor's batch byte-compiler below.
cat << EOF > path.el
(setq load-path (cons "." load-path) byte-compile-warnings nil)
EOF
${FLAVOR} ${FLAGS} ${FILES}
rm -f *.el path.el
exit 0
| 27.543478 | 102 | 0.690608 |
93f6346b1d68e919d33db3e3c0e7f8c7e1d784ee | 769 | ex | Elixir | lib/horde/graceful_shutdown_manager.ex | amatalai/horde | b94c96e186450672279d40b754c36b80dea9e2b8 | [
"MIT"
] | null | null | null | lib/horde/graceful_shutdown_manager.ex | amatalai/horde | b94c96e186450672279d40b754c36b80dea9e2b8 | [
"MIT"
] | null | null | null | lib/horde/graceful_shutdown_manager.ex | amatalai/horde | b94c96e186450672279d40b754c36b80dea9e2b8 | [
"MIT"
] | null | null | null | defmodule Horde.GracefulShutdownManager do
use GenServer
require Logger
# Supervisor child spec. GenServer.start_link/3 receives: this module, the
# processes CRDT pid as the init argument, and only the :name option.
def child_spec(options) do
  start_args = [__MODULE__, Keyword.get(options, :processes_pid), Keyword.take(options, [:name])]

  %{
    id: __MODULE__,
    start: {GenServer, :start_link, start_args}
  }
end
# State is {processes_pid, shutting_down?}; the flag starts out false.
def init(processes_pid), do: {:ok, {processes_pid, false}}
# Marks the node as shutting down; subsequent :shut_down casts will be
# forwarded to the processes pid.
def handle_call(:horde_shutting_down, _from, {processes_pid, _shutting_down}) do
  {:reply, :ok, {processes_pid, true}}
end
# While shutting down, re-register the child spec with a `nil` node via a
# cast to the processes pid so the child can be taken over elsewhere.
def handle_cast({:shut_down, child_spec}, {processes_pid, true} = state) do
  operation = {:operation, {:add, [child_spec.id, {nil, child_spec}]}}
  GenServer.cast(processes_pid, operation)
  {:noreply, state}
end

# Not shutting down: ignore the message.
def handle_cast({:shut_down, _child_spec}, state) do
  {:noreply, state}
end
end
| 21.971429 | 92 | 0.644993 |
93f6412d419bc307d49da21c5a97209d6f7b45d4 | 1,247 | ex | Elixir | web/views/error_helpers.ex | NorthernTwig/The-Fiddler | 33a508b3c544084c8c6afd5590d561c162466975 | [
"MIT"
] | 2 | 2017-07-25T09:12:57.000Z | 2017-07-25T11:25:07.000Z | web/views/error_helpers.ex | NorthernTwig/The-Fiddler | 33a508b3c544084c8c6afd5590d561c162466975 | [
"MIT"
] | null | null | null | web/views/error_helpers.ex | NorthernTwig/The-Fiddler | 33a508b3c544084c8c6afd5590d561c162466975 | [
"MIT"
] | 1 | 2018-10-29T18:53:04.000Z | 2018-10-29T18:53:04.000Z | defmodule TakeItForASpin.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
  # Returns nil when the field has no error (if/2 without else).
  error = form.errors[field]
  if error, do: content_tag(:span, translate_error(error), class: "help-block")
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
  # Ecto error messages are translated through our Gettext backend using the
  # "errors" domain (translations live in errors.po).
  #
  # Ecto passes the :count option when the message should be pluralized, in
  # which case the plural-aware call is used:
  #
  #     dngettext "errors", "1 file", "%{count} files", count
  #
  # Otherwise the singular form is enough:
  #
  #     dgettext "errors", "is invalid"
  #
  count = opts[:count]

  if count do
    Gettext.dngettext(TakeItForASpin.Gettext, "errors", msg, msg, count, opts)
  else
    Gettext.dgettext(TakeItForASpin.Gettext, "errors", msg, opts)
  end
end
end
| 30.414634 | 80 | 0.672815 |
93f66d2a532253ea3025f49247bca39e669fecb1 | 6,047 | exs | Elixir | test/phoenix_html/inputs_for_test.exs | aptinio/phoenix_html | 2648511437bdc9f054774adc8a9a4f01e3c5d4ad | [
"MIT"
] | 310 | 2015-05-03T13:08:41.000Z | 2022-03-11T14:53:20.000Z | test/phoenix_html/inputs_for_test.exs | aptinio/phoenix_html | 2648511437bdc9f054774adc8a9a4f01e3c5d4ad | [
"MIT"
] | 347 | 2015-05-03T19:01:00.000Z | 2022-02-22T11:56:17.000Z | test/phoenix_html/inputs_for_test.exs | aptinio/phoenix_html | 2648511437bdc9f054774adc8a9a4f01e3c5d4ad | [
"MIT"
] | 268 | 2015-05-03T18:53:32.000Z | 2022-03-22T14:15:54.000Z | defmodule Phoenix.HTML.InputsForTest do
use ExUnit.Case, async: true
import Phoenix.HTML
import Phoenix.HTML.Form
@doc """
A function that executes `inputs_for/4` and
extracts its inner contents for assertion.
"""
def safe_inputs_for(field, opts \\ [], fun) do
# The mark delimits the inputs_for output inside the rendered form so it
# can be cut out of the surrounding form markup below.
mark = "--PLACEHOLDER--"
{multipart, opts} = Keyword.pop(opts, :multipart, false)
# Simulated GET request carrying "search" params with both a singular
# ("date") and a collection ("dates") nested field.
conn =
Plug.Test.conn(:get, "/foo", %{
"search" => %{
"date" => %{"year" => "2020", "month" => "4", "day" => "17"},
"dates" => %{
"0" => %{"year" => "2010", "month" => "4", "day" => "17"},
"1" => %{"year" => "2020", "month" => "4", "day" => "17"}
}
}
})
contents =
safe_to_string(
form_for(conn, "/", [as: :search, multipart: multipart], fn f ->
html_escape([mark, inputs_for(f, field, opts, fun), mark])
end)
)
# Keep only what was rendered between the two marks.
[_, inner, _] = String.split(contents, mark)
inner
end
## Cardinality one
test "one: inputs_for/4 without default and field is not present" do
contents =
safe_inputs_for(:unknown, fn f ->
refute f.index
text_input(f, :year)
end)
assert contents ==
~s(<input id="search_unknown_year" name="search[unknown][year]" type="text">)
end
test "one: inputs_for/4 does not generate index" do
safe_inputs_for(:unknown, fn f ->
refute f.index
"ok"
end)
end
test "one: inputs_for/4 without default and field is present" do
contents =
safe_inputs_for(:date, fn f ->
text_input(f, :year)
end)
assert contents ==
~s(<input id="search_date_year" name="search[date][year]" type="text" value="2020">)
end
test "one: inputs_for/4 with default and field is not present" do
contents =
safe_inputs_for(:unknown, [default: %{year: 2015}], fn f ->
text_input(f, :year)
end)
assert contents ==
~s(<input id="search_unknown_year" name="search[unknown][year]" type="text" value="2015">)
end
test "one: inputs_for/4 with default and field is present" do
contents =
safe_inputs_for(:date, [default: %{year: 2015}], fn f ->
text_input(f, :year)
end)
assert contents ==
~s(<input id="search_date_year" name="search[date][year]" type="text" value="2020">)
end
test "one: inputs_for/4 with custom name and id" do
contents =
safe_inputs_for(:date, [as: :foo, id: :bar], fn f ->
text_input(f, :year)
end)
assert contents == ~s(<input id="bar_year" name="foo[year]" type="text" value="2020">)
end
## Cardinality many
test "many: inputs_for/4 with file field generates file input" do
contents =
safe_inputs_for(:unknown, [default: [%{}, %{}], multipart: true], fn f ->
assert f.index in [0, 1]
file_input(f, :file)
end)
assert contents ==
~s(<input id="search_unknown_0_file" name="search[unknown][0][file]" type="file">) <>
~s(<input id="search_unknown_1_file" name="search[unknown][1][file]" type="file">)
end
test "many: inputs_for/4 with default and field is not present" do
contents =
safe_inputs_for(:unknown, [default: [%{year: 2012}, %{year: 2018}]], fn f ->
assert f.index in [0, 1]
text_input(f, :year)
end)
assert contents ==
~s(<input id="search_unknown_0_year" name="search[unknown][0][year]" type="text" value="2012">) <>
~s(<input id="search_unknown_1_year" name="search[unknown][1][year]" type="text" value="2018">)
end
test "many: inputs_for/4 generates indexes" do
safe_inputs_for(:unknown, [default: [%{year: 2012}]], fn f ->
assert f.index == 0
"ok"
end)
safe_inputs_for(:unknown, [default: [%{year: 2012}, %{year: 2018}]], fn f ->
assert f.index in [0, 1]
"ok"
end)
end
test "many: inputs_for/4 with default and field is present" do
contents =
safe_inputs_for(:dates, [default: [%{year: 2012}, %{year: 2018}]], fn f ->
text_input(f, :year)
end)
assert contents ==
~s(<input id="search_dates_0_year" name="search[dates][0][year]" type="text" value="2010">) <>
~s(<input id="search_dates_1_year" name="search[dates][1][year]" type="text" value="2020">)
end
test "many: inputs_for/4 with name and id" do
contents =
safe_inputs_for(
:dates,
[default: [%{year: 2012}, %{year: 2018}], as: :foo, id: :bar],
fn f ->
text_input(f, :year)
end
)
assert contents ==
~s(<input id="bar_0_year" name="foo[0][year]" type="text" value="2010">) <>
~s(<input id="bar_1_year" name="foo[1][year]" type="text" value="2020">)
end
@prepend_append [
prepend: [%{year: 2008}],
append: [%{year: 2022}],
default: [%{year: 2012}, %{year: 2018}]
]
test "many: inputs_for/4 with prepend/append and field is not present" do
contents =
safe_inputs_for(:unknown, @prepend_append, fn f ->
text_input(f, :year)
end)
assert contents ==
~s(<input id="search_unknown_0_year" name="search[unknown][0][year]" type="text" value="2008">) <>
~s(<input id="search_unknown_1_year" name="search[unknown][1][year]" type="text" value="2012">) <>
~s(<input id="search_unknown_2_year" name="search[unknown][2][year]" type="text" value="2018">) <>
~s(<input id="search_unknown_3_year" name="search[unknown][3][year]" type="text" value="2022">)
end
test "many: inputs_for/4 with prepend/append and field is present" do
contents =
safe_inputs_for(:dates, @prepend_append, fn f ->
text_input(f, :year)
end)
assert contents ==
~s(<input id="search_dates_0_year" name="search[dates][0][year]" type="text" value="2010">) <>
~s(<input id="search_dates_1_year" name="search[dates][1][year]" type="text" value="2020">)
end
end
| 31.826316 | 113 | 0.577973 |
93f67719e99e6247aa6366feebf3f250c7c25671 | 537 | ex | Elixir | lib/monis_app_web/views/error_view.ex | monisapp/api | 4096d50da23e0b562a309b9d0ccf6b211f431d25 | [
"MIT"
] | 1 | 2020-04-24T19:40:46.000Z | 2020-04-24T19:40:46.000Z | lib/monis_app_web/views/error_view.ex | monisapp/api | 4096d50da23e0b562a309b9d0ccf6b211f431d25 | [
"MIT"
] | null | null | null | lib/monis_app_web/views/error_view.ex | monisapp/api | 4096d50da23e0b562a309b9d0ccf6b211f431d25 | [
"MIT"
] | null | null | null | defmodule MonisAppWeb.ErrorView do
  use MonisAppWeb, :view
  # A particular status code / format pair can be customized by defining an
  # explicit render clause, for example:
  #
  #     def render("500.json", _assigns) do
  #       %{errors: %{detail: "Internal Server Error"}}
  #     end
  #
  # Without one, Phoenix falls back to this callback, which derives the
  # message from the template name ("404.json" becomes "Not Found").
  def template_not_found(template, _assigns) do
    detail = Phoenix.Controller.status_message_from_template(template)
    %{errors: %{detail: detail}}
  end
end
| 31.588235 | 83 | 0.72067 |
93f6d564e2146aead47cdb8409dacc224bdb7ca0 | 1,163 | exs | Elixir | test/bisect_test.exs | odo/elixir_bisect | 59b40a8a0026bd051f4260b1ea2d170e4344db1e | [
"MIT"
] | null | null | null | test/bisect_test.exs | odo/elixir_bisect | 59b40a8a0026bd051f4260b1ea2d170e4344db1e | [
"MIT"
] | null | null | null | test/bisect_test.exs | odo/elixir_bisect | 59b40a8a0026bd051f4260b1ea2d170e4344db1e | [
"MIT"
] | null | null | null | defmodule BisectTest do
  use ExUnit.Case
  doctest Bisect
  @backend Bisect.List
  # Even numbers 2, 4, ..., 200 stored at indexes 0..99.
  @double_list Enum.map(1..100, &(&1 * 2))
  test "find" do
    assert Bisect.find(2, @backend, @double_list) == {0, 2}
    assert Bisect.find(200, @backend, @double_list) == {99, 200}
    assert Bisect.find(84, @backend, @double_list) == {41, 84}
  end
  test "find off limts" do
    # Values below/above the list clamp to the first/last element.
    assert Bisect.find(-10, @backend, @double_list) == {0, 2}
    assert Bisect.find(1000, @backend, @double_list) == {99, 200}
  end
  test "find in between" do
    # 85 is absent; the next greater element is returned.
    assert Bisect.find(85, @backend, @double_list) == {42, 86}
  end
  test "find rightmost with repeats" do
    # index[0, 1, 2, 3, 4, 5, 6, 7, 8]
    list = [1, 1, 1, 2, 2, 2, 3, 3, 3]
    assert Bisect.find(0, @backend, list) == {0, 1}
    assert Bisect.find(1, @backend, list) == {2, 1}
    assert Bisect.find(2, @backend, list) == {5, 2}
    assert Bisect.find(3, @backend, list) == {8, 3}
    assert Bisect.find(4, @backend, list) == {8, 3}
  end
  test "find next from unknown with repeats" do
    # index[0, 1, 2, 3]
    list = [1, 1, 3, 3]
    assert Bisect.find(2, @backend, list) == {2, 3}
  end
end
| 29.075 | 65 | 0.594153 |
93f6ec4abf76e186784400adadebb19a0f9a6cad | 357 | ex | Elixir | lib/webapp_web/controllers/admin/team_controller.ex | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | 12 | 2019-07-02T14:30:06.000Z | 2022-03-12T08:22:18.000Z | lib/webapp_web/controllers/admin/team_controller.ex | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | 9 | 2020-03-16T20:10:50.000Z | 2021-06-17T17:45:44.000Z | lib/webapp_web/controllers/admin/team_controller.ex | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | null | null | null | defmodule WebappWeb.Admin.TeamController do
  use WebappWeb, :controller
  alias Webapp.Accounts
  alias Webapp.Accounts.Team
  # Authorization plug: loads/authorizes the Team resource; the listed
  # collection-level actions do not carry a resource id.
  plug :load_and_authorize_resource,
    model: Team,
    non_id_actions: [:index, :create, :new]
  # Renders the admin overview listing every team.
  def index(conn, _params) do
    render(conn, "index.html", teams: Accounts.list_teams(), user: nil)
  end
end
| 19.833333 | 55 | 0.689076 |
93f6f1cd1f7995ea2c333d2370d3495648db68c2 | 1,232 | ex | Elixir | lib/crew_web/live/period_live/index.ex | anamba/crew | c25f6a1d6ddbe0b58da9d556ff53a641c4d2a7b1 | [
"BSL-1.0"
] | null | null | null | lib/crew_web/live/period_live/index.ex | anamba/crew | c25f6a1d6ddbe0b58da9d556ff53a641c4d2a7b1 | [
"BSL-1.0"
] | 5 | 2020-07-20T01:49:01.000Z | 2021-09-08T00:17:04.000Z | lib/crew_web/live/period_live/index.ex | anamba/crew | c25f6a1d6ddbe0b58da9d556ff53a641c4d2a7b1 | [
"BSL-1.0"
] | null | null | null | defmodule CrewWeb.PeriodLive.Index do
  use CrewWeb, :live_view
  alias Crew.Periods
  alias Crew.Periods.Period
  @impl true
  def mount(_params, %{"site_id" => site_id}, socket) do
    # :periods is only computed when not already assigned (assign_new).
    socket =
      socket
      |> assign(:site_id, site_id)
      |> assign_new(:periods, fn -> list_periods(site_id) end)
    {:ok, socket}
  end
  @impl true
  def handle_params(params, _url, socket) do
    {:noreply, apply_action(socket, socket.assigns.live_action, params)}
  end
  # :index — listing page; no period is being edited.
  defp apply_action(socket, :index, _params) do
    assign(socket, page_title: gettext("Periods"), period: nil)
  end
  # :new — empty struct backing the creation form.
  defp apply_action(socket, :new, _params) do
    assign(socket, page_title: "New #{gettext("Period")}", period: %Period{})
  end
  # :edit — load the period identified by the URL.
  defp apply_action(socket, :edit, %{"id" => id}) do
    assign(socket, page_title: "Edit #{gettext("Period")}", period: Periods.get_period!(id))
  end
  @impl true
  def handle_event("delete", %{"id" => id}, socket) do
    {:ok, _deleted} = id |> Periods.get_period!() |> Periods.delete_period()
    {:noreply, assign(socket, :periods, list_periods(socket.assigns.site_id))}
  end
  defp list_periods(site_id), do: Periods.list_periods(site_id)
end
| 25.666667 | 78 | 0.665584 |
93f6fba13c5b84d450eadfb9ae1394eb53441362 | 1,501 | exs | Elixir | backend/test/getaways_web/schema/subscription/canceled_booking_test.exs | Prumme/Projet_phx_ex_gql | 6324af91f94f96ee1f8403d5397ab930347e3e4f | [
"Unlicense"
] | null | null | null | backend/test/getaways_web/schema/subscription/canceled_booking_test.exs | Prumme/Projet_phx_ex_gql | 6324af91f94f96ee1f8403d5397ab930347e3e4f | [
"Unlicense"
] | 6 | 2020-01-31T19:44:15.000Z | 2021-09-02T04:26:49.000Z | backend/test/getaways_web/schema/subscription/canceled_booking_test.exs | Prumme/Projet_phx_ex_gql | 6324af91f94f96ee1f8403d5397ab930347e3e4f | [
"Unlicense"
] | null | null | null | defmodule GetawaysWeb.Schema.Subscription.CanceledBookingTest do
  use GetawaysWeb.SubscriptionCase, async: true
  # GraphQL mutation used to trigger the event under test.
  @mutation """
  mutation ($bookingId: ID!) {
    cancelBooking(bookingId: $bookingId) {
      id
    }
  }
  """
  # End-to-end check: a client subscribed to bookingChange for a place
  # receives a push when a booking for that place is canceled via the
  # cancelBooking mutation over HTTP.
  test "canceled booking can be subscribed to", %{socket: socket} do
    user = user_fixture()
    place = place_fixture()
    booking = booking_fixture(user, %{place_id: place.id})
    subscription = """
    subscription bookingChange {
      bookingChange(placeId: #{place.id}) {
        id
      }
    }
    """
    #
    # 1. Setup the subscription
    #
    ref = push_doc socket, subscription
    assert_reply ref, :ok, %{subscriptionId: subscription_id}
    #
    # 2. Run a mutation to trigger the subscription
    #
    input = %{
      "bookingId" => booking.id
    }
    # The canceling user must be authenticated on the HTTP request.
    conn = build_conn() |> auth_user(user)
    conn = post conn, "/api",
      query: @mutation,
      variables: input
    expected = %{
      "data" => %{
        "cancelBooking" => %{
          "id" => Integer.to_string(booking.id)
        }
      }
    }
    assert expected == json_response(conn, 200)
    #
    # 3. Assert that the expected subscription data was pushed to us
    #
    expected = %{
      result: %{
        data: %{
          "bookingChange" => %{
            "id" => Integer.to_string(booking.id)
          }
        }
      },
      subscriptionId: subscription_id
    }
    assert_push "subscription:data", push
    assert expected == push
  end
end
| 21.442857 | 68 | 0.568288 |
93f7778ea216940e2bb3c1e17bd3e06396ccf8b9 | 1,305 | exs | Elixir | config/dev.exs | underhilllabs/big_snips | 7f1b59d2be45fe6a488d8e3ce7842e7cc867d676 | [
"MIT"
] | 3 | 2016-12-20T17:16:39.000Z | 2017-02-22T11:06:56.000Z | config/dev.exs | underhilllabs/big_snips | 7f1b59d2be45fe6a488d8e3ce7842e7cc867d676 | [
"MIT"
] | 1 | 2017-05-31T16:32:41.000Z | 2017-05-31T16:32:41.000Z | config/dev.exs | underhilllabs/big_snips | 7f1b59d2be45fe6a488d8e3ce7842e7cc867d676 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :big_snips, BigSnips.Endpoint,
  http: [port: 4001],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
                    cd: Path.expand("../", __DIR__)]]
# Watch static and templates for browser reloading.
config :big_snips, BigSnips.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{web/views/.*(ex)$},
      ~r{web/templates/.*(eex)$}
    ]
  ]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database
# NOTE(review): dev-only credentials committed to source; confirm that
# production credentials come from a secrets file / environment variables.
config :big_snips, BigSnips.Repo,
  adapter: Ecto.Adapters.MySQL,
  username: "snipsdb",
  password: "password",
  database: "bigsnips_dev",
  hostname: "localhost",
  pool_size: 5
| 29.659091 | 73 | 0.695019 |
93f77a1bc86fa4750fa1a6c7c5e3ef881f2358ec | 19,044 | ex | Elixir | lib/redix/pubsub/connection.ex | zoldar/redix_experiments | a249060f761ce85f87ad91cfab1dec74ee45aeb2 | [
"MIT"
] | null | null | null | lib/redix/pubsub/connection.ex | zoldar/redix_experiments | a249060f761ce85f87ad91cfab1dec74ee45aeb2 | [
"MIT"
] | null | null | null | lib/redix/pubsub/connection.ex | zoldar/redix_experiments | a249060f761ce85f87ad91cfab1dec74ee45aeb2 | [
"MIT"
] | null | null | null | defmodule Redix.PubSub.Connection do
  @moduledoc false
  # Pub/sub connection process implemented as a :gen_statem with two main
  # states (:disconnected, :connected) plus one transient workaround state
  # used right after init/1 (see the long comment under "## States").
  @behaviour :gen_statem
  alias Redix.{ConnectionError, Connector, Protocol}
  # :gen_statem data.
  #   subscriptions - %{{:channel | :pattern, name} => state} where state is
  #     {:subscribed, subscribers} | {:subscribing, subscribes, unsubscribes} |
  #     {:unsubscribing, resubscribers} | {:disconnected, subscribers},
  #     each element being a MapSet of pids.
  #   monitors - %{subscriber_pid => monitor_ref}; the ref is also included
  #     in every :redix_pubsub message sent back to that subscriber.
  #   continuation - pending Redix.Protocol parser continuation for a
  #     partially received reply.
  defstruct [
    :opts,
    :transport,
    :socket,
    :continuation,
    :backoff_current,
    :last_disconnect_reason,
    :connected_address,
    subscriptions: %{},
    monitors: %{}
  ]
  # Multiplier applied to the current backoff on every failed reconnection.
  @backoff_exponent 1.5
  @impl true
  def callback_mode(), do: :state_functions
  # With :sync_connect the TCP/SSL connection is established inside init/1
  # (so start_link only returns once connected); otherwise the connection
  # is attempted asynchronously from the :disconnected state.
  @impl true
  def init(opts) do
    transport = if(opts[:ssl], do: :ssl, else: :gen_tcp)
    data = %__MODULE__{opts: opts, transport: transport}
    if opts[:sync_connect] do
      with {:ok, socket, address} <- Connector.connect(data.opts),
           :ok <- setopts(data, socket, active: :once) do
        data = %__MODULE__{
          data
          | socket: socket,
            last_disconnect_reason: nil,
            backoff_current: nil,
            connected_address: address
        }
        {:ok, :connected, data}
      else
        {:error, reason} -> {:stop, reason}
        {:stop, reason} -> {:stop, reason}
      end
    else
      send(self(), :handle_possible_erlang_bug)
      {:ok, :state_needed_because_of_possible_erlang_bug, data}
    end
  end
  ## States
  # If I use the action {:next_event, :internal, :connect} when returning
  # {:ok, :disconnected, data} from init/1, then Erlang 20 (not 21) blows up saying:
  # {:bad_return_from_init, {:next_events, :internal, :connect}}. The weird thing is
  # that if I use `{:next_even, :internal, :connect}` it complains but with `:next_even`,
  # but with `:next_event` it seems to add the final "s" (`:next_events`). No idea
  # what's going on and no time to fix it.
  def state_needed_because_of_possible_erlang_bug(:info, :handle_possible_erlang_bug, data) do
    {:next_state, :disconnected, data, {:next_event, :internal, :connect}}
  end
  # Postpone everything else until we have moved to :disconnected.
  def state_needed_because_of_possible_erlang_bug(_event, _info, _data) do
    {:keep_state_and_data, :postpone}
  end
  # Emit disconnection telemetry, notify every subscriber, and mark every
  # subscription as :disconnected so it can be re-established later — or
  # stop altogether when :exit_on_disconnection is set.
  def disconnected(:internal, :handle_disconnection, data) do
    :telemetry.execute([:redix, :disconnection], %{}, %{
      connection: data.opts[:name] || self(),
      address: data.connected_address,
      reason: data.last_disconnect_reason
    })
    if data.opts[:exit_on_disconnection] do
      {:stop, data.last_disconnect_reason}
    else
      :ok =
        Enum.each(data.monitors, fn {pid, ref} ->
          send(pid, ref, :disconnected, %{error: data.last_disconnect_reason})
        end)
      subscriptions =
        Map.new(data.subscriptions, fn
          {target_key, {:subscribed, subscribers}} ->
            {target_key, {:disconnected, subscribers}}
          {target_key, {:subscribing, subscribes, _unsubscribes}} ->
            {target_key, {:disconnected, subscribes}}
          {target_key, {:unsubscribing, resubscribers}} ->
            {target_key, {:disconnected, resubscribers}}
        end)
      data = %{data | subscriptions: subscriptions, connected_address: nil}
      {:keep_state, data}
    end
  end
  # Backoff timer expired: attempt to reconnect.
  def disconnected({:timeout, :reconnect}, nil, _data) do
    {:keep_state_and_data, {:next_event, :internal, :connect}}
  end
  # Try to establish the connection, emitting [:redix, :connection] or
  # [:redix, :failed_connection] telemetry accordingly. On failure, a new
  # reconnection attempt is scheduled via disconnect/3.
  def disconnected(:internal, :connect, data) do
    with {:ok, socket, address} <- Connector.connect(data.opts),
         :ok <- setopts(data, socket, active: :once) do
      :telemetry.execute([:redix, :connection], %{}, %{
        connection: data.opts[:name] || self(),
        address: address,
        reconnection: not is_nil(data.last_disconnect_reason)
      })
      data = %__MODULE__{
        data
        | socket: socket,
          last_disconnect_reason: nil,
          backoff_current: nil,
          connected_address: address
      }
      {:next_state, :connected, data, {:next_event, :internal, :handle_connection}}
    else
      {:error, reason} ->
        :telemetry.execute([:redix, :failed_connection], %{}, %{
          connection: data.opts[:name] || self(),
          address: format_address(data),
          reason: %ConnectionError{reason: reason}
        })
        disconnect(data, reason, _handle_disconnection? = false)
      {:stop, reason} ->
        {:stop, reason, data}
    end
  end
  # (P)SUBSCRIBE request while disconnected: only record the subscriber —
  # the actual Redis commands are sent once we reconnect. The caller is
  # replied to right away with the monitor ref.
  def disconnected({:call, from}, {operation, targets, pid}, data)
      when operation in [:subscribe, :psubscribe] do
    {data, ref} = monitor_new(data, pid)
    :ok = :gen_statem.reply(from, {:ok, ref})
    target_type =
      case operation do
        :subscribe -> :channel
        :psubscribe -> :pattern
      end
    data =
      Enum.reduce(targets, data, fn target_name, data_acc ->
        update_in(data_acc.subscriptions[{target_type, target_name}], fn
          {:disconnected, subscribers} -> {:disconnected, MapSet.put(subscribers, pid)}
          nil -> {:disconnected, MapSet.new([pid])}
        end)
      end)
    {:keep_state, data}
  end
  # (P)UNSUBSCRIBE while disconnected: drop the subscriber from the
  # recorded sets (no Redis command needed — we're not connected).
  def disconnected({:call, from}, {operation, targets, pid}, data)
      when operation in [:unsubscribe, :punsubscribe] do
    :ok = :gen_statem.reply(from, :ok)
    target_type =
      case operation do
        :unsubscribe -> :channel
        :punsubscribe -> :pattern
      end
    data =
      Enum.reduce(targets, data, fn target_name, data_acc ->
        target_key = {target_type, target_name}
        case data_acc.subscriptions[target_key] do
          nil ->
            data_acc
          {:disconnected, subscribers} ->
            subscribers = MapSet.delete(subscribers, pid)
            if MapSet.size(subscribers) == 0 do
              update_in(data_acc.subscriptions, &Map.delete(&1, target_key))
            else
              put_in(data_acc.subscriptions[target_key], {:disconnected, subscribers})
            end
        end
      end)
    data = demonitor_if_not_subscribed_to_anything(data, pid)
    {:keep_state, data}
  end
  # Entered right after (re)connecting: re-issue SUBSCRIBE/PSUBSCRIBE for
  # everything recorded in data.subscriptions.
  def connected(:internal, :handle_connection, data) do
    if map_size(data.subscriptions) > 0 do
      case resubscribe_after_reconnection(data) do
        {:ok, data} -> {:keep_state, data}
        {:error, reason} -> disconnect(data, reason, _handle_disconnection? = true)
      end
    else
      {:keep_state, data}
    end
  end
  # (P)SUBSCRIBE while connected: reply with the monitor ref, then send
  # the commands for targets that are not already covered.
  def connected({:call, from}, {operation, targets, pid}, data)
      when operation in [:subscribe, :psubscribe] do
    {data, ref} = monitor_new(data, pid)
    :ok = :gen_statem.reply(from, {:ok, ref})
    with {:ok, data} <- subscribe_pid_to_targets(data, operation, targets, pid) do
      {:keep_state, data}
    end
  end
  # (P)UNSUBSCRIBE while connected: remove the pid and send UNSUBSCRIBE
  # commands for targets that became empty.
  def connected({:call, from}, {operation, targets, pid}, data)
      when operation in [:unsubscribe, :punsubscribe] do
    :ok = :gen_statem.reply(from, :ok)
    with {:ok, data} <- unsubscribe_pid_from_targets(data, operation, targets, pid) do
      data = demonitor_if_not_subscribed_to_anything(data, pid)
      {:keep_state, data}
    end
  end
  # The socket was closed by the peer.
  def connected(:info, {transport_closed, socket}, %__MODULE__{socket: socket} = data)
      when transport_closed in [:tcp_closed, :ssl_closed] do
    disconnect(data, transport_closed, _handle_disconnection? = true)
  end
  # The socket reported an error.
  def connected(:info, {transport_error, socket, reason}, %__MODULE__{socket: socket} = data)
      when transport_error in [:tcp_error, :ssl_error] do
    disconnect(data, reason, _handle_disconnection? = true)
  end
  # Data arrived (active: :once): re-arm the socket and parse the bytes.
  def connected(:info, {transport, socket, bytes}, %__MODULE__{socket: socket} = data)
      when transport in [:tcp, :ssl] do
    with :ok <- setopts(data, socket, active: :once),
         {:ok, data} <- new_bytes(data, bytes) do
      {:keep_state, data}
    else
      {:error, reason} -> disconnect(data, reason, _handle_disconnection? = true)
    end
  end
  # A subscriber process died: drop its monitor and unsubscribe it from
  # every known channel and pattern.
  def connected(:info, {:DOWN, _ref, :process, pid, _reason}, data) do
    data = update_in(data.monitors, &Map.delete(&1, pid))
    targets = Map.keys(data.subscriptions)
    channels = for {:channel, channel} <- targets, do: channel
    patterns = for {:pattern, pattern} <- targets, do: pattern
    with {:ok, data} <- unsubscribe_pid_from_targets(data, :unsubscribe, channels, pid),
         {:ok, data} <- unsubscribe_pid_from_targets(data, :punsubscribe, patterns, pid) do
      {:keep_state, data}
    end
  end
  ## Helpers
  # Run received bytes through the RESP parser; a partially parsed reply is
  # kept as a parser continuation on the data for the next packet.
  defp new_bytes(data, "") do
    {:ok, data}
  end
  defp new_bytes(data, bytes) do
    case (data.continuation || (&Protocol.parse/1)).(bytes) do
      {:ok, resp, rest} ->
        with {:ok, data} <- handle_pubsub_msg(data, resp),
             do: new_bytes(%{data | continuation: nil}, rest)
      {:continuation, continuation} ->
        {:ok, %{data | continuation: continuation}}
    end
  end
  # Redis confirmed a (p)subscribe. If every caller unsubscribed in the
  # meantime, immediately send the matching unsubscribe; otherwise confirm
  # the subscription to all waiting pids.
  defp handle_pubsub_msg(data, [operation, target, _subscribers_count])
       when operation in ["subscribe", "psubscribe"] do
    target_key =
      case operation do
        "subscribe" -> {:channel, target}
        "psubscribe" -> {:pattern, target}
      end
    {:subscribing, subscribes, _unsubscribes} = data.subscriptions[target_key]
    if MapSet.size(subscribes) == 0 do
      case send_unsubscriptions(data, [target_key]) do
        :ok ->
          data = put_in(data.subscriptions[target_key], {:unsubscribing, MapSet.new()})
          {:ok, data}
        {:error, reason} ->
          {:error, reason}
      end
    else
      Enum.each(subscribes, &send_subscription_confirmation(data, &1, target_key))
      data = put_in(data.subscriptions[target_key], {:subscribed, subscribes})
      {:ok, data}
    end
  end
  # Redis confirmed a (p)unsubscribe. If someone resubscribed in the
  # meantime, re-send the subscribe; otherwise forget the target entirely.
  defp handle_pubsub_msg(data, [operation, target, _subscribers_count])
       when operation in ["unsubscribe", "punsubscribe"] do
    operation = String.to_existing_atom(operation)
    target_key = key_for_target(operation, target)
    {:unsubscribing, resubscribers} = data.subscriptions[target_key]
    if MapSet.size(resubscribers) == 0 do
      data = update_in(data.subscriptions, &Map.delete(&1, target_key))
      {:ok, data}
    else
      case send_subscriptions(data, [target_key]) do
        :ok ->
          data =
            put_in(data.subscriptions[target_key], {:subscribing, resubscribers, MapSet.new()})
          {:ok, data}
        {:error, reason} ->
          {:error, reason}
      end
    end
  end
  defp handle_pubsub_msg(data, ["message", channel, payload]) do
    properties = %{channel: channel, payload: payload}
    handle_pubsub_message_with_payload(data, {:channel, channel}, :message, properties)
  end
  defp handle_pubsub_msg(data, ["pmessage", pattern, channel, payload]) do
    properties = %{channel: channel, pattern: pattern, payload: payload}
    handle_pubsub_message_with_payload(data, {:pattern, pattern}, :pmessage, properties)
  end
  # Fan a "message"/"pmessage" out to every currently subscribed pid;
  # messages that arrive while we are unsubscribing are dropped.
  defp handle_pubsub_message_with_payload(data, target_key, kind, properties) do
    case data.subscriptions[target_key] do
      {:subscribed, subscribers} ->
        for pid <- subscribers do
          send(pid, Map.fetch!(data.monitors, pid), kind, properties)
        end
      {:unsubscribing, _to_resubscribe} ->
        :ok
    end
    {:ok, data}
  end
  # Subscribing.
  # Apply the per-target subscribe transition for each target and send the
  # Redis commands only for brand-new targets. Already-subscribed targets
  # get an immediate confirmation message.
  defp subscribe_pid_to_targets(data, operation, targets, pid) do
    target_type =
      case operation do
        :subscribe -> :channel
        :psubscribe -> :pattern
      end
    {to_subscribe, data} =
      Enum.flat_map_reduce(targets, data, fn target_name, data_acc ->
        target_key = {target_type, target_name}
        {target_state, data_acc} =
          get_and_update_in(data_acc.subscriptions[target_key], &subscribe_pid_to_target(&1, pid))
        case target_state do
          :new ->
            {[target_key], data_acc}
          :already_subscribed ->
            send_subscription_confirmation(data_acc, pid, target_key)
            {[], data_acc}
          :pending ->
            {[], data_acc}
        end
      end)
    case send_subscriptions(data, to_subscribe) do
      :ok -> {:ok, data}
      {:error, reason} -> disconnect(data, reason, _handle_disconnection? = true)
    end
  end
  # State transitions for a single target when a pid subscribes. Returns
  # {:new | :already_subscribed | :pending, new_target_state}.
  defp subscribe_pid_to_target(nil, pid) do
    state = {:subscribing, MapSet.new([pid]), MapSet.new()}
    {:new, state}
  end
  defp subscribe_pid_to_target({:subscribed, subscribers}, pid) do
    state = {:subscribed, MapSet.put(subscribers, pid)}
    {:already_subscribed, state}
  end
  defp subscribe_pid_to_target({:subscribing, subscribes, unsubscribes}, pid) do
    state = {:subscribing, MapSet.put(subscribes, pid), MapSet.delete(unsubscribes, pid)}
    {:pending, state}
  end
  defp subscribe_pid_to_target({:unsubscribing, resubscribers}, pid) do
    state = {:unsubscribing, MapSet.put(resubscribers, pid)}
    {:pending, state}
  end
  defp send_subscription_confirmation(data, pid, {:channel, channel}) do
    send(pid, Map.fetch!(data.monitors, pid), :subscribed, %{channel: channel})
  end
  defp send_subscription_confirmation(data, pid, {:pattern, pattern}) do
    send(pid, Map.fetch!(data.monitors, pid), :psubscribed, %{pattern: pattern})
  end
  # Send SUBSCRIBE/PSUBSCRIBE pipelines over the raw socket.
  defp send_subscriptions(_data, []) do
    :ok
  end
  defp send_subscriptions(data, to_subscribe) do
    channels = for {:channel, channel} <- to_subscribe, do: channel
    patterns = for {:pattern, pattern} <- to_subscribe, do: pattern
    pipeline =
      case {channels, patterns} do
        {_, []} -> [["SUBSCRIBE" | channels]]
        {[], _} -> [["PSUBSCRIBE" | patterns]]
        {_, _} -> [["SUBSCRIBE" | channels], ["PSUBSCRIBE" | patterns]]
      end
    data.transport.send(data.socket, Enum.map(pipeline, &Protocol.pack/1))
  end
  # Returns {targets_to_unsubscribe_from, data}.
  defp unsubscribe_pid_from_targets(data, operation, targets, pid) do
    target_type =
      case operation do
        :unsubscribe -> :channel
        :punsubscribe -> :pattern
      end
    {to_unsubscribe, data} =
      Enum.flat_map_reduce(targets, data, fn target_name, data_acc ->
        target_key = {target_type, target_name}
        {target_state, data_acc} =
          get_and_update_in(
            data_acc.subscriptions[target_key],
            &unsubscribe_pid_from_target(&1, pid)
          )
        send_unsubscription_confirmation(data_acc, pid, target_key)
        case target_state do
          :now_empty -> {[target_key], data_acc}
          _other -> {[], data_acc}
        end
      end)
    case send_unsubscriptions(data, to_unsubscribe) do
      :ok -> {:ok, data}
      {:error, reason} -> disconnect(data, reason, _handle_disconnection? = true)
    end
  end
  # Per-target state transitions when a pid unsubscribes. :now_empty means
  # the target lost its last subscriber, so an UNSUBSCRIBE must be sent.
  defp unsubscribe_pid_from_target({:subscribed, subscribers}, pid) do
    if MapSet.size(subscribers) == 1 and MapSet.member?(subscribers, pid) do
      state = {:unsubscribing, _resubscribers = MapSet.new()}
      {:now_empty, state}
    else
      state = {:subscribed, MapSet.delete(subscribers, pid)}
      {:noop, state}
    end
  end
  defp unsubscribe_pid_from_target({:subscribing, subscribes, unsubscribes}, pid) do
    state = {:subscribing, MapSet.delete(subscribes, pid), MapSet.put(unsubscribes, pid)}
    {:noop, state}
  end
  defp unsubscribe_pid_from_target({:unsubscribing, resubscribers}, pid) do
    state = {:unsubscribing, MapSet.delete(resubscribers, pid)}
    {:noop, state}
  end
  defp unsubscribe_pid_from_target(_, _), do: :pop
  defp send_unsubscription_confirmation(data, pid, {:channel, channel}) do
    if ref = data.monitors[pid] do
      send(pid, ref, :unsubscribed, %{channel: channel})
    end
  end
  defp send_unsubscription_confirmation(data, pid, {:pattern, pattern}) do
    if ref = data.monitors[pid] do
      send(pid, ref, :punsubscribed, %{pattern: pattern})
    end
  end
  defp send_unsubscriptions(_data, []) do
    :ok
  end
  defp send_unsubscriptions(data, to_subscribe) do
    channels = for {:channel, channel} <- to_subscribe, do: channel
    patterns = for {:pattern, pattern} <- to_subscribe, do: pattern
    pipeline =
      case {channels, patterns} do
        {_, []} -> [["UNSUBSCRIBE" | channels]]
        {[], _} -> [["PUNSUBSCRIBE" | patterns]]
        {_, _} -> [["UNSUBSCRIBE" | channels], ["PUNSUBSCRIBE" | patterns]]
      end
    data.transport.send(data.socket, Enum.map(pipeline, &Protocol.pack/1))
  end
  # Flip every :disconnected entry back to :subscribing and re-send the
  # SUBSCRIBE/PSUBSCRIBE commands.
  defp resubscribe_after_reconnection(data) do
    data =
      update_in(data.subscriptions, fn subscriptions ->
        Map.new(subscriptions, fn {target_key, {:disconnected, subscribers}} ->
          {target_key, {:subscribing, subscribers, MapSet.new()}}
        end)
      end)
    with :ok <- send_subscriptions(data, Map.keys(data.subscriptions)) do
      {:ok, data}
    end
  end
  # Monitor a pid once, reusing the existing monitor ref when present.
  defp monitor_new(data, pid) do
    case data.monitors do
      %{^pid => ref} ->
        {data, ref}
      _ ->
        ref = Process.monitor(pid)
        data = put_in(data.monitors[pid], ref)
        {data, ref}
    end
  end
  # Drop the monitor (flushing any pending :DOWN) once a pid is no longer
  # involved in any subscription state.
  defp demonitor_if_not_subscribed_to_anything(data, pid) do
    still_subscribed_to_something? =
      Enum.any?(data.subscriptions, fn
        {_target, {:subscribing, subscribes, _unsubscribes}} -> pid in subscribes
        {_target, {:subscribed, subscribers}} -> pid in subscribers
        {_target, {:unsubscribing, resubscribers}} -> pid in resubscribers
      end)
    if still_subscribed_to_something? do
      data
    else
      {monitor_ref, data} = pop_in(data.monitors[pid])
      if monitor_ref, do: Process.demonitor(monitor_ref, [:flush])
      data
    end
  end
  defp key_for_target(:subscribe, channel), do: {:channel, channel}
  defp key_for_target(:unsubscribe, channel), do: {:channel, channel}
  defp key_for_target(:psubscribe, pattern), do: {:pattern, pattern}
  defp key_for_target(:punsubscribe, pattern), do: {:pattern, pattern}
  defp setopts(data, socket, opts) do
    inets_mod(data.transport).setopts(socket, opts)
  end
  defp inets_mod(:gen_tcp), do: :inet
  defp inets_mod(:ssl), do: :ssl
  # Exponential backoff, capped at :backoff_max unless that is :infinity.
  defp next_backoff(data) do
    backoff_current = data.backoff_current || data.opts[:backoff_initial]
    backoff_max = data.opts[:backoff_max]
    next_backoff = round(backoff_current * @backoff_exponent)
    backoff_current =
      if backoff_max == :infinity do
        next_backoff
      else
        min(next_backoff, backoff_max)
      end
    {backoff_current, put_in(data.backoff_current, backoff_current)}
  end
  # Close the socket, remember the failure reason, and schedule a reconnect
  # attempt; optionally emit the internal :handle_disconnection event first
  # (not wanted when the failure happened while already disconnected).
  def disconnect(data, reason, handle_disconnection?) do
    {next_backoff, data} = next_backoff(data)
    if data.socket do
      _ = data.transport.close(data.socket)
    end
    data = put_in(data.last_disconnect_reason, %ConnectionError{reason: reason})
    data = put_in(data.socket, nil)
    actions = [{{:timeout, :reconnect}, next_backoff, nil}]
    actions =
      if handle_disconnection? do
        [{:next_event, :internal, :handle_disconnection}] ++ actions
      else
        actions
      end
    {:next_state, :disconnected, data, actions}
  end
  # Wrap a notification in the :redix_pubsub envelope delivered to
  # subscriber processes.
  defp send(pid, ref, kind, properties)
       when is_pid(pid) and is_reference(ref) and is_atom(kind) and is_map(properties) do
    send(pid, {:redix_pubsub, self(), ref, kind, properties})
  end
  defp format_address(%{opts: opts} = _state) do
    if opts[:sentinel] do
      "sentinel"
    else
      "#{opts[:host]}:#{opts[:port]}"
    end
  end
end
| 30.865478 | 98 | 0.641672 |
93f7bd6d3fb53dc06b0923d54c2ad13ab497590f | 1,644 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/write_control.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/write_control.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/slides/lib/google_api/slides/v1/model/write_control.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Slides.V1.Model.WriteControl do
  @moduledoc """
  Provides control over how write requests are executed.
  ## Attributes
  * `requiredRevisionId` (*type:* `String.t`, *default:* `nil`) - The revision ID of the presentation required for the write request. If specified and the `required_revision_id` doesn't exactly match the presentation's current `revision_id`, the request will not be processed and will return a 400 bad request error.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :requiredRevisionId => String.t()
        }
  # Declares the JSON field <-> struct key mapping (macro from ModelBase).
  field(:requiredRevisionId)
end
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.WriteControl do
def decode(value, options) do
GoogleApi.Slides.V1.Model.WriteControl.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.WriteControl do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.978723 | 318 | 0.749392 |
93f7c9b5df637a2668ab5992654ce3e4bab7ab3e | 2,620 | exs | Elixir | test/appsignal/nif_test.exs | cblavier/appsignal-elixir | 6e191a9e50acab3537a6331cd08c269b9e0fd7f4 | [
"MIT"
] | null | null | null | test/appsignal/nif_test.exs | cblavier/appsignal-elixir | 6e191a9e50acab3537a6331cd08c269b9e0fd7f4 | [
"MIT"
] | null | null | null | test/appsignal/nif_test.exs | cblavier/appsignal-elixir | 6e191a9e50acab3537a6331cd08c269b9e0fd7f4 | [
"MIT"
] | null | null | null | unless Code.ensure_loaded?(Appsignal.Agent) do
{_, _} = Code.eval_file("agent.exs")
end
defmodule Appsignal.NifTest do
  # Tests for the NIF bindings used by the AppSignal agent. In the
  # :test_no_nif environment the native library is intentionally absent,
  # so some tests are tagged :skip_env_test_no_nif or compiled
  # conditionally on Mix.env() below.
  alias Appsignal.Nif
  use ExUnit.Case, async: true

  import AppsignalTest.Utils, only: [is_reference_or_binary: 1]

  test "whether the agent starts" do
    assert :ok = Nif.start()
  end

  test "whether the agent stops" do
    assert :ok = Nif.stop()
  end

  @tag :skip_env_test_no_nif
  test "starting transaction returns a reference to the transaction resource" do
    assert {:ok, reference} = Nif.start_transaction("transaction id", "http_request")
    assert is_reference_or_binary(reference)
  end

  # Compile-time branch: this test only exists where the NIF can be loaded.
  if Mix.env() not in [:test_no_nif] do
    test "the nif is loaded" do
      assert true == Nif.loaded?()
    end
  end

  if Mix.env() in [:test_no_nif] do
    test "the nif is not loaded" do
      assert false == Nif.loaded?()
    end
  end

  describe "create_root_span/1" do
    test "returns an ok-tuple with a reference to the span" do
      assert {:ok, ref} = Nif.create_root_span("http_request")
      assert is_reference(ref)
    end
  end

  describe "create_root_span_with_timestamp/2" do
    setup do
      # 1_588_930_137 / 508_176_000 are an arbitrary fixed sec/nsec pair.
      {:ok, ref} = Nif.create_root_span_with_timestamp("http_request", 1_588_930_137, 508_176_000)
      %{ref: ref}
    end

    test "returns a reference to the span", %{ref: ref} do
      assert is_reference(ref)
    end

    @tag :skip_env_test_no_nif
    test "sets the span's start time to the passed value", %{ref: ref} do
      {:ok, json} = Nif.span_to_json(ref)
      assert {:ok, %{"start_time" => 1_588_930_137}} = Jason.decode(json)
    end
  end

  describe "create_child_span/3" do
    test "returns an ok-tuple with a reference to the span" do
      {:ok, parent} = Nif.create_root_span("http_request")
      assert {:ok, ref} = Nif.create_child_span(parent)
      assert is_reference(ref)
    end
  end

  describe "create_child_span_with_timestamp/2" do
    setup do
      {:ok, parent} = Nif.create_root_span("http_request")
      {:ok, ref} = Nif.create_child_span_with_timestamp(parent, 1_588_930_137, 508_176_000)
      %{ref: ref}
    end

    test "returns a reference to the span", %{ref: ref} do
      assert is_reference(ref)
    end

    @tag :skip_env_test_no_nif
    test "sets the span's start time to the passed value", %{ref: ref} do
      {:ok, json} = Nif.span_to_json(ref)
      assert {:ok, %{"start_time" => 1_588_930_137}} = Jason.decode(json)
    end
  end

  describe "close_span/1" do
    test "returns :ok" do
      {:ok, ref} = Nif.create_root_span("http_request")
      assert Nif.close_span(ref) == :ok
    end
  end
end
| 26.464646 | 98 | 0.667557 |
93f7d5748bf0d8ede7feb37a6ebce1b9a8747721 | 1,173 | ex | Elixir | test/support/apps/example/lib/example_web/channels/user_socket.ex | mitchellhenke/torch | 2d0ab68f4e2d7f3bc37fbf7edbd1298b29b36e71 | [
"MIT"
] | 528 | 2019-09-13T15:10:36.000Z | 2022-03-31T10:28:27.000Z | test/support/apps/example/lib/example_web/channels/user_socket.ex | mitchellhenke/torch | 2d0ab68f4e2d7f3bc37fbf7edbd1298b29b36e71 | [
"MIT"
] | 133 | 2019-09-13T17:46:59.000Z | 2022-03-01T13:37:10.000Z | test/support/apps/example/lib/example_web/channels/user_socket.ex | mitchellhenke/torch | 2d0ab68f4e2d7f3bc37fbf7edbd1298b29b36e71 | [
"MIT"
] | 38 | 2019-10-29T20:37:13.000Z | 2022-03-03T05:19:33.000Z | defmodule ExampleWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", ExampleWeb.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
  # Accepts every connection without inspecting params; see the generated
  # comments above for how to verify tokens and assign user state here.
  def connect(_params, socket) do
    {:ok, socket}
  end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# ExampleWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
  # Returning nil makes this socket anonymous (no per-user socket id, so no
  # targeted "disconnect" broadcasts are possible).
  def id(_socket), do: nil
end
| 30.868421 | 83 | 0.703325 |
93f82fdfc9ae5b7ee25558882e69af1d4e53dc7d | 490 | ex | Elixir | apps/xee2018/lib/xee2018/application.ex | RyoheiHayashi/xee2018 | 3b9ff03b13817563a5c7c75db1da360879a8a8d4 | [
"MIT"
] | 1 | 2018-09-05T10:31:50.000Z | 2018-09-05T10:31:50.000Z | apps/xee2018/lib/xee2018/application.ex | RyoheiHayashi/xee2018 | 3b9ff03b13817563a5c7c75db1da360879a8a8d4 | [
"MIT"
] | null | null | null | apps/xee2018/lib/xee2018/application.ex | RyoheiHayashi/xee2018 | 3b9ff03b13817563a5c7c75db1da360879a8a8d4 | [
"MIT"
] | null | null | null | defmodule Xee2018.Application do
@moduledoc """
The Xee2018 Application Service.
The xee2018 system business domain lives in this application.
Exposes API to clients such as the `Xee2018Web` application
for use in channels, controllers, and elsewhere.
"""
use Application
  # Application callback: starts the top-level supervision tree with the
  # Ecto repo as its only child.
  #
  # NOTE(review): `Supervisor.Spec` is deprecated since Elixir 1.5; once the
  # project is on a modern Elixir/Ecto, prefer `children = [Xee2018.Repo]`
  # with `Supervisor.start_link(children, strategy: :one_for_one, ...)`.
  def start(_type, _args) do
    import Supervisor.Spec, warn: false

    Supervisor.start_link([
      supervisor(Xee2018.Repo, []),
    ], strategy: :one_for_one, name: Xee2018.Supervisor)
  end
end
| 24.5 | 63 | 0.726531 |
93f846fc8ea755a950a76d7310e1a7093aa4cc3b | 1,627 | ex | Elixir | clients/tasks/lib/google_api/tasks/v1/model/task_links.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/tasks/lib/google_api/tasks/v1/model/task_links.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/tasks/lib/google_api/tasks/v1/model/task_links.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Tasks.V1.Model.TaskLinks do
  @moduledoc """
  ## Attributes

  * `description` (*type:* `String.t`, *default:* `nil`) - The description. In HTML speak: Everything between <a> and </a>.
  * `link` (*type:* `String.t`, *default:* `nil`) - The URL.
  * `type` (*type:* `String.t`, *default:* `nil`) - Type of the link, e.g. "email".
  """

  # Generated model: the field/1 macro comes from GoogleApi.Gax.ModelBase and
  # is used by the Poison implementations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :description => String.t(),
          :link => String.t(),
          :type => String.t()
        }

  field(:description)
  field(:link)
  field(:type)
end

# Delegates Poison decoding to the generated decode/2 of the model.
defimpl Poison.Decoder, for: GoogleApi.Tasks.V1.Model.TaskLinks do
  def decode(value, options) do
    GoogleApi.Tasks.V1.Model.TaskLinks.decode(value, options)
  end
end

# Encodes the model with the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.Tasks.V1.Model.TaskLinks do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.698113 | 125 | 0.690842 |
93f847b347822bcd0d8863518da554a7e9d498a1 | 1,336 | ex | Elixir | server/lib/events_app_web/controllers/event_controller.ex | kylesmith-1/blazeneu | 83cb68b8112bac8d51c9f92e709720d7e7ba1472 | [
"MIT"
] | null | null | null | server/lib/events_app_web/controllers/event_controller.ex | kylesmith-1/blazeneu | 83cb68b8112bac8d51c9f92e709720d7e7ba1472 | [
"MIT"
] | null | null | null | server/lib/events_app_web/controllers/event_controller.ex | kylesmith-1/blazeneu | 83cb68b8112bac8d51c9f92e709720d7e7ba1472 | [
"MIT"
] | 1 | 2021-04-10T18:37:30.000Z | 2021-04-10T18:37:30.000Z | defmodule CompanyTestWeb.EventController do
use CompanyTestWeb, :controller
alias CompanyTest.Events
alias CompanyTest.Events.Event
alias CompanyTestWeb.Plugs
plug Plugs.RequireAuth when action in [:create]
action_fallback CompanyTestWeb.FallbackController
  # Lists every event and renders them as JSON.
  def index(conn, _params) do
    events = Events.list_events()
    render(conn, "index.json", events: events)
  end
  # Creates an event owned by the authenticated user (the RequireAuth plug
  # runs before this action). The "user_id" is taken from the assigns and
  # overwrites any value the client may have supplied.
  def create(conn, %{"event" => event_params}) do
    user = conn.assigns[:current_user]
    event_params = Map.put(event_params, "user_id", user.id)

    # On {:error, _} the `with` falls through and the result is handled by
    # the action_fallback controller declared above.
    with {:ok, %Event{} = event} <- Events.create_event(event_params) do
      conn
      |> put_status(:created)
      |> put_resp_header("location", Routes.event_path(conn, :show, event))
      |> render("show.json", event: event)
    end
  end
  # Renders a single event; Events.get_event!/1 raises when the id is
  # unknown (bang variant).
  def show(conn, %{"id" => id}) do
    event = Events.get_event!(id)
    render(conn, "show.json", event: event)
  end
  # Applies client-supplied changes to an existing event; raises if the id
  # is unknown. A failed update falls through to the fallback controller.
  def update(conn, %{"id" => id, "event" => event_params}) do
    event = Events.get_event!(id)

    with {:ok, %Event{} = event} <- Events.update_event(event, event_params) do
      render(conn, "show.json", event: event)
    end
  end
  # Deletes an event and replies 204 No Content on success; a failed delete
  # falls through to the fallback controller.
  def delete(conn, %{"id" => id}) do
    event = Events.get_event!(id)

    with {:ok, %Event{}} <- Events.delete_event(event) do
      send_resp(conn, :no_content, "")
    end
  end
end
| 26.196078 | 79 | 0.658683 |
93f84a8880087d63de49ce4c915fd1e98796b873 | 2,417 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/list_jobs_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dataflow/lib/google_api/dataflow/v1b3/model/list_jobs_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dataflow/lib/google_api/dataflow/v1b3/model/list_jobs_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dataflow.V1b3.Model.ListJobsResponse do
  @moduledoc """
  Response to a request to list Cloud Dataflow jobs in a project. This might be a partial response, depending on the page size in the ListJobsRequest. However, if the project does not have any jobs, an instance of ListJobsResponse is not returned and the requests's response body is empty {}.

  ## Attributes

  * `failedLocation` (*type:* `list(GoogleApi.Dataflow.V1b3.Model.FailedLocation.t)`, *default:* `nil`) - Zero or more messages describing the [regional endpoints] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that failed to respond.
  * `jobs` (*type:* `list(GoogleApi.Dataflow.V1b3.Model.Job.t)`, *default:* `nil`) - A subset of the requested job information.
  * `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Set if there may be more results than fit in this response.
  """

  # Generated model: the field macros come from GoogleApi.Gax.ModelBase and
  # are used by the Poison implementations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :failedLocation => list(GoogleApi.Dataflow.V1b3.Model.FailedLocation.t()) | nil,
          :jobs => list(GoogleApi.Dataflow.V1b3.Model.Job.t()) | nil,
          :nextPageToken => String.t() | nil
        }

  # List-typed fields decode each element as the given nested model.
  field(:failedLocation, as: GoogleApi.Dataflow.V1b3.Model.FailedLocation, type: :list)
  field(:jobs, as: GoogleApi.Dataflow.V1b3.Model.Job, type: :list)
  field(:nextPageToken)
end

# Delegates Poison decoding to the generated decode/2 of the model.
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.ListJobsResponse do
  def decode(value, options) do
    GoogleApi.Dataflow.V1b3.Model.ListJobsResponse.decode(value, options)
  end
end

# Encodes the model with the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.ListJobsResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 45.603774 | 292 | 0.737691 |
93f84c33063986795356b0a0a7ff855ced99c332 | 1,216 | ex | Elixir | server/potter_sea/lib/potter_sea_web/views/error_helpers.ex | monarch619/PotterSea | 937fc62fcb0b4e2e071718353bc6ba95bc310aa1 | [
"Apache-2.0"
] | 44 | 2021-12-12T19:28:26.000Z | 2022-03-05T09:48:00.000Z | server/potter_sea/lib/potter_sea_web/views/error_helpers.ex | RoniAndarsyah/PotterSea | 40560be7842aa5e791a7b8f0e98b507f4e84a443 | [
"Apache-2.0"
] | 1 | 2021-12-14T12:59:16.000Z | 2022-02-18T09:31:59.000Z | server/potter_sea/lib/potter_sea_web/views/error_helpers.ex | RoniAndarsyah/PotterSea | 40560be7842aa5e791a7b8f0e98b507f4e84a443 | [
"Apache-2.0"
] | 7 | 2021-12-12T23:09:04.000Z | 2022-01-02T08:34:45.000Z | defmodule PotterSeaWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(PotterSeaWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(PotterSeaWeb.Gettext, "errors", msg, opts)
end
end
end
| 35.764706 | 78 | 0.672697 |
93f850a306cd7b5078ce1af050486abeef046218 | 2,379 | exs | Elixir | test/exshome_player/web_test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | 2 | 2021-12-21T16:32:56.000Z | 2022-02-22T17:06:39.000Z | test/exshome_player/web_test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | null | null | null | test/exshome_player/web_test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | null | null | null | defmodule ExshomePlayerTest.WebTest do
use ExshomeWeb.ConnCase, async: true
import ExshomeTest.Fixtures
alias ExshomePlayer.Services.{MpvSocket, Playback, PlayerState}
alias ExshomePlayer.Variables
alias ExshomeTest.TestMpvServer
alias ExshomeTest.TestRegistry
  describe "player page index without dependencies" do
    # The page must render even before any player services are running.
    test "renders fine", %{conn: conn} do
      assert {:ok, _view, _html} = live(conn, ExshomePlayer.path(conn, :index))
    end
  end
  describe "player page index with dependencies" do
    setup %{conn: conn} do
      # Boot a fake mpv server plus the socket/player-state services the
      # page depends on, then mount the LiveView under test.
      TestMpvServer.server_fixture()
      TestRegistry.start_dependency(MpvSocket, %{})
      TestRegistry.start_dependency(PlayerState, %{})
      view = live_with_dependencies(conn, ExshomePlayer, :index)
      %{view: view}
    end

    test "updates volume", %{view: view} do
      volume_selector = "[name=volume]"
      volume = unique_integer()
      # Changing the slider must propagate to the Variables.Volume
      # dependency and be reflected back in the rendered input value.
      view |> element(volume_selector) |> render_change(%{volume: volume})
      assert_receive_dependency({Variables.Volume, volume})
      assert get_value(view, volume_selector) == Integer.to_string(volume)
    end

    test "updates position", %{view: view} do
      position_selector = "[name=position]"
      position = unique_integer()
      view |> element(position_selector) |> render_change(%{position: position})
      assert_receive_dependency({Variables.Position, position})
      assert get_value(view, position_selector) == Integer.to_string(position)
    end

    test "updates pause state", %{view: view} do
      # Loading a file starts playback (pause == false); the pause/play
      # buttons must toggle the Variables.Pause dependency back and forth.
      random_file = "some_file#{unique_integer()}"
      Playback.load_file(random_file)
      assert_receive_app_page_dependency({Variables.Pause, false})
      assert view |> element("[phx-click=pause]") |> render_click()
      assert_receive_app_page_dependency({Variables.Pause, true})
      assert view |> element("[phx-click=play]") |> render_click()
      assert_receive_app_page_dependency({Variables.Pause, false})
      assert view |> element("[phx-click=pause]") |> render_click()
      assert_receive_app_page_dependency({Variables.Pause, true})
    end
  end
  describe "player page preview" do
    # The preview widget must not require any running player services.
    test "renders without dependencies", %{conn: conn} do
      assert live_preview(conn, ExshomePlayer)
    end
  end
  # Extracts the `value` attribute of the single element matching `selector`
  # from the rendered LiveView; crashes if the match is not exactly one.
  defp get_value(view, selector) do
    [value] = view |> render() |> Floki.attribute(selector, "value")
    value
  end
end
| 36.045455 | 80 | 0.703237 |
93f8915061153ba4f4d17730de28a1a0876a1320 | 43,755 | ex | Elixir | lib/elixir/lib/stream.ex | evalphobia/elixir | a07a2362e5827b09d8b27be2c1ad2980d25b9768 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/stream.ex | evalphobia/elixir | a07a2362e5827b09d8b27be2c1ad2980d25b9768 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/stream.ex | evalphobia/elixir | a07a2362e5827b09d8b27be2c1ad2980d25b9768 | [
"Apache-2.0"
] | null | null | null | defmodule Stream do
@moduledoc """
Module for creating and composing streams.
Streams are composable, lazy enumerables. Any enumerable that generates
items one by one during enumeration is called a stream. For example,
Elixir's `Range` is a stream:
iex> range = 1..5
1..5
iex> Enum.map range, &(&1 * 2)
[2, 4, 6, 8, 10]
In the example above, as we mapped over the range, the elements being
enumerated were created one by one, during enumeration. The `Stream`
module allows us to map the range, without triggering its enumeration:
iex> range = 1..3
iex> stream = Stream.map(range, &(&1 * 2))
iex> Enum.map(stream, &(&1 + 1))
[3, 5, 7]
Notice we started with a range and then we created a stream that is
meant to multiply each item in the range by 2. At this point, no
computation was done. Only when `Enum.map/2` is called we actually
enumerate over each item in the range, multiplying it by 2 and adding 1.
We say the functions in `Stream` are *lazy* and the functions in `Enum`
are *eager*.
Due to their laziness, streams are useful when working with large
(or even infinite) collections. When chaining many operations with `Enum`,
intermediate lists are created, while `Stream` creates a recipe of
computations that are executed at a later moment. Let's see another
example:
1..3
|> Enum.map(&IO.inspect(&1))
|> Enum.map(&(&1 * 2))
|> Enum.map(&IO.inspect(&1))
1
2
3
2
4
6
#=> [2, 4, 6]
Notice that we first printed each item in the list, then multiplied each
element by 2 and finally printed each new value. In this example, the list
was enumerated three times. Let's see an example with streams:
stream = 1..3
|> Stream.map(&IO.inspect(&1))
|> Stream.map(&(&1 * 2))
|> Stream.map(&IO.inspect(&1))
Enum.to_list(stream)
1
2
2
4
3
6
#=> [2, 4, 6]
Although the end result is the same, the order in which the items were
printed changed! With streams, we print the first item and then print
its double. In this example, the list was enumerated just once!
That's what we meant when we said earlier that streams are composable,
lazy enumerables. Notice we could call `Stream.map/2` multiple times,
effectively composing the streams and keeping them lazy. The computations
are only performed when you call a function from the `Enum` module.
## Creating Streams
There are many functions in Elixir's standard library that return
streams, some examples are:
* `IO.stream/2` - streams input lines, one by one
* `URI.query_decoder/1` - decodes a query string, pair by pair
This module also provides many convenience functions for creating streams,
like `Stream.cycle/1`, `Stream.unfold/2`, `Stream.resource/3` and more.
Note the functions in this module are guaranteed to return enumerables.
Since enumerables can have different shapes (structs, anonymous functions,
and so on), the functions in this module may return any of those shapes
and this may change at any time. For example, a function that today
returns an anonymous function may return a struct in future releases.
"""
@doc false
defstruct enum: nil, funs: [], accs: [], done: nil
@type acc :: any
@type element :: any
@type index :: non_neg_integer
@type default :: any
# Require Stream.Reducers and its callbacks
require Stream.Reducers, as: R
  # Shorthand used by the transformers below when threading the composed
  # reducer accumulator, which has the shape [head, state | tail]:
  #
  #   * skip/1          - continue without emitting anything
  #   * next/3          - invoke the next reducer in the composition
  #   * acc/3           - build/match the composed accumulator
  #   * next_with_acc/5 - call the next reducer, then reinstall `state`
  defmacrop skip(acc) do
    # No quote needed: two-element tuples are their own AST.
    {:cont, acc}
  end

  defmacrop next(fun, entry, acc) do
    quote do: unquote(fun).(unquote(entry), unquote(acc))
  end

  defmacrop acc(head, state, tail) do
    quote do: [unquote(head), unquote(state) | unquote(tail)]
  end

  defmacrop next_with_acc(fun, entry, head, state, tail) do
    quote do
      {reason, [head | tail]} = unquote(fun).(unquote(entry), [unquote(head) | unquote(tail)])
      {reason, [head, unquote(state) | tail]}
    end
  end
## Transformers
  @doc """
  Shortcut to `chunk(enum, n, n)`.

  Streams the enumerable in non-overlapping chunks of `n` items each;
  a trailing partial chunk is discarded.
  """
  @spec chunk(Enumerable.t, pos_integer) :: Enumerable.t
  def chunk(enum, n), do: chunk(enum, n, n, nil)
  @doc """
  Streams the enumerable in chunks, containing `n` items each, where
  each new chunk starts `step` elements into the enumerable.

  `step` is optional and, if not passed, defaults to `n`, i.e.
  chunks do not overlap. If the final chunk does not have `n`
  elements to fill the chunk, elements are taken as necessary
  from `leftover` if it was passed. If `leftover` is passed and
  does not have enough elements to fill the chunk, then the chunk is
  returned anyway with less than `n` elements. If `leftover` is not
  passed at all or is `nil`, then the partial chunk is discarded
  from the result.

  ## Examples

      iex> Stream.chunk([1, 2, 3, 4, 5, 6], 2) |> Enum.to_list
      [[1, 2], [3, 4], [5, 6]]

      iex> Stream.chunk([1, 2, 3, 4, 5, 6], 3, 2) |> Enum.to_list
      [[1, 2, 3], [3, 4, 5]]

      iex> Stream.chunk([1, 2, 3, 4, 5, 6], 3, 2, [7]) |> Enum.to_list
      [[1, 2, 3], [3, 4, 5], [5, 6, 7]]

      iex> Stream.chunk([1, 2, 3, 4, 5, 6], 3, 3, []) |> Enum.to_list
      [[1, 2, 3], [4, 5, 6]]

  """
  @spec chunk(Enumerable.t, pos_integer, pos_integer) :: Enumerable.t
  @spec chunk(Enumerable.t, pos_integer, pos_integer, Enumerable.t | nil) :: Enumerable.t
  def chunk(enum, n, step, leftover \\ nil)
      when is_integer(n) and n > 0 and is_integer(step) and step > 0 do
    # Upper bound on how many elements the reducer may buffer at once.
    limit = :erlang.max(n, step)

    if is_nil(leftover) do
      # No leftover: a trailing partial chunk is simply discarded.
      lazy enum, {[], 0}, fn(f1) -> R.chunk(n, step, limit, f1) end
    else
      # With leftover we also install an "after" hook (do_chunk/4) that
      # flushes and pads the final partial chunk once the source is done.
      lazy enum, {[], 0},
           fn(f1) -> R.chunk(n, step, limit, f1) end,
           &do_chunk(&1, n, leftover, &2)
    end
  end

  # Nothing buffered when the stream finished: no partial chunk to emit.
  defp do_chunk(acc(_, {_, 0}, _) = acc, _, _, _) do
    {:cont, acc}
  end

  # Pad the buffered partial chunk (stored reversed) with up to `n - count`
  # leftover elements and emit it downstream.
  defp do_chunk(acc(h, {buffer, count} = old, t), n, leftover, f1) do
    buffer = :lists.reverse(buffer, Enum.take(leftover, n - count))
    next_with_acc(f1, buffer, h, old, t)
  end
  @doc """
  Chunks the `enum` by buffering elements for which `fun` returns
  the same value and only emit them when `fun` returns a new value
  or the `enum` finishes.

  ## Examples

      iex> stream = Stream.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1))
      iex> Enum.to_list(stream)
      [[1], [2, 2], [3], [4, 4, 6], [7, 7]]

  """
  @spec chunk_by(Enumerable.t, (element -> any)) :: Enumerable.t
  def chunk_by(enum, fun) do
    # do_chunk_by/2 runs after the source is exhausted and flushes the
    # still-buffered final chunk.
    lazy enum, nil,
         fn(f1) -> R.chunk_by(fun, f1) end,
         &do_chunk_by(&1, &2)
  end

  # Nothing buffered (empty source): nothing left to emit.
  defp do_chunk_by(acc(_, nil, _) = acc, _f1) do
    {:cont, acc}
  end

  # Emit the last buffered chunk (stored in reverse order).
  defp do_chunk_by(acc(h, {buffer, _}, t), f1) do
    next_with_acc(f1, :lists.reverse(buffer), h, nil, t)
  end
@doc """
Creates a stream that only emits elements if they are different from the last emitted element.
This function only ever needs to store the last emitted element.
Elements are compared using `===`.
## Examples
iex> Stream.dedup([1, 2, 3, 3, 2, 1]) |> Enum.to_list
[1, 2, 3, 2, 1]
"""
@spec dedup(Enumerable.t) :: Enumerable.t
def dedup(enum) do
dedup_by(enum, fn x -> x end)
end
  @doc """
  Creates a stream that only emits elements if the result of calling `fun` on the element is
  different from the (stored) result of calling `fun` on the last emitted element.

  ## Examples

      iex> Stream.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end) |> Enum.to_list
      [{1, :x}, {2, :y}, {1, :x}]

  """
  @spec dedup_by(Enumerable.t, (element -> term)) :: Enumerable.t
  def dedup_by(enum, fun) do
    # Initial reducer state is nil (nothing emitted yet); R.dedup keeps the
    # key of the last emitted element to compare against.
    lazy enum, nil, fn f1 -> R.dedup(fun, f1) end
  end
  @doc """
  Lazily drops the next `n` items from the enumerable.

  If a negative `n` is given, it will drop the last `n` items from
  the collection. Note that the mechanism by which this is implemented
  will delay the emission of any item until `n` additional items have
  been emitted by the enum.

  ## Examples

      iex> stream = Stream.drop(1..10, 5)
      iex> Enum.to_list(stream)
      [6, 7, 8, 9, 10]

      iex> stream = Stream.drop(1..10, -5)
      iex> Enum.to_list(stream)
      [1, 2, 3, 4, 5]

  """
  @spec drop(Enumerable.t, non_neg_integer) :: Enumerable.t
  def drop(enum, n) when n >= 0 do
    lazy enum, n, fn(f1) -> R.drop(f1) end
  end

  # Negative n: emit everything except the trailing n elements. The n most
  # recent elements are held in a pair of buffers so an element is only
  # released once n newer ones have been seen.
  def drop(enum, n) when n < 0 do
    n = abs(n)

    lazy enum, {0, [], []}, fn(f1) ->
      fn
        # buf2 is empty: still filling the delay window, emit nothing yet.
        entry, [h, {count, buf1, []} | t] ->
          do_drop(:cont, n, entry, h, count, buf1, [], t)
        # buf2 has a delayed element ready: emit it, then buffer the entry.
        entry, [h, {count, buf1, [next | buf2]} | t] ->
          {reason, [h | t]} = f1.(next, [h | t])
          do_drop(reason, n, entry, h, count, buf1, buf2, t)
      end
    end
  end

  # Pushes `entry` onto buf1; once buf1 holds n elements it becomes the new
  # (reversed) buf2 queue of elements eligible for emission.
  defp do_drop(reason, n, entry, h, count, buf1, buf2, t) do
    buf1 = [entry | buf1]
    count = count + 1

    if count == n do
      {reason, [h, {0, [], :lists.reverse(buf1)} | t]}
    else
      {reason, [h, {count, buf1, buf2} | t]}
    end
  end
  @doc """
  Creates a stream that drops every `nth` item from the enumerable.

  The first item is always dropped, unless `nth` is 0.

  `nth` must be a non-negative integer.

  ## Examples

      iex> stream = Stream.drop_every(1..10, 2)
      iex> Enum.to_list(stream)
      [2, 4, 6, 8, 10]

      iex> stream = Stream.drop_every(1..1000, 1)
      iex> Enum.to_list(stream)
      []

      iex> stream = Stream.drop_every([1, 2, 3, 4, 5], 0)
      iex> Enum.to_list(stream)
      [1, 2, 3, 4, 5]

  """
  @spec drop_every(Enumerable.t, non_neg_integer) :: Enumerable.t
  def drop_every(enum, nth)
  # nth == 0 drops nothing: wrap the enum unchanged.
  def drop_every(enum, 0), do: %Stream{enum: enum}
  def drop_every([], _nth), do: %Stream{enum: []}

  def drop_every(enum, nth) when is_integer(nth) and nth > 0 do
    lazy enum, nth, fn(f1) -> R.drop_every(nth, f1) end
  end
  @doc """
  Lazily drops elements of the enumerable while the given
  function returns `true`.

  ## Examples

      iex> stream = Stream.drop_while(1..10, &(&1 <= 5))
      iex> Enum.to_list(stream)
      [6, 7, 8, 9, 10]

  """
  @spec drop_while(Enumerable.t, (element -> as_boolean(term))) :: Enumerable.t
  def drop_while(enum, fun) do
    # Initial state `true` means "still in the dropping phase".
    lazy enum, true, fn(f1) -> R.drop_while(fun, f1) end
  end
@doc """
Executes the given function for each item.
Useful for adding side effects (like printing) to a stream.
## Examples
iex> stream = Stream.each([1, 2, 3], fn(x) -> send self(), x end)
iex> Enum.to_list(stream)
iex> receive do: (x when is_integer(x) -> x)
1
iex> receive do: (x when is_integer(x) -> x)
2
iex> receive do: (x when is_integer(x) -> x)
3
"""
@spec each(Enumerable.t, (element -> term)) :: Enumerable.t
def each(enum, fun) do
lazy enum, fn(f1) ->
fn(x, acc) ->
fun.(x)
f1.(x, acc)
end
end
end
  @doc """
  Maps the given `fun` over `enumerable` and flattens the result.

  This function returns a new stream built by appending the result of invoking `fun`
  on each element of `enumerable` together.

  ## Examples

      iex> stream = Stream.flat_map([1, 2, 3], fn(x) -> [x, x * 2] end)
      iex> Enum.to_list(stream)
      [1, 2, 2, 4, 3, 6]

      iex> stream = Stream.flat_map([1, 2, 3], fn(x) -> [[x]] end)
      iex> Enum.to_list(stream)
      [[1], [2], [3]]

  """
  @spec flat_map(Enumerable.t, (element -> Enumerable.t)) :: Enumerable.t
  def flat_map(enum, mapper) do
    # Built on transform/3 with a stateless (nil) accumulator: each element
    # expands into the enumerable returned by `mapper`.
    transform(enum, nil, fn val, nil -> {mapper.(val), nil} end)
  end
  @doc """
  Creates a stream that filters elements according to
  the given function on enumeration.

  ## Examples

      iex> stream = Stream.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end)
      iex> Enum.to_list(stream)
      [2]

  """
  @spec filter(Enumerable.t, (element -> as_boolean(term))) :: Enumerable.t
  def filter(enum, fun) do
    # Composes a filtering reducer on top of the downstream reducer f1.
    lazy enum, fn(f1) -> R.filter(fun, f1) end
  end
  @doc """
  Creates a stream that filters and then maps elements according
  to given functions.

  Exists for symmetry with `Enum.filter_map/3`.

  ## Examples

      iex> stream = Stream.filter_map(1..6, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2))
      iex> Enum.to_list(stream)
      [4, 8, 12]

  """
  @spec filter_map(Enumerable.t, (element -> as_boolean(term)), (element -> any)) :: Enumerable.t
  def filter_map(enum, filter, mapper) do
    # Single fused reducer: test with `filter`, then transform with `mapper`.
    lazy enum, fn(f1) -> R.filter_map(filter, mapper, f1) end
  end
@doc """
Creates a stream that emits a value after the given period `n`
in milliseconds.
The values emitted are an increasing counter starting at `0`.
This operation will block the caller by the given interval
every time a new item is streamed.
Do not use this function to generate a sequence of numbers.
If blocking the caller process is not necessary, use
`Stream.iterate(0, & &1 + 1)` instead.
## Examples
iex> Stream.interval(10) |> Enum.take(10)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
"""
@spec interval(non_neg_integer) :: Enumerable.t
def interval(n) do
unfold 0, fn(count) ->
Process.sleep(n)
{count, count + 1}
end
end
  @doc """
  Injects the stream values into the given collectable as a side-effect.

  This function is often used with `run/1` since any evaluation
  is delayed until the stream is executed. See `run/1` for an example.
  """
  @spec into(Enumerable.t, Collectable.t, (term -> term)) :: Enumerable.t
  def into(enum, collectable, transform \\ fn x -> x end) do
    # Streams can be represented as two-arity reducer functions; the real
    # work happens in do_into/5 when the stream is enumerated.
    &do_into(enum, collectable, transform, &1, &2)
  end

  defp do_into(enum, collectable, transform, acc, fun) do
    {initial, into} = Collectable.into(collectable)
    # Wrap the downstream reducer so every element is also pushed into the
    # collectable; the collectable state rides along inside the accumulator.
    composed = fn x, [acc | collectable] ->
      collectable = into.(collectable, {:cont, transform.(x)})
      {reason, acc} = fun.(x, acc)
      {reason, [acc | collectable]}
    end
    do_into(&Enumerable.reduce(enum, &1, composed), initial, into, acc)
  end

  defp do_into(reduce, collectable, into, {command, acc}) do
    try do
      reduce.({command, [acc | collectable]})
    catch
      kind, reason ->
        stacktrace = System.stacktrace
        # Abort the collectable before re-raising so it can clean up.
        into.(collectable, :halt)
        :erlang.raise(kind, reason, stacktrace)
    else
      {:suspended, [acc | collectable], continuation} ->
        # Keep wrapping the continuation so a later resume still finalizes
        # the collectable.
        {:suspended, acc, &do_into(continuation, collectable, into, &1)}
      {reason, [acc | collectable]} ->
        into.(collectable, :done)
        {reason, acc}
    end
  end
  @doc """
  Creates a stream that will apply the given function on
  enumeration.

  ## Examples

      iex> stream = Stream.map([1, 2, 3], fn(x) -> x * 2 end)
      iex> Enum.to_list(stream)
      [2, 4, 6]

  """
  @spec map(Enumerable.t, (element -> any)) :: Enumerable.t
  def map(enum, fun) do
    # Composes a mapping reducer on top of the downstream reducer f1.
    lazy enum, fn(f1) -> R.map(fun, f1) end
  end
  @doc """
  Creates a stream that will apply the given function on
  every `nth` item from the enumerable.

  The first item is always passed to the given function.

  `nth` must be a non-negative integer.

  ## Examples

      iex> stream = Stream.map_every(1..10, 2, fn(x) -> x * 2 end)
      iex> Enum.to_list(stream)
      [2, 2, 6, 4, 10, 6, 14, 8, 18, 10]

      iex> stream = Stream.map_every([1, 2, 3, 4, 5], 1, fn(x) -> x * 2 end)
      iex> Enum.to_list(stream)
      [2, 4, 6, 8, 10]

      iex> stream = Stream.map_every(1..5, 0, fn(x) -> x * 2 end)
      iex> Enum.to_list(stream)
      [1, 2, 3, 4, 5]

  """
  @spec map_every(Enumerable.t, non_neg_integer, (element -> any)) :: Enumerable.t
  def map_every(enum, nth, fun)
  # nth == 1 maps everything: same as plain map/2.
  def map_every(enum, 1, fun), do: map(enum, fun)
  # nth == 0 maps nothing: wrap the enum unchanged.
  def map_every(enum, 0, _fun), do: %Stream{enum: enum}
  def map_every([], _nth, _fun), do: %Stream{enum: []}

  def map_every(enum, nth, fun) when is_integer(nth) and nth > 0 do
    lazy enum, nth, fn(f1) -> R.map_every(nth, fun, f1) end
  end
@doc """
Creates a stream that will reject elements according to
the given function on enumeration.

## Examples

    iex> stream = Stream.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end)
    iex> Enum.to_list(stream)
    [1, 3]

"""
@spec reject(Enumerable.t, (element -> as_boolean(term))) :: Enumerable.t
def reject(enum, fun) do
  # Compose the rejecting reducer from Stream.Reducers onto the stream.
  lazy(enum, &R.reject(fun, &1))
end
@doc """
Runs the given stream.

This is useful when a stream needs to be run, for side effects,
and there is no interest in its return result.

## Examples

Open up a file, replace all `#` by `%` and stream to another file
without loading the whole file in memory:

    stream = File.stream!("code")
             |> Stream.map(&String.replace(&1, "#", "%"))
             |> Stream.into(File.stream!("new"))
             |> Stream.run

No computation will be done until we call one of the Enum functions
or `Stream.run/1`.
"""
@spec run(Enumerable.t) :: :ok
def run(stream) do
  # Force a full reduction while discarding every element and the result.
  discard = fn _element, _acc -> {:cont, nil} end
  _ = Enumerable.reduce(stream, {:cont, nil}, discard)
  :ok
end
@doc """
Creates a stream that applies the given function to each
element, emits the result and uses the same result as the accumulator
for the next computation. Uses the first element in the enumerable
as the starting value.

## Examples

    iex> stream = Stream.scan(1..5, &(&1 + &2))
    iex> Enum.to_list(stream)
    [1, 3, 6, 10, 15]

"""
@spec scan(Enumerable.t, (element, acc -> any)) :: Enumerable.t
def scan(enum, fun) do
  # :first flags that the first element seeds the accumulator.
  lazy(enum, :first, &R.scan2(fun, &1))
end

@doc """
Creates a stream that applies the given function to each
element, emits the result and uses the same result as the accumulator
for the next computation. Uses the given `acc` as the starting value.

## Examples

    iex> stream = Stream.scan(1..5, 0, &(&1 + &2))
    iex> Enum.to_list(stream)
    [1, 3, 6, 10, 15]

"""
@spec scan(Enumerable.t, acc, (element, acc -> any)) :: Enumerable.t
def scan(enum, acc, fun) do
  lazy(enum, acc, &R.scan3(fun, &1))
end
@doc """
Lazily takes the next `count` items from the enumerable and stops
enumeration.

If a negative `count` is given, the last `count` values will be taken.
For such, the collection is fully enumerated keeping up to `2 * count`
elements in memory. Once the end of the collection is reached,
the last `count` elements will be executed. Therefore, using
a negative `count` on an infinite collection will never return.

## Examples

    iex> stream = Stream.take(1..100, 5)
    iex> Enum.to_list(stream)
    [1, 2, 3, 4, 5]

    iex> stream = Stream.take(1..100, -5)
    iex> Enum.to_list(stream)
    [96, 97, 98, 99, 100]

    iex> stream = Stream.cycle([1, 2, 3]) |> Stream.take(5)
    iex> Enum.to_list(stream)
    [1, 2, 3, 1, 2]

"""
@spec take(Enumerable.t, integer) :: Enumerable.t
# Taking zero elements yields an empty stream without touching the enum.
def take(_enum, 0), do: %Stream{enum: []}
# Nothing to take from an empty list.
def take([], _count), do: %Stream{enum: []}
def take(enum, count) when is_integer(count) and count > 0 do
  lazy enum, count, fn(f1) -> R.take(f1) end
end
def take(enum, count) when is_integer(count) and count < 0 do
  # Negative counts require enumerating the source; delegate to
  # Enum.take/2, but only when this stream itself gets reduced.
  &Enumerable.reduce(Enum.take(enum, count), &1, &2)
end
@doc """
Creates a stream that takes every `nth` item from the enumerable.

The first item is always included, unless `nth` is 0.

`nth` must be a non-negative integer.

## Examples

    iex> stream = Stream.take_every(1..10, 2)
    iex> Enum.to_list(stream)
    [1, 3, 5, 7, 9]

    iex> stream = Stream.take_every([1, 2, 3, 4, 5], 1)
    iex> Enum.to_list(stream)
    [1, 2, 3, 4, 5]

    iex> stream = Stream.take_every(1..1000, 0)
    iex> Enum.to_list(stream)
    []

"""
@spec take_every(Enumerable.t, non_neg_integer) :: Enumerable.t
def take_every(enum, nth)
# Taking every 0th element yields nothing at all.
def take_every(_enum, 0), do: %Stream{enum: []}
# Empty-list shortcut avoids building a reducer.
def take_every([], _nth), do: %Stream{enum: []}
def take_every(enum, nth) when is_integer(nth) and nth > 0 do
  lazy enum, nth, fn(f1) -> R.take_every(nth, f1) end
end
@doc """
Lazily takes elements of the enumerable while the given
function returns `true`.

## Examples

    iex> stream = Stream.take_while(1..100, &(&1 <= 5))
    iex> Enum.to_list(stream)
    [1, 2, 3, 4, 5]

"""
@spec take_while(Enumerable.t, (element -> as_boolean(term))) :: Enumerable.t
def take_while(enum, fun) do
  # Compose the take-while reducer from Stream.Reducers onto the stream.
  lazy(enum, &R.take_while(fun, &1))
end
@doc """
Creates a stream that emits a single value after `n` milliseconds.

The value emitted is `0`. This operation will block the caller by
the given time until the item is streamed.

## Examples

    iex> Stream.timer(10) |> Enum.to_list
    [0]

"""
@spec timer(non_neg_integer) :: Enumerable.t
def timer(n) do
  # A timer is simply the first tick of an interval stream.
  n |> interval() |> take(1)
end
@doc """
Transforms an existing stream.

It expects an accumulator and a function that receives each stream item
and an accumulator, and must return a tuple containing a new stream
(often a list) with the new accumulator or a tuple with `:halt` as first
element and the accumulator as second.

Note: this function is similar to `Enum.flat_map_reduce/3` except the
latter returns both the flat list and accumulator, while this one returns
only the stream.

## Examples

`Stream.transform/3` is useful as it can be used as the basis to implement
many of the functions defined in this module. For example, we can implement
`Stream.take(enum, n)` as follows:

    iex> enum = 1..100
    iex> n = 3
    iex> stream = Stream.transform(enum, 0, fn i, acc ->
    ...>   if acc < n, do: {[i], acc + 1}, else: {:halt, acc}
    ...> end)
    iex> Enum.to_list(stream)
    [1, 2, 3]

"""
@spec transform(Enumerable.t, acc, fun) :: Enumerable.t
      when fun: (element, acc -> {Enumerable.t, acc} | {:halt, acc}),
           acc: any
def transform(enum, acc, reducer) do
  # Wrap the eager accumulator so the shared machinery can treat it as
  # a lazily computed start value; no after hook is installed.
  fn inner_acc, fun -> do_transform(enum, fn -> acc end, reducer, inner_acc, fun, nil) end
end

@doc """
Transforms an existing stream with function-based start and finish.

The accumulator is only calculated when transformation starts. It also
allows an after function to be given which is invoked when the stream
halts or completes.

This function can be seen as a combination of `Stream.resource/3` with
`Stream.transform/3`.
"""
@spec transform(Enumerable.t, (() -> acc), fun, (acc -> term)) :: Enumerable.t
      when fun: (element, acc -> {Enumerable.t, acc} | {:halt, acc}),
           acc: any
def transform(enum, start_fun, reducer, after_fun) do
  fn inner_acc, fun -> do_transform(enum, start_fun, reducer, inner_acc, fun, after_fun) end
end
# Entry point shared by transform/3 and transform/4: builds the inner
# consumer and a suspendable `next` fun over the source enumerable(s),
# then starts the main loop with the lazily computed user accumulator.
defp do_transform(enumerables, user_acc, user, inner_acc, fun, after_fun) do
  inner = &do_transform_each(&1, &2, fun)
  step = &do_transform_step(&1, &2)
  next = &Enumerable.reduce(enumerables, &1, step)
  do_transform(user_acc.(), user, fun, :cont, next, inner_acc, inner, after_fun)
end

# Outer consumer halted: shut the source down, run the after hook, report.
defp do_transform(user_acc, _user, _fun, _next_op, next, {:halt, inner_acc}, _inner, after_fun) do
  next.({:halt, []})
  do_after(after_fun, user_acc)
  {:halted, inner_acc}
end

# Outer consumer suspended: hand back a continuation capturing all state.
defp do_transform(user_acc, user, fun, next_op, next, {:suspend, inner_acc}, inner, after_fun) do
  {:suspended, inner_acc, &do_transform(user_acc, user, fun, next_op, next, &1, inner, after_fun)}
end

# The source itself is exhausted (next_op == :halt): finish up.
defp do_transform(user_acc, _user, _fun, :halt, _next, {_, inner_acc}, _inner, after_fun) do
  do_after(after_fun, user_acc)
  {:halted, inner_acc}
end

# Pull the next batch of values from the source. Exceptions raised while
# pulling still trigger the after hook before being re-raised.
defp do_transform(user_acc, user, fun, :cont, next, inner_acc, inner, after_fun) do
  try do
    next.({:cont, []})
  catch
    kind, reason ->
      stacktrace = System.stacktrace
      do_after(after_fun, user_acc)
      :erlang.raise(kind, reason, stacktrace)
  else
    {:suspended, vals, next} ->
      # do_transform_step/2 accumulated the values in reverse order.
      do_transform_user(:lists.reverse(vals), user_acc, user, fun, :cont, next, inner_acc, inner, after_fun)
    {_, vals} ->
      do_transform_user(:lists.reverse(vals), user_acc, user, fun, :halt, next, inner_acc, inner, after_fun)
  end
end

# Buffer drained: go back and pull more from the source.
defp do_transform_user([], user_acc, user, fun, next_op, next, inner_acc, inner, after_fun) do
  do_transform(user_acc, user, fun, next_op, next, inner_acc, inner, after_fun)
end

# Feed one buffered value to the user callback. Note the implicit try:
# `catch`/`else` belong directly to the function body, so exceptions from
# `user` halt the source and run the after hook before re-raising.
defp do_transform_user([val | vals], user_acc, user, fun, next_op, next, inner_acc, inner, after_fun) do
  user.(val, user_acc)
catch
  kind, reason ->
    stacktrace = System.stacktrace
    next.({:halt, []})
    do_after(after_fun, user_acc)
    :erlang.raise(kind, reason, stacktrace)
else
  {[], user_acc} ->
    # Nothing emitted for this value; continue with the next one.
    do_transform_user(vals, user_acc, user, fun, next_op, next, inner_acc, inner, after_fun)
  {list, user_acc} when is_list(list) ->
    # Fast path: emitted lists are reduced with the plain list reducer.
    do_list_transform(vals, user_acc, user, fun, next_op, next, inner_acc, inner,
      &Enumerable.List.reduce(list, &1, fun), after_fun)
  {:halt, user_acc} ->
    # The user callback requested termination.
    next.({:halt, []})
    do_after(after_fun, user_acc)
    {:halted, elem(inner_acc, 1)}
  {other, user_acc} ->
    # Generic enumerables go through the protocol with the tagged inner
    # consumer (see do_transform_each/3).
    do_enum_transform(vals, user_acc, user, fun, next_op, next, inner_acc, inner,
      &Enumerable.reduce(other, &1, inner), after_fun)
end

# Reduces a list emitted by the user callback into the downstream consumer,
# keeping suspension resumable and running cleanup on halt or error.
defp do_list_transform(vals, user_acc, user, fun, next_op, next, inner_acc, inner, reduce, after_fun) do
  try do
    reduce.(inner_acc)
  catch
    kind, reason ->
      stacktrace = System.stacktrace
      next.({:halt, []})
      do_after(after_fun, user_acc)
      :erlang.raise(kind, reason, stacktrace)
  else
    {:done, acc} ->
      do_transform_user(vals, user_acc, user, fun, next_op, next, {:cont, acc}, inner, after_fun)
    {:halted, acc} ->
      next.({:halt, []})
      do_after(after_fun, user_acc)
      {:halted, acc}
    {:suspended, acc, c} ->
      {:suspended, acc, &do_list_transform(vals, user_acc, user, fun, next_op, next, &1, inner, c, after_fun)}
  end
end

# Reduces a generic enumerable emitted by the user callback. The
# accumulator is tagged with :outer/:inner (see do_transform_each/3) to
# distinguish who requested a halt.
defp do_enum_transform(vals, user_acc, user, fun, next_op, next, {op, inner_acc}, inner, reduce, after_fun) do
  try do
    reduce.({op, [:outer | inner_acc]})
  catch
    kind, reason ->
      stacktrace = System.stacktrace
      next.({:halt, []})
      do_after(after_fun, user_acc)
      :erlang.raise(kind, reason, stacktrace)
  else
    # Only take into account outer halts when the op is not halt itself.
    # Otherwise, we were the ones wishing to halt, so we should just stop.
    {:halted, [:outer | acc]} when op != :halt ->
      do_transform_user(vals, user_acc, user, fun, next_op, next, {:cont, acc}, inner, after_fun)
    {:halted, [_ | acc]} ->
      next.({:halt, []})
      do_after(after_fun, user_acc)
      {:halted, acc}
    {:done, [_ | acc]} ->
      do_transform_user(vals, user_acc, user, fun, next_op, next, {:cont, acc}, inner, after_fun)
    {:suspended, [_ | acc], c} ->
      {:suspended, acc, &do_enum_transform(vals, user_acc, user, fun, next_op, next, &1, inner, c, after_fun)}
  end
end

# Runs the optional after hook exactly once with the final user accumulator.
defp do_after(nil, _user_acc), do: :ok
defp do_after(fun, user_acc), do: fun.(user_acc)

# Inner consumer: re-tags halts coming from the downstream consumer as
# :inner so do_enum_transform/10 can tell them apart from source halts.
defp do_transform_each(x, [:outer | acc], f) do
  case f.(x, acc) do
    {:halt, res} -> {:halt, [:inner | res]}
    {op, res} -> {op, [:outer | res]}
  end
end

# Source step: suspend after buffering each element so values are pulled
# one batch at a time.
defp do_transform_step(x, acc) do
  {:suspend, [x | acc]}
end
@doc """
Creates a stream that only emits elements if they are unique.

Keep in mind that, in order to know if an element is unique
or not, this function needs to store all unique values emitted
by the stream. Therefore, if the stream is infinite, the number
of items stored will grow infinitely, never being garbage collected.

## Examples

    iex> Stream.uniq([1, 2, 3, 3, 2, 1]) |> Enum.to_list
    [1, 2, 3]

"""
@spec uniq(Enumerable.t) :: Enumerable.t
def uniq(enum) do
  # Uniqueness on the elements themselves is uniq_by with identity.
  uniq_by(enum, fn term -> term end)
end

@doc false
# TODO: Remove on 2.0
# (hard-deprecated in elixir_dispatch)
def uniq(enum, fun) do
  uniq_by(enum, fun)
end

@doc """
Creates a stream that only emits elements if they are unique, by removing the
elements for which function `fun` returned duplicate items.

The function `fun` maps every element to a term which is used to
determine if two elements are duplicates.

Keep in mind that, in order to know if an element is unique
or not, this function needs to store all unique values emitted
by the stream. Therefore, if the stream is infinite, the number
of items stored will grow infinitely, never being garbage collected.

## Example

    iex> Stream.uniq_by([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) |> Enum.to_list
    [{1, :x}, {2, :y}]

    iex> Stream.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end) |> Enum.to_list
    [a: {:tea, 2}, c: {:coffee, 1}]

"""
@spec uniq_by(Enumerable.t, (element -> term)) :: Enumerable.t
def uniq_by(enum, fun) do
  # The %{} accumulator records every key seen so far.
  lazy(enum, %{}, &R.uniq_by(fun, &1))
end

@doc """
Creates a stream where each item in the enumerable will
be wrapped in a tuple alongside its index.

If an `offset` is given, we will index from the given offset instead of from zero.

## Examples

    iex> stream = Stream.with_index([1, 2, 3])
    iex> Enum.to_list(stream)
    [{1, 0}, {2, 1}, {3, 2}]

    iex> stream = Stream.with_index([1, 2, 3], 3)
    iex> Enum.to_list(stream)
    [{1, 3}, {2, 4}, {3, 5}]

"""
@spec with_index(Enumerable.t) :: Enumerable.t
@spec with_index(Enumerable.t, integer) :: Enumerable.t
def with_index(enum, offset \\ 0) do
  lazy(enum, offset, &R.with_index/1)
end
## Combiners

@doc """
Creates a stream that enumerates each enumerable in an enumerable.

## Examples

    iex> stream = Stream.concat([1..3, 4..6, 7..9])
    iex> Enum.to_list(stream)
    [1, 2, 3, 4, 5, 6, 7, 8, 9]

"""
@spec concat(Enumerable.t) :: Enumerable.t
def concat(enumerables) do
  # Concatenation is flat-mapping with the identity function.
  flat_map(enumerables, fn enumerable -> enumerable end)
end

@doc """
Creates a stream that enumerates the first argument, followed by the second.

## Examples

    iex> stream = Stream.concat(1..3, 4..6)
    iex> Enum.to_list(stream)
    [1, 2, 3, 4, 5, 6]

    iex> stream1 = Stream.cycle([1, 2, 3])
    iex> stream2 = Stream.cycle([4, 5, 6])
    iex> stream = Stream.concat(stream1, stream2)
    iex> Enum.take(stream, 6)
    [1, 2, 3, 1, 2, 3]

"""
@spec concat(Enumerable.t, Enumerable.t) :: Enumerable.t
def concat(first, second) do
  flat_map([first, second], fn enumerable -> enumerable end)
end

@doc """
Zips two collections together, lazily.

The zipping finishes as soon as any enumerable completes.

## Examples

    iex> concat = Stream.concat(1..3, 4..6)
    iex> cycle = Stream.cycle([:a, :b, :c])
    iex> Stream.zip(concat, cycle) |> Enum.to_list
    [{1, :a}, {2, :b}, {3, :c}, {4, :a}, {5, :b}, {6, :c}]

"""
@spec zip(Enumerable.t, Enumerable.t) :: Enumerable.t
def zip(left, right), do: zip([left, right])

@doc """
Zips corresponding elements from a collection of enumerables
into one stream of tuples.

The zipping finishes as soon as any enumerable completes.

## Examples

    iex> concat = Stream.concat(1..3, 4..6)
    iex> cycle = Stream.cycle(["foo", "bar", "baz"])
    iex> Stream.zip([concat, [:a, :b, :c], cycle]) |> Enum.to_list
    [{1, :a, "foo"}, {2, :b, "bar"}, {3, :c, "baz"}]

"""
@spec zip([Enumerable.t]) :: Enumerable.t
def zip(enumerables) do
  step = &do_zip_step(&1, &2)

  # Pair each enumerable with a suspendable reducer plus its state tag.
  enum_funs =
    for enum <- enumerables do
      {&Enumerable.reduce(enum, &1, step), :cont}
    end

  fn acc, fun -> do_zip(enum_funs, acc, fun) end
end
# This implementation of do_zip/3 works for any number of
# streams to zip, even if right now zip/2 only zips two streams.
defp do_zip(zips, {:halt, acc}, _fun) do
  # Consumer halted: close every zipped stream before reporting.
  do_zip_close(zips)
  {:halted, acc}
end

defp do_zip(zips, {:suspend, acc}, fun) do
  {:suspended, acc, &do_zip(zips, &1, fun)}
end

defp do_zip(zips, {:cont, acc}, callback) do
  try do
    do_zip_next_tuple(zips, acc, callback, [], [])
  catch
    kind, reason ->
      # Close all streams on error so no producer is leaked, then re-raise.
      stacktrace = System.stacktrace
      do_zip_close(zips)
      :erlang.raise(kind, reason, stacktrace)
  else
    {:next, buffer, acc} ->
      do_zip(buffer, acc, callback)
    {:done, _acc} = other ->
      other
  end
end

# do_zip_next_tuple/5 computes the next tuple formed by
# the next element of each zipped stream.
defp do_zip_next_tuple([{_, :halt} | zips], acc, _callback, _yielded_elems, buffer) do
  # A stream flagged :halt terminated on the previous round: stop zipping.
  do_zip_close(:lists.reverse(buffer, zips))
  {:done, acc}
end

defp do_zip_next_tuple([{fun, :cont} | zips], acc, callback, yielded_elems, buffer) do
  case fun.({:cont, []}) do
    {:suspended, [elem], fun} ->
      do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], [{fun, :cont} | buffer])
    {_, [elem]} ->
      # The stream yielded a final element; flag it :halt for next round.
      do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], [{fun, :halt} | buffer])
    {_, []} ->
      # The current zipped stream terminated, so we close all the streams
      # and return {:halted, acc} (which is returned as is by do_zip/3).
      do_zip_close(:lists.reverse(buffer, zips))
      {:done, acc}
  end
end

defp do_zip_next_tuple([] = _zips, acc, callback, yielded_elems, buffer) do
  # "yielded_elems" is a reversed list of results for the current iteration of
  # zipping: it needs to be reversed and converted to a tuple to have the next
  # tuple in the list resulting from zipping.
  zipped = List.to_tuple(:lists.reverse(yielded_elems))
  {:next, :lists.reverse(buffer), callback.(zipped, acc)}
end

# Sends :halt to every remaining stream so suspended producers clean up.
defp do_zip_close(zips) do
  :lists.foreach(fn {fun, _} -> fun.({:halt, []}) end, zips)
end

# Reducer used on each zipped stream: suspend after every single element
# so the streams can be advanced in lockstep.
defp do_zip_step(x, []) do
  {:suspend, [x]}
end
## Sources

@doc """
Creates a stream that cycles through the given enumerable,
infinitely.

## Examples

    iex> stream = Stream.cycle([1, 2, 3])
    iex> Enum.take(stream, 5)
    [1, 2, 3, 1, 2]

"""
@spec cycle(Enumerable.t) :: Enumerable.t
def cycle(enumerable)

# Lists are cycled with a simple unfold that keeps the original list
# around and restarts from it whenever the tail is exhausted.
def cycle(enumerable) when is_list(enumerable) do
  unfold {enumerable, enumerable}, fn
    {source, [h | t]} -> {h, {source, t}}
    {source = [h | t], []} -> {h, {source, t}}
  end
end

# Generic enumerables must be re-reduced from scratch on every cycle,
# so the untouched `outer` reducer is kept alongside the current one.
def cycle(enumerable) do
  fn acc, fun ->
    inner = &do_cycle_each(&1, &2, fun)
    outer = &Enumerable.reduce(enumerable, &1, inner)
    do_cycle(outer, outer, acc)
  end
end

defp do_cycle(_reduce, _cycle, {:halt, acc}) do
  {:halted, acc}
end

defp do_cycle(reduce, cycle, {:suspend, acc}) do
  {:suspended, acc, &do_cycle(reduce, cycle, &1)}
end

defp do_cycle(reduce, cycle, acc) do
  try do
    reduce.(acc)
  catch
    # Thrown by do_cycle_each/3 when the consumer asked to halt.
    {:stream_cycle, acc} ->
      {:halted, acc}
  else
    {state, acc} when state in [:done, :halted] ->
      # Source exhausted: start over from the untouched `cycle` reducer.
      do_cycle(cycle, cycle, {:cont, acc})
    {:suspended, acc, continuation} ->
      {:suspended, acc, &do_cycle(continuation, cycle, &1)}
  end
end

# Wraps the consumer so a :halt instruction escapes the inner reduction
# (which would otherwise keep restarting) via throw.
defp do_cycle_each(x, acc, f) do
  case f.(x, acc) do
    {:halt, h} -> throw({:stream_cycle, h})
    {_, _} = o -> o
  end
end

@doc """
Emits a sequence of values, starting with `start_value`. Successive
values are generated by calling `next_fun` on the previous value.

## Examples

    iex> Stream.iterate(0, &(&1+1)) |> Enum.take(5)
    [0, 1, 2, 3, 4]

"""
@spec iterate(element, (element -> element)) :: Enumerable.t
def iterate(start_value, next_fun) do
  # The {:ok, _}/{:next, _} states guarantee `start_value` itself is
  # emitted first, before `next_fun` is ever invoked.
  unfold({:ok, start_value}, fn
    {:ok, value} ->
      {value, {:next, value}}
    {:next, value} ->
      next = next_fun.(value)
      {next, {:next, next}}
  end)
end
@doc """
Returns a stream generated by calling `generator_fun` repeatedly.

## Examples

    # Although not necessary, let's seed the random algorithm
    iex> :rand.seed(:exsplus, {1, 2, 3})
    iex> Stream.repeatedly(&:rand.uniform/0) |> Enum.take(3)
    [0.40502929729990744, 0.45336720247823126, 0.04094511692041057]

"""
@spec repeatedly((() -> element)) :: Enumerable.t
def repeatedly(generator_fun) do
  fn acc, fun -> do_repeatedly(generator_fun, acc, fun) end
end

# Reduction loop: handle halt/suspend commands, otherwise emit the next
# generated value and recurse with the consumer's new command.
defp do_repeatedly(_generator_fun, {:halt, acc}, _fun) do
  {:halted, acc}
end

defp do_repeatedly(generator_fun, {:suspend, acc}, fun) do
  {:suspended, acc, &do_repeatedly(generator_fun, &1, fun)}
end

defp do_repeatedly(generator_fun, {:cont, acc}, fun) do
  next_acc = fun.(generator_fun.(), acc)
  do_repeatedly(generator_fun, next_acc, fun)
end
@doc """
Emits a sequence of values for the given resource.

Similar to `transform/3` but the initial accumulated value is
computed lazily via `start_fun` and executes an `after_fun` at
the end of enumeration (both in cases of success and failure).

Successive values are generated by calling `next_fun` with the
previous accumulator (the initial value being the result returned
by `start_fun`) and it must return a tuple containing a list
of items to be emitted and the next accumulator. The enumeration
finishes if it returns `{:halt, acc}`.

As the name says, this function is useful to stream values from
resources.

## Examples

    Stream.resource(fn -> File.open!("sample") end,
                    fn file ->
                      case IO.read(file, :line) do
                        data when is_binary(data) -> {[data], file}
                        _ -> {:halt, file}
                      end
                    end,
                    fn file -> File.close(file) end)

"""
@spec resource((() -> acc), (acc -> {[element], acc} | {:halt, acc}), (acc -> term)) :: Enumerable.t
def resource(start_fun, next_fun, after_fun) do
  # `start_fun` must open the resource anew on every enumeration, so it
  # is invoked inside the returned closure, not when the stream is built.
  fn acc, fun -> do_resource(start_fun.(), next_fun, acc, fun, after_fun) end
end
defp do_resource(next_acc, next_fun, {:suspend, acc}, fun, after_fun) do
  {:suspended, acc, &do_resource(next_acc, next_fun, &1, fun, after_fun)}
end

defp do_resource(next_acc, _next_fun, {:halt, acc}, _fun, after_fun) do
  # Consumer halted: release the resource before reporting.
  after_fun.(next_acc)
  {:halted, acc}
end

defp do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun) do
  try do
    # Optimize the most common cases
    case next_fun.(next_acc) do
      {[], next_acc} -> {:opt, {:cont, acc}, next_acc}
      {[v], next_acc} -> {:opt, fun.(v, acc), next_acc}
      {_, _} = other -> other
    end
  catch
    kind, reason ->
      # Exceptions from next_fun or the consumer still close the resource.
      stacktrace = System.stacktrace
      after_fun.(next_acc)
      :erlang.raise(kind, reason, stacktrace)
  else
    {:opt, acc, next_acc} ->
      # Fast path result (zero or one element already consumed above).
      do_resource(next_acc, next_fun, acc, fun, after_fun)
    {:halt, next_acc} ->
      do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun)
    {list, next_acc} when is_list(list) ->
      # Emitted lists go through the plain list reducer.
      do_list_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun,
        &Enumerable.List.reduce(list, &1, fun))
    {enum, next_acc} ->
      # Generic enumerables are reduced with the :outer/:inner tagged
      # consumer (see do_resource_each/3).
      inner = &do_resource_each(&1, &2, fun)
      do_enum_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun,
        &Enumerable.reduce(enum, &1, inner))
  end
end

# Reduces a list of emitted values, keeping suspensions resumable and
# closing the resource on halt or error.
defp do_list_resource(next_acc, next_fun, acc, fun, after_fun, reduce) do
  try do
    reduce.(acc)
  catch
    kind, reason ->
      stacktrace = System.stacktrace
      after_fun.(next_acc)
      :erlang.raise(kind, reason, stacktrace)
  else
    {:done, acc} ->
      do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
    {:halted, acc} ->
      do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun)
    {:suspended, acc, c} ->
      {:suspended, acc, &do_list_resource(next_acc, next_fun, &1, fun, after_fun, c)}
  end
end

# Reduces a generic emitted enumerable. The accumulator is tagged
# :outer/:inner so we can tell whether the emitted enumerable finished
# (continue pulling) or the downstream consumer halted (stop for good).
defp do_enum_resource(next_acc, next_fun, {op, acc}, fun, after_fun, reduce) do
  try do
    reduce.({op, [:outer | acc]})
  catch
    kind, reason ->
      stacktrace = System.stacktrace
      after_fun.(next_acc)
      :erlang.raise(kind, reason, stacktrace)
  else
    {:halted, [:outer | acc]} ->
      do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
    {:halted, [:inner | acc]} ->
      do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun)
    {:done, [_ | acc]} ->
      do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
    {:suspended, [_ | acc], c} ->
      {:suspended, acc, &do_enum_resource(next_acc, next_fun, &1, fun, after_fun, c)}
  end
end

# Re-tags halts from the downstream consumer as :inner so
# do_enum_resource/6 can distinguish them from emitted-enum exhaustion.
defp do_resource_each(x, [:outer | acc], f) do
  case f.(x, acc) do
    {:halt, res} -> {:halt, [:inner | res]}
    {op, res} -> {op, [:outer | res]}
  end
end
@doc """
Emits a sequence of values for the given accumulator.

Successive values are generated by calling `next_fun` with the previous
accumulator and it must return a tuple with the current value and next
accumulator. The enumeration finishes if it returns `nil`.

## Examples

    iex> Stream.unfold(5, fn 0 -> nil; n -> {n, n-1} end) |> Enum.to_list()
    [5, 4, 3, 2, 1]

"""
@spec unfold(acc, (acc -> {element, acc} | nil)) :: Enumerable.t
def unfold(next_acc, next_fun) do
  fn acc, fun -> do_unfold(next_acc, next_fun, acc, fun) end
end

# Reduction loop: honour halt/suspend commands; otherwise ask `next_fun`
# for the next value (nil terminates) and feed it to the consumer.
defp do_unfold(_next_acc, _next_fun, {:halt, acc}, _fun) do
  {:halted, acc}
end

defp do_unfold(next_acc, next_fun, {:suspend, acc}, fun) do
  {:suspended, acc, &do_unfold(next_acc, next_fun, &1, fun)}
end

defp do_unfold(next_acc, next_fun, {:cont, acc}, fun) do
  case next_fun.(next_acc) do
    nil -> {:done, acc}
    {value, new_next} -> do_unfold(new_next, next_fun, fun.(value, acc), fun)
  end
end
## Helpers

# Inline the lazy/N constructors: they are tiny and called by nearly
# every function in this module.
@compile {:inline, lazy: 2, lazy: 3, lazy: 4}

# Compose a reducer onto an existing (not yet terminated) stream, or wrap
# a plain enumerable into a new %Stream{}. The `done: nil` guard prevents
# composing past a stream that already has a termination handler, which
# must run in its original position.
defp lazy(%Stream{done: nil, funs: funs} = lazy, fun),
  do: %{lazy | funs: [fun | funs]}
defp lazy(enum, fun),
  do: %Stream{enum: enum, funs: [fun]}

# Same, for stateful reducers: the accumulator travels in `accs`.
defp lazy(%Stream{done: nil, funs: funs, accs: accs} = lazy, acc, fun),
  do: %{lazy | funs: [fun | funs], accs: [acc | accs]}
defp lazy(enum, acc, fun),
  do: %Stream{enum: enum, funs: [fun], accs: [acc]}

# Same, for reducers that also need a `done` termination callback.
defp lazy(%Stream{done: nil, funs: funs, accs: accs} = lazy, acc, fun, done),
  do: %{lazy | funs: [fun | funs], accs: [acc | accs], done: done}
defp lazy(enum, acc, fun, done),
  do: %Stream{enum: enum, funs: [fun], accs: [acc], done: done}
end
defimpl Enumerable, for: Stream do
  @compile :inline_list_funs

  # The user accumulator is wrapped in a one-element list so it can travel
  # alongside the stream's own accumulators during reduction.
  def reduce(lazy, acc, fun) do
    do_reduce(lazy, acc, fn x, [acc] ->
      {reason, acc} = fun.(x, acc)
      {reason, [acc]}
    end)
  end

  # Count and membership cannot be computed without enumerating, so fall
  # back to the default (reduce-based) implementations.
  def count(_lazy) do
    {:error, __MODULE__}
  end

  def member?(_lazy, _value) do
    {:error, __MODULE__}
  end

  # Compose all stored reducer funs (innermost first) around the consumer
  # and kick off the reduction over the source enumerable.
  defp do_reduce(%Stream{enum: enum, funs: funs, accs: accs, done: done}, acc, fun) do
    composed = :lists.foldl(fn fun, acc -> fun.(acc) end, fun, funs)
    do_each(&Enumerable.reduce(enum, &1, composed),
            done && {done, fun}, :lists.reverse(accs), acc)
  end

  # Drives the composed reduction, unwrapping the stacked accumulators and
  # invoking the termination handler (if any) when enumeration finishes.
  defp do_each(reduce, done, accs, {command, acc}) do
    case reduce.({command, [acc | accs]}) do
      {:suspended, [acc | accs], continuation} ->
        {:suspended, acc, &do_each(continuation, done, accs, &1)}
      {:halted, accs} ->
        do_done {:halted, accs}, done
      {:done, accs} ->
        do_done {:done, accs}, done
    end
  end

  # Runs the stream's `done` callback once enumeration finishes, giving it
  # a chance to emit one final element into the consumer.
  defp do_done({reason, [acc | _]}, nil), do: {reason, acc}
  defp do_done({reason, [acc | t]}, {done, fun}) do
    [h | _] = Enum.reverse(t)
    case done.([acc, h], fun) do
      {:cont, [acc | _]} -> {reason, acc}
      {:halt, [acc | _]} -> {:halted, acc}
      {:suspend, [acc | _]} -> {:suspended, acc, &({:done, elem(&1, 1)})}
    end
  end
end
defimpl Inspect, for: Stream do
  import Inspect.Algebra

  # Render streams as #Stream<[enum: ..., funs: [...]]>, with the
  # composed funs listed in application (outermost-first) order.
  def inspect(%{enum: enum, funs: funs}, opts) do
    contents = [enum: enum, funs: Enum.reverse(funs)]
    concat(["#Stream<", to_doc(contents, opts), ">"])
  end
end
| 30.427677 | 112 | 0.615267 |
93f8cb2f288998b999cec8aacc56ca30314b476e | 146 | ex | Elixir | test/support/repo.ex | brunolouvem/ecto_auto_filter | 48039fcfb9edeb9e2c5d076f298c0a2811dc4a48 | [
"MIT"
] | 2 | 2021-01-12T14:59:09.000Z | 2021-01-12T14:59:50.000Z | test/support/repo.ex | brunolouvem/ecto_auto_filter | 48039fcfb9edeb9e2c5d076f298c0a2811dc4a48 | [
"MIT"
] | null | null | null | test/support/repo.ex | brunolouvem/ecto_auto_filter | 48039fcfb9edeb9e2c5d076f298c0a2811dc4a48 | [
"MIT"
] | null | null | null | defmodule EctoAutoFilter.Test.Repo do
@moduledoc false
use Ecto.Repo,
otp_app: :ecto_auto_filter,
adapter: Ecto.Adapters.Postgres
end
| 20.857143 | 37 | 0.760274 |
93f8eac25bb4564bf42dd5698ed2cc66c6d4ccd4 | 13,482 | ex | Elixir | lib/cldr/plug/plug_set_locale.ex | szTheory/cldr | 30e67d2e5ff13a61c29586a7cfad79995b070e1a | [
"Apache-2.0"
] | null | null | null | lib/cldr/plug/plug_set_locale.ex | szTheory/cldr | 30e67d2e5ff13a61c29586a7cfad79995b070e1a | [
"Apache-2.0"
] | null | null | null | lib/cldr/plug/plug_set_locale.ex | szTheory/cldr | 30e67d2e5ff13a61c29586a7cfad79995b070e1a | [
"Apache-2.0"
] | null | null | null | if Code.ensure_loaded?(Plug) do
defmodule Cldr.Plug.SetLocale do
@moduledoc """
Sets the Cldr and/or Gettext locales derived from the accept-language
header, a query parameter, a url parameter, a body parameter or the
session.
## Options
* `:apps` - list of apps for which to set locale.
See the apps configuration section.
* `:from` - where in the request to look for the locale.
The default is `[:session, :accept_language]`. The valid
options are:
* `:accept_language` will parse the `accept-language` header
and finds the best matched configured locale
* `:path` will look for a locale by examining `conn.path_params`
* `:query` will look for a locale by examining `conn.query_params`
* `:body` will look for a locale by examining `conn.body_params`
* `:cookie` will look for a locale in the request cookie(s)
* `:session` will look for a locale in the session
* `:default` - the default locale to set if no locale is
found by other configured methods. It can be a string like "en"
or a `Cldr.LanguageTag` struct. The default is
`Cldr.default_locale/1`
* `:gettext` - the name of the `Gettext` backend module upon which
the locale will be validated. This option is not required if a
gettext module is specified in the `:apps` configuration.
* `:cldr` - the name of the `Cldr` backend module upon which
the locale will be validated. This option is not required if a
gettext module is specified in the `:apps` configuration.
* `:session_key` - defines the key used to look for the locale
in the session. The default is `locale`.
If a locale is found then `conn.private[:cldr_locale]` is also set.
It can be retrieved with `Cldr.Plug.SetLocale.get_cldr_locale/1`.
## App configuration
The `:apps` configuration key defines which applications will have
their locale *set* by this plug.
`Cldr.Plug.SetLocale` can set the locale for `cldr`, `gettext` or both.
The basic configuration of the `:app` key is an atom, or list of atoms,
containing one or both of these app names. For example:
apps: :cldr
apps: :gettext
apps: [:cldr, :gettext]
In each of these cases, the locale is set globally
**for the current process**.
Sometimes setting the locale for only a specific backend is required.
In this case, configure the `:apps` key as a keyword list pairing an
application with the required backend module. The value `:global` signifies
setting the local for the global context. For example:
apps: [cldr: MyApp.Cldr]
apps: [gettext: MyAppGettext]
apps: [gettext: :global]
apps: [cldr: MyApp.Cldr, gettext: MyAppGettext]
## Examples
# Will set the global locale for the current process
# for both `:cldr` and `:gettext`
plug Cldr.Plug.SetLocale,
apps: [:cldr, :gettext],
from: [:query, :path, :body, :cookie, :accept_language],
param: "locale",
gettext: GetTextModule,
cldr: MyApp.Cldr
session_key: "cldr_locale"
# Will set the backend only locale for the current process
# for both `:cldr` and `:gettext`
plug Cldr.Plug.SetLocale,
apps: [cldr: MyApp.Cldr, gettext: GetTextModule],
from: [:query, :path, :body, :cookie, :accept_language],
param: "locale",
session_key: "cldr_locale"
# Will set the backend only locale for the current process
# for `:cldr` and globally for `:gettext`
plug Cldr.Plug.SetLocale,
apps: [cldr: MyApp.Cldr, gettext: :global],
from: [:query, :path, :body, :cookie, :accept_language],
param: "locale",
session_key: "cldr_locale"
"""
import Plug.Conn
require Logger
alias Cldr.AcceptLanguage
alias Cldr.Config

# Defaults applied by init/1 when the corresponding option is omitted.
@default_apps [cldr: :global]
@default_from [:session, :accept_language]
@default_param_name "locale"
@default_session_key "cldr_locale"

# Closed sets of permitted values for the :from and :apps options.
@from_options [:accept_language, :path, :body, :query, :session, :cookie]
@app_options [:cldr, :gettext]

# HTTP request header consulted by the :accept_language strategy.
@language_header "accept-language"
@doc false
def init(options) do
  # Each validator normalises one option (raising on invalid input).
  # They run in this fixed order because later validators may rely on
  # values established by earlier ones; each reads the raw option from
  # the original `options` while threading the validated result along.
  validators = [
    &validate_apps(&1, options[:apps]),
    &validate_from(&1, options[:from]),
    &validate_param(&1, options[:param]),
    &validate_cldr(&1, options[:cldr]),
    &validate_gettext(&1, options[:gettext]),
    &validate_default(&1, options[:default]),
    &validate_session_key(&1, options[:session_key])
  ]

  Enum.reduce(validators, options, fn validator, acc -> validator.(acc) end)
end
@doc false
def call(conn, options) do
  # Resolve the locale from the configured sources, falling back to the
  # configured default (which may itself be nil).
  locale = locale_from_params(conn, options[:from], options) || options[:default]

  if locale do
    Enum.each(options[:apps], &put_locale(&1, locale, options))
  end

  # Always record the outcome (possibly nil) for get_cldr_locale/1.
  put_private(conn, :cldr_locale, locale)
end
@doc """
Return the locale set by `Cldr.Plug.SetLocale`
"""
def get_cldr_locale(conn) do
  # Reads the value stored by call/2; nil when the plug has not run or
  # no locale was resolved.
  Map.get(conn.private, :cldr_locale)
end
  # Tries each configured source in order; the first source that yields a
  # valid locale halts the reduce. Returns nil when no source matches.
  defp locale_from_params(conn, from, options) do
    Enum.reduce_while(from, nil, fn param, _acc ->
      conn
      |> fetch_param(param, options[:param], options)
      |> return_if_valid_locale
    end)
  end
  # Each clause extracts a candidate locale from one source and returns
  # {:ok, locale} | {:error, reason} | nil, validated against options[:cldr].

  # :accept_language — negotiate the best match from the request header.
  defp fetch_param(conn, :accept_language, _param, options) do
    case get_req_header(conn, @language_header) do
      [accept_language] -> AcceptLanguage.best_match(accept_language, options[:cldr])
      [accept_language | _] -> AcceptLanguage.best_match(accept_language, options[:cldr])
      [] -> nil
    end
  end

  # :query when the query string was never parsed — fetch it, then retry.
  defp fetch_param(
         %Plug.Conn{query_params: %Plug.Conn.Unfetched{aspect: :query_params}} = conn,
         :query,
         param,
         options
       ) do
    conn = fetch_query_params(conn)
    fetch_param(conn, :query, param, options)
  end

  # :query — read options[:param] out of the parsed query params.
  defp fetch_param(conn, :query, param, options) do
    conn
    |> Map.get(:query_params)
    |> Map.get(param)
    |> Cldr.validate_locale(options[:cldr])
  end

  # :path — read the locale from a named path segment.
  defp fetch_param(conn, :path, param, options) do
    conn
    |> Map.get(:path_params)
    |> Map.get(param)
    |> Cldr.validate_locale(options[:cldr])
  end

  # :body — read the locale from the parsed request body.
  defp fetch_param(conn, :body, param, options) do
    conn
    |> Map.get(:body_params)
    |> Map.get(param)
    |> Cldr.validate_locale(options[:cldr])
  end

  # :session — read the locale stored under options[:session_key].
  defp fetch_param(conn, :session, _param, options) do
    conn
    |> get_session(options[:session_key])
    |> Cldr.validate_locale(options[:cldr])
  end

  # :cookie — read the locale from a request cookie named options[:param].
  defp fetch_param(conn, :cookie, param, options) do
    conn
    |> Map.get(:cookies)
    |> Map.get(param)
    |> Cldr.validate_locale(options[:cldr])
  end
defp return_if_valid_locale(nil) do
{:cont, nil}
end
defp return_if_valid_locale({:error, _}) do
{:cont, nil}
end
defp return_if_valid_locale({:ok, locale}) do
{:halt, locale}
end
  # Applies the resolved locale to one {app, scope} pair from options[:apps].

  # Process-wide Cldr locale.
  defp put_locale({:cldr, :global}, locale, _options) do
    Cldr.put_locale(locale)
  end

  # Deprecated option :all. Use :global
  defp put_locale({:cldr, :all}, locale, _options) do
    Cldr.put_locale(locale)
  end

  # Backend-scoped Cldr locale.
  defp put_locale({:cldr, backend}, locale, _options) do
    backend.put_locale(locale)
  end

  # The CLDR locale has no Gettext equivalent — warn and set nothing.
  defp put_locale({:gettext, _}, %Cldr.LanguageTag{gettext_locale_name: nil} = locale, _options) do
    Logger.warn(
      "Locale #{inspect(locale.requested_locale_name)} does not have a known " <>
        "Gettext locale. No Gettext locale has been set."
    )
    nil
  end

  # Global Gettext locale. Gettext is invoked via apply/3 so this module
  # compiles even when Gettext is not a dependency.
  defp put_locale(
         {:gettext, :global},
         %Cldr.LanguageTag{gettext_locale_name: locale_name},
         _options
       ) do
    {:ok, apply(Gettext, :put_locale, [locale_name])}
  end

  # Deprecated option :all. Use :global
  defp put_locale(
         {:gettext, :all},
         %Cldr.LanguageTag{gettext_locale_name: locale_name},
         _options
       ) do
    {:ok, apply(Gettext, :put_locale, [locale_name])}
  end

  # Backend-scoped Gettext locale.
  defp put_locale(
         {:gettext, backend},
         %Cldr.LanguageTag{gettext_locale_name: locale_name},
         _options
       ) do
    {:ok, apply(Gettext, :put_locale, [backend, locale_name])}
  end
  # Normalizes the :apps option into a canonical [{app, scope}] keyword list.
  defp validate_apps(options, nil), do: Keyword.put(options, :apps, @default_apps)

  # A single bare app atom is wrapped in a list and re-validated.
  defp validate_apps(options, app) when is_atom(app) do
    options
    |> Keyword.put(:apps, [app])
    |> validate_apps([app])
  end

  # A list of apps: each entry may be {app, scope} or a bare app atom
  # (which defaults to :global scope). Invalid entries raise.
  defp validate_apps(options, apps) when is_list(apps) do
    app_config =
      Enum.map(apps, fn
        {app, scope} ->
          validate_app_and_scope!(app, scope)
          {app, scope}
        app ->
          validate_app_and_scope!(app, nil)
          {app, :global}
      end)
    Keyword.put(options, :apps, app_config)
  end

  defp validate_apps(_options, apps) do
    raise(
      ArgumentError,
      "Invalid apps list: #{inspect(apps)}."
    )
  end
  # Asserts one {app, scope} pair is valid; returns :ok or raises.
  defp validate_app_and_scope!(app, nil) when app in @app_options do
    :ok
  end

  defp validate_app_and_scope!(app, :global) when app in @app_options do
    :ok
  end

  # Deprecated option :all. Use :global
  defp validate_app_and_scope!(app, :all) when app in @app_options do
    :ok
  end

  # A module scope must refer to a loadable backend module.
  defp validate_app_and_scope!(app, module) when not is_nil(app) and is_atom(module) do
    cond do
      app in @app_options && Code.ensure_loaded?(module) ->
        :ok
      app in @app_options ->
        raise ArgumentError, "Backend module #{inspect(module)} is unavailable"
      true ->
        raise ArgumentError, "App #{inspect(app)} is unknown"
    end
  end

  defp validate_app_and_scope!(app, scope) do
    raise(
      ArgumentError,
      "Invalid app #{inspect(app)} or scope #{inspect(scope)} detected."
    )
  end
  # Normalizes the :from option into a list of valid locale sources.
  defp validate_from(options, nil), do: Keyword.put(options, :from, @default_from)

  # A single source atom is wrapped in a list and re-validated.
  defp validate_from(options, from) when is_atom(from) do
    options
    |> Keyword.put(:from, [from])
    |> validate_from([from])
  end

  # Every listed source must be one of @from_options; otherwise raise.
  defp validate_from(options, from) when is_list(from) do
    Enum.each(from, fn f ->
      if f not in @from_options do
        raise(
          ArgumentError,
          "Invalid :from option #{inspect(f)} detected. " <>
            " Valid :from options are #{inspect(@from_options)}"
        )
      end
    end)
    options
  end

  defp validate_from(_options, from) do
    raise(
      ArgumentError,
      "Invalid :from list #{inspect(from)} detected. " <>
        "Valid from options are #{inspect(@from_options)}"
    )
  end
  # Ensures :param (the query/path/body/cookie parameter name) is a string,
  # defaulting to @default_param_name.
  defp validate_param(options, nil), do: Keyword.put(options, :param, @default_param_name)
  defp validate_param(options, param) when is_binary(param), do: options
  # NOTE(review): this clause delegates an atom :param to validate_from/2,
  # which validates it against @from_options and rewrites the :from option —
  # not :param. That looks like a copy/paste slip; confirm intent before
  # relying on atom values for :param.
  defp validate_param(options, param) when is_atom(param) do
    validate_from(options, param)
  end
  defp validate_param(_options, param) do
    raise(
      ArgumentError,
      "Invalid :param #{inspect(param)} detected. " <> ":param must be a string"
    )
  end
  # Fills :default with the Cldr backend's default locale when unset.
  # Runs after validate_cldr/2, so options[:cldr] is a known backend here.
  defp validate_default(options, nil) do
    default = options[:cldr].default_locale()
    Keyword.put(options, :default, default)
  end

  # An explicit default must validate against the backend; stores the
  # canonical %Cldr.LanguageTag{} rather than the raw user value.
  defp validate_default(options, default) do
    case Cldr.validate_locale(default, options[:cldr]) do
      {:ok, locale} -> Keyword.put(options, :default, locale)
      {:error, {exception, reason}} -> raise exception, reason
    end
  end
  # No configured gettext. See if there is one configured
  # on the Cldr backend
  defp validate_gettext(options, nil) do
    gettext = options[:cldr].__cldr__(:config).gettext
    # Only adopt the backend's gettext when :gettext is also listed in :apps.
    if gettext && get_in(options, [:apps, :gettext]) do
      Keyword.put(options, :gettext, gettext)
    else
      options
    end
  end

  # An explicit gettext module must be compiled/available.
  defp validate_gettext(options, gettext) do
    case Code.ensure_compiled(gettext) do
      {:error, _} ->
        raise ArgumentError, "Gettext module #{inspect(gettext)} is not known"
      {:module, _} ->
        options
    end
  end
defp validate_session_key(options, nil),
do: Keyword.put(options, :session_key, @default_session_key)
defp validate_session_key(options, session_key) when is_binary(session_key), do: options
defp validate_session_key(_options, session_key) do
raise(
ArgumentError,
"Invalid :session_key #{inspect(session_key)} detected. " <>
":session_key must be a string"
)
end
  # No explicit :cldr option — try to derive the backend from the
  # :apps entry (apps: [cldr: MyApp.Cldr]). A :global/:all scope carries
  # no backend module, so it raises here.
  defp validate_cldr(options, nil) do
    cldr_backend = Keyword.get(options[:apps], :cldr)
    if cldr_backend && cldr_backend not in [:all, :global] do
      options = Keyword.put(options, :cldr, cldr_backend)
      validate_cldr(options, cldr_backend)
    else
      raise ArgumentError, "A Cldr backend module must be configured"
    end
  end

  # An explicit backend must be compiled and expose the Cldr backend API
  # (__cldr__/1), otherwise raise.
  defp validate_cldr(options, backend) when is_atom(backend) do
    unless Config.ensure_compiled?(backend) and function_exported?(backend, :__cldr__, 1) do
      raise ArgumentError,
            "#{inspect(backend)} is either not known or does not appear to be a Cldr backend module"
    else
      Keyword.put(options, :cldr, backend)
    end
  end
end
end
| 30.710706 | 102 | 0.623572 |
93f8f922b8ccbb198a169c161be530d291166eaf | 3,075 | exs | Elixir | test/content/audio/stopped_train_test.exs | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-01-24T12:39:05.000Z | 2022-01-24T12:39:05.000Z | test/content/audio/stopped_train_test.exs | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 40 | 2021-05-05T10:14:25.000Z | 2022-03-31T18:34:15.000Z | test/content/audio/stopped_train_test.exs | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-03-20T21:08:12.000Z | 2022-03-20T21:08:12.000Z | defmodule Content.Audio.StoppedTrainTest do
use ExUnit.Case, async: true
import ExUnit.CaptureLog
describe "to_params/1" do
test "Serializes correctly" do
audio = %Content.Audio.StoppedTrain{destination: :alewife, stops_away: 2}
assert Content.Audio.to_params(audio) ==
{:canned,
{"115",
[
"501",
"21000",
"507",
"21000",
"4000",
"21000",
"533",
"21000",
"641",
"21000",
"5002",
"21000",
"534"
], :audio}}
end
test "Uses singular 'stop' if 1 stop away" do
audio = %Content.Audio.StoppedTrain{destination: :alewife, stops_away: 1}
assert Content.Audio.to_params(audio) ==
{:canned,
{"115",
[
"501",
"21000",
"507",
"21000",
"4000",
"21000",
"533",
"21000",
"641",
"21000",
"5001",
"21000",
"535"
], :audio}}
end
test "Returns :ad_hoc params for southbound destination" do
audio = %Content.Audio.StoppedTrain{destination: :southbound, stops_away: 2}
assert Content.Audio.to_params(audio) ==
{:ad_hoc, {"The next Southbound train is stopped 2 stops away", :audio}}
end
test "Returns ad_hoc audio for valid destinations" do
audio = %Content.Audio.StoppedTrain{
destination: :westbound,
stops_away: 2
}
assert Content.Audio.to_params(audio) ==
{:ad_hoc, {"The next Westbound train is stopped 2 stops away", :audio}}
end
test "Handles unknown destinations gracefully" do
audio = %Content.Audio.StoppedTrain{destination: :unknown, stops_away: 2}
log =
capture_log([level: :error], fn ->
assert Content.Audio.to_params(audio) == nil
end)
assert log =~ "unknown destination"
end
end
describe "from_message/1" do
test "Converts a stopped train message with known headsign" do
msg = %Content.Message.StoppedTrain{destination: :forest_hills, stops_away: 1}
assert Content.Audio.StoppedTrain.from_message(msg) ==
%Content.Audio.StoppedTrain{destination: :forest_hills, stops_away: 1}
end
test "Returns nil for irrelevant message" do
msg = %Content.Message.Empty{}
assert Content.Audio.StoppedTrain.from_message(msg) == nil
end
test "when the trian is stopped 0 stops away, does not announce that it is stopped 0 stops away" do
msg = %Content.Message.StoppedTrain{destination: :forest_hills, stops_away: 0}
assert Content.Audio.StoppedTrain.from_message(msg) == nil
end
end
end
| 30.445545 | 103 | 0.524878 |
93f934a71382d9ce4acdc527ab7b2b4086f449db | 201 | exs | Elixir | config/test.exs | DockYard/inquisitor | bf219f5659c113a6a76704c64002b0eb6fc49730 | [
"MIT"
] | 156 | 2016-02-07T03:29:43.000Z | 2022-02-25T23:07:00.000Z | config/test.exs | DockYard/inquisitor | bf219f5659c113a6a76704c64002b0eb6fc49730 | [
"MIT"
] | 13 | 2016-06-24T02:10:28.000Z | 2020-07-04T07:53:34.000Z | config/test.exs | DockYard/inquisitor | bf219f5659c113a6a76704c64002b0eb6fc49730 | [
"MIT"
] | 20 | 2016-05-09T16:46:39.000Z | 2022-01-27T15:54:14.000Z | use Mix.Config
config :inquisitor, Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "inquisitor_test",
pool: Ecto.Adapters.SQL.Sandbox,
size: 1
| 20.1 | 34 | 0.721393 |
93f93e196979c59ec76dc9e44d947eda7530fc84 | 851 | exs | Elixir | config/dev.exs | appunite/imager | b41f76be975faf4d255ad0a5d8e972df1924356b | [
"MIT"
] | 55 | 2018-10-02T14:56:04.000Z | 2021-12-10T21:08:07.000Z | config/dev.exs | appunite/imager | b41f76be975faf4d255ad0a5d8e972df1924356b | [
"MIT"
] | 27 | 2018-10-02T14:57:09.000Z | 2019-04-11T07:51:11.000Z | config/dev.exs | appunite/imager | b41f76be975faf4d255ad0a5d8e972df1924356b | [
"MIT"
] | 3 | 2018-10-12T18:31:41.000Z | 2020-04-22T15:26:25.000Z | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :imager, ImagerWeb.Endpoint,
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
config :imager, :port, 4000
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
config :imager, :stores, %{
"local" => %{
store: {Imager.Store.Local, dir: "test/fixtures/"},
cache: {Imager.Store.Blackhole, []}
}
}
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 28.366667 | 68 | 0.730905 |
93f96843d50f81aae5babdf128b8d689e220cc75 | 7,042 | exs | Elixir | server/priv/repo/migrations/20211116214523_create_realtime_apply_rls_function.exs | profencer/realtime | b3a20e8278276a98d47c2c938abe73cfd9e69a63 | [
"Apache-2.0"
] | 1 | 2021-05-04T22:04:46.000Z | 2021-05-04T22:04:46.000Z | server/priv/repo/migrations/20211116214523_create_realtime_apply_rls_function.exs | profencer/realtime | b3a20e8278276a98d47c2c938abe73cfd9e69a63 | [
"Apache-2.0"
] | null | null | null | server/priv/repo/migrations/20211116214523_create_realtime_apply_rls_function.exs | profencer/realtime | b3a20e8278276a98d47c2c938abe73cfd9e69a63 | [
"Apache-2.0"
] | null | null | null | defmodule Realtime.RLS.Repo.Migrations.CreateRealtimeApplyRlsFunction do
use Ecto.Migration
def change do
execute "create type realtime.action as enum ('INSERT', 'UPDATE', 'DELETE', 'TRUNCATE', 'ERROR');"
execute "create type realtime.wal_rls as (
wal jsonb,
is_rls_enabled boolean,
users uuid[],
errors text[]
);"
execute "create function realtime.apply_rls(wal jsonb, max_record_bytes int = 1024 * 1024)
returns realtime.wal_rls
language plpgsql
volatile
as $$
declare
-- Regclass of the table e.g. public.notes
entity_ regclass = (quote_ident(wal ->> 'schema') || '.' || quote_ident(wal ->> 'table'))::regclass;
-- I, U, D, T: insert, update ...
action realtime.action = (
case wal ->> 'action'
when 'I' then 'INSERT'
when 'U' then 'UPDATE'
when 'D' then 'DELETE'
when 'T' then 'TRUNCATE'
else 'ERROR'
end
);
-- Is row level security enabled for the table
is_rls_enabled bool = relrowsecurity from pg_class where oid = entity_;
-- Subscription vars
user_id uuid;
email varchar(255);
user_has_access bool;
is_visible_to_user boolean;
visible_to_user_ids uuid[] = '{}';
-- user subscriptions to the wal record's table
subscriptions realtime.subscription[] =
array_agg(sub)
from
realtime.subscription sub
where
sub.entity = entity_;
-- structured info for wal's columns
columns realtime.wal_column[] =
array_agg(
(
x->>'name',
x->>'type',
realtime.cast((x->'value') #>> '{}', (x->>'type')::regtype),
(pks ->> 'name') is not null,
pg_catalog.has_column_privilege('authenticated', entity_, x->>'name', 'SELECT')
)::realtime.wal_column
)
from
jsonb_array_elements(wal -> 'columns') x
left join jsonb_array_elements(wal -> 'pk') pks
on (x ->> 'name') = (pks ->> 'name');
-- previous identity values for update/delete
old_columns realtime.wal_column[] =
array_agg(
(
x->>'name',
x->>'type',
realtime.cast((x->'value') #>> '{}', (x->>'type')::regtype),
(pks ->> 'name') is not null,
pg_catalog.has_column_privilege('authenticated', entity_, x->>'name', 'SELECT')
)::realtime.wal_column
)
from
jsonb_array_elements(wal -> 'identity') x
left join jsonb_array_elements(wal -> 'pk') pks
on (x ->> 'name') = (pks ->> 'name');
output jsonb;
-- Error states
error_record_exceeds_max_size boolean = octet_length(wal::text) > max_record_bytes;
error_unauthorized boolean = not pg_catalog.has_any_column_privilege('authenticated', entity_, 'SELECT');
errors text[] = case
when error_record_exceeds_max_size then array['Error 413: Payload Too Large']
else '{}'::text[]
end;
begin
-- The 'authenticated' user does not have SELECT permission on any of the columns for the entity_
if error_unauthorized is true then
return (
null,
null,
visible_to_user_ids,
array['Error 401: Unauthorized']
)::realtime.wal_rls;
end if;
-------------------------------
-- Build Output JSONB Object --
-------------------------------
output = jsonb_build_object(
'schema', wal ->> 'schema',
'table', wal ->> 'table',
'type', action,
'commit_timestamp', (wal ->> 'timestamp')::text::timestamp with time zone,
'columns', (
select
jsonb_agg(
jsonb_build_object(
'name', pa.attname,
'type', pt.typname
)
order by pa.attnum asc
)
from
pg_attribute pa
join pg_type pt
on pa.atttypid = pt.oid
where
attrelid = entity_
and attnum > 0
and pg_catalog.has_column_privilege('authenticated', entity_, pa.attname, 'SELECT')
)
)
-- Add \"record\" key for insert and update
|| case
when error_record_exceeds_max_size then jsonb_build_object('record', '{}'::jsonb)
when action in ('INSERT', 'UPDATE') then
jsonb_build_object(
'record',
(select jsonb_object_agg((c).name, (c).value) from unnest(columns) c where (c).is_selectable)
)
else '{}'::jsonb
end
-- Add \"old_record\" key for update and delete
|| case
when error_record_exceeds_max_size then jsonb_build_object('old_record', '{}'::jsonb)
when action in ('UPDATE', 'DELETE') then
jsonb_build_object(
'old_record',
(select jsonb_object_agg((c).name, (c).value) from unnest(old_columns) c where (c).is_selectable)
)
else '{}'::jsonb
end;
if action in ('TRUNCATE', 'DELETE') then
visible_to_user_ids = array_agg(s.user_id) from unnest(subscriptions) s;
else
-- If RLS is on and someone is subscribed to the table prep
if is_rls_enabled and array_length(subscriptions, 1) > 0 then
perform
set_config('role', 'authenticated', true),
set_config('request.jwt.claim.role', 'authenticated', true);
if (select 1 from pg_prepared_statements where name = 'walrus_rls_stmt' limit 1) > 0 then
deallocate walrus_rls_stmt;
end if;
execute realtime.build_prepared_statement_sql('walrus_rls_stmt', entity_, columns);
end if;
-- For each subscribed user
for user_id, email, is_visible_to_user in (
select
subs.user_id,
subs.email,
realtime.is_visible_through_filters(columns, subs.filters)
from
unnest(subscriptions) subs
)
loop
if is_visible_to_user then
-- If RLS is off, add to visible users
if not is_rls_enabled then
visible_to_user_ids = visible_to_user_ids || user_id;
else
-- Check if RLS allows the user to see the record
perform
set_config('request.jwt.claim.sub', user_id::text, true),
set_config('request.jwt.claim.email', email::text, true);
execute 'execute walrus_rls_stmt' into user_has_access;
if user_has_access then
visible_to_user_ids = visible_to_user_ids || user_id;
end if;
end if;
end if;
end loop;
perform (
set_config('role', null, true)
);
end if;
return (
output,
is_rls_enabled,
visible_to_user_ids,
errors
)::realtime.wal_rls;
end;
$$;"
end
end
| 33.061033 | 111 | 0.556802 |
93f9d5bf5d21a7da4a13b326777388a455a9a40d | 212 | ex | Elixir | sdr-web/web/controllers/api/v1/math_controller.ex | korczis/hackrf-rust | 73c76c23eb58ddfaa484f40e4b800f12b31d0a50 | [
"MIT"
] | 1 | 2022-02-15T23:17:51.000Z | 2022-02-15T23:17:51.000Z | sdr-web/web/controllers/api/v1/math_controller.ex | korczis/hackrf-rust | 73c76c23eb58ddfaa484f40e4b800f12b31d0a50 | [
"MIT"
] | null | null | null | sdr-web/web/controllers/api/v1/math_controller.ex | korczis/hackrf-rust | 73c76c23eb58ddfaa484f40e4b800f12b31d0a50 | [
"MIT"
] | 3 | 2020-05-09T10:08:07.000Z | 2021-02-22T11:42:00.000Z | defmodule SdrWeb.API.V1.MathController do
use SdrWeb.Web, :controller
require Logger
def calculate(conn, params) do
Logger.info(params)
conn
|> json(%{msg: "Not yet implemented!"})
end
end
| 16.307692 | 43 | 0.683962 |
93f9eabec84aeb4c55b1841f9d768e3b25a59f70 | 427 | ex | Elixir | lib/gscraper_web/controllers/dashboard_controller.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | null | null | null | lib/gscraper_web/controllers/dashboard_controller.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | 25 | 2021-03-23T07:27:21.000Z | 2021-10-31T15:09:52.000Z | lib/gscraper_web/controllers/dashboard_controller.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | null | null | null | defmodule GscraperWeb.DashboardController do
use GscraperWeb, :controller
alias Gscraper.Search.Schemas.KeywordFile
alias Gscraper.Search.Searches
def index(conn, _params) do
changeset = KeywordFile.create_changeset(%KeywordFile{})
keywords =
conn
|> get_current_user()
|> Searches.list_keywords_by_user()
render(conn, "index.html", changeset: changeset, keywords: keywords)
end
end
| 23.722222 | 72 | 0.733021 |
93f9eed16ba9f5551dfa73d865f1f6d68ce43b48 | 1,766 | exs | Elixir | config/prod.exs | mammenj/elixir-auction | 94941bb820a221e3917014919d97571784ac9388 | [
"Apache-2.0"
] | null | null | null | config/prod.exs | mammenj/elixir-auction | 94941bb820a221e3917014919d97571784ac9388 | [
"Apache-2.0"
] | null | null | null | config/prod.exs | mammenj/elixir-auction | 94941bb820a221e3917014919d97571784ac9388 | [
"Apache-2.0"
] | null | null | null | import Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :auction_web, AuctionWeb.Endpoint,
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :auction_web, AuctionWeb.Endpoint,
# ...,
# url: [host: "example.com", port: 443],
# https: [
# ...,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :auction_web, AuctionWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
| 36.040816 | 66 | 0.703851 |
93f9faff59c6d1b39d812369a670887c2285d487 | 208 | exs | Elixir | examples/swagger_demo/priv/repo/migrations/20170226053859_create_user.exs | Whatnot-Inc/bureaucrat | d0634c6017dc68f8a23078cbc8c181a4b2d3e6db | [
"Unlicense"
] | 326 | 2015-08-19T10:05:07.000Z | 2022-03-28T08:49:33.000Z | examples/swagger_demo/priv/repo/migrations/20170226053859_create_user.exs | Whatnot-Inc/bureaucrat | d0634c6017dc68f8a23078cbc8c181a4b2d3e6db | [
"Unlicense"
] | 64 | 2015-08-19T06:44:19.000Z | 2022-03-29T06:23:34.000Z | examples/swagger_demo/priv/repo/migrations/20170226053859_create_user.exs | Whatnot-Inc/bureaucrat | d0634c6017dc68f8a23078cbc8c181a4b2d3e6db | [
"Unlicense"
] | 66 | 2016-01-08T20:40:40.000Z | 2022-03-03T02:15:15.000Z | defmodule SwaggerDemo.Repo.Migrations.CreateUser do
use Ecto.Migration
def change do
create table(:users) do
add :name, :string
add :email, :string
timestamps()
end
end
end
| 14.857143 | 51 | 0.658654 |
93fa1b77410ec6522b800c6e43bbe7179f77641b | 724 | ex | Elixir | lib/ramona.ex | unleashed-coding/ramona | 98a114063edc0365a3750d8065b351b1d08de4b3 | [
"Apache-2.0"
] | 1 | 2019-01-24T19:58:04.000Z | 2019-01-24T19:58:04.000Z | lib/ramona.ex | unleashed-coding/ramona | 98a114063edc0365a3750d8065b351b1d08de4b3 | [
"Apache-2.0"
] | null | null | null | lib/ramona.ex | unleashed-coding/ramona | 98a114063edc0365a3750d8065b351b1d08de4b3 | [
"Apache-2.0"
] | null | null | null | defmodule Ramona do
@moduledoc false
use Application
alias Alchemy.{Client, Cogs}
def start(_type, _args) do
case Application.get_env(:ramona, :token) do
nil ->
raise "TOKEN environment variable is not set"
token ->
prefix = Application.fetch_env!(:ramona, :prefix)
bootstrap(token, prefix)
end
end
defp bootstrap(token, prefix) do
run = Client.start(token)
load_modules()
Cogs.set_prefix(prefix)
run
end
defp load_modules do
use Ramona.Events
use Ramona.Events.Macros
use Ramona.Commands.Basic
use Ramona.Commands.Morse
use Ramona.Commands.Macros
use Ramona.Commands.Random
use Ramona.Commands.Moderation
end
end
| 21.294118 | 57 | 0.678177 |
93fa1d12cba3baffd4a1427333697c2b718c77fd | 86 | ex | Elixir | web/views/coherence/confirmation_view.ex | Symbolk/social_network | 11df1ba9bc19fd140b630ac2abbd4b13b42def92 | [
"MIT"
] | 17 | 2017-01-02T10:38:28.000Z | 2021-02-28T22:16:54.000Z | web/views/coherence/confirmation_view.ex | Symbolk/social_network | 11df1ba9bc19fd140b630ac2abbd4b13b42def92 | [
"MIT"
] | null | null | null | web/views/coherence/confirmation_view.ex | Symbolk/social_network | 11df1ba9bc19fd140b630ac2abbd4b13b42def92 | [
"MIT"
] | 2 | 2017-01-09T13:02:13.000Z | 2018-06-16T22:01:53.000Z | defmodule Coherence.ConfirmationView do
use SocialNetwork.Coherence.Web, :view
end
| 17.2 | 40 | 0.825581 |
93fa36b7c976a8a8a9967bded98ece37f72519fc | 4,233 | exs | Elixir | apps/concierge_site/test/web/views/day_select_helper_test.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | null | null | null | apps/concierge_site/test/web/views/day_select_helper_test.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 21 | 2021-03-12T17:05:30.000Z | 2022-02-16T21:48:35.000Z | apps/concierge_site/test/web/views/day_select_helper_test.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 1 | 2021-12-09T15:09:53.000Z | 2021-12-09T15:09:53.000Z | defmodule ConciergeSite.DaySelectHelperTest do
@moduledoc false
use ExUnit.Case, async: true
alias ConciergeSite.DaySelectHelper
test "render/1" do
html = Phoenix.HTML.safe_to_string(DaySelectHelper.render(:foo))
assert html =~ "<div class=\"day-selector\" data-selector=\"date\">"
assert html =~ "<div class=\"day-header\">Mon</div>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"monday\">"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"tuesday\">"
assert html =~ "<div class=\"group-part invisible-no-js\">"
assert html =~ "<input autocomplete=\"off\" type=\"checkbox\" value=\"weekdays\">"
end
describe "render/2" do
test "with monday as string" do
html = Phoenix.HTML.safe_to_string(DaySelectHelper.render(:foo, ["monday"]))
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"monday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"tuesday\">"
end
test "with monday as atom" do
html = Phoenix.HTML.safe_to_string(DaySelectHelper.render(:foo, [:monday]))
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"monday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"tuesday\">"
end
test "weekdays" do
weekdays = ~w(monday tuesday wednesday thursday friday)a
html = Phoenix.HTML.safe_to_string(DaySelectHelper.render(:foo, weekdays))
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"monday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"tuesday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"thursday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"friday\" checked>"
assert html =~ "<input autocomplete=\"off\" type=\"checkbox\" value=\"weekdays\" checked>"
end
test "weekend" do
weekdays = ~w(saturday sunday)a
html = Phoenix.HTML.safe_to_string(DaySelectHelper.render(:foo, weekdays))
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"saturday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"sunday\" checked>"
assert html =~ "<input autocomplete=\"off\" type=\"checkbox\" value=\"weekend\" checked>"
end
test "weekdays and weekend" do
weekdays = ~w(monday tuesday wednesday thursday friday saturday sunday)a
html = Phoenix.HTML.safe_to_string(DaySelectHelper.render(:foo, weekdays))
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"monday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"tuesday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"thursday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"friday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"saturday\" checked>"
assert html =~
"<input autocomplete=\"off\" name=\"foo[relevant_days][]\" type=\"checkbox\" value=\"sunday\" checked>"
assert html =~ "<input autocomplete=\"off\" type=\"checkbox\" value=\"weekdays\" checked>"
assert html =~ "<input autocomplete=\"off\" type=\"checkbox\" value=\"weekend\" checked>"
end
end
end
| 41.5 | 120 | 0.612095 |
93fa56bdc1bd1f364c419ea703c3b4079237d75a | 3,052 | exs | Elixir | farmbot_core/test/bot_state_ng_test.exs | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | 1 | 2019-08-06T11:51:48.000Z | 2019-08-06T11:51:48.000Z | farmbot_core/test/bot_state_ng_test.exs | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | farmbot_core/test/bot_state_ng_test.exs | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | defmodule FarmbotCore.BotStateNGTest do
use ExUnit.Case, async: true
alias FarmbotCore.BotStateNG
describe "pins" do
test "adds pins to the state" do
orig = BotStateNG.new()
assert Enum.empty?(orig.pins)
one_pin =
BotStateNG.add_or_update_pin(orig, 10, 1, 2)
|> Ecto.Changeset.apply_changes()
assert one_pin.pins[10] == %{mode: 1, value: 2}
two_pins =
BotStateNG.add_or_update_pin(one_pin, 20, 1, 20)
|> Ecto.Changeset.apply_changes()
assert two_pins.pins[10] == %{mode: 1, value: 2}
assert two_pins.pins[20] == %{mode: 1, value: 20}
end
test "updates an existing pin" do
orig = BotStateNG.new()
assert Enum.empty?(orig.pins)
one_pin =
BotStateNG.add_or_update_pin(orig, 10, 1, 2)
|> Ecto.Changeset.apply_changes()
assert one_pin.pins[10] == %{mode: 1, value: 2}
one_pin_updated =
BotStateNG.add_or_update_pin(one_pin, 10, 1, 50)
|> Ecto.Changeset.apply_changes()
assert one_pin_updated.pins[10] == %{mode: 1, value: 50}
end
end
describe "informational_settings" do
test "sets update_available" do
orig = BotStateNG.new()
assert orig.informational_settings.update_available == false
mut1 =
BotStateNG.changeset(orig, %{informational_settings: %{update_available: true}})
|> Ecto.Changeset.apply_changes()
assert mut1.informational_settings.update_available == true
mut2 =
BotStateNG.changeset(orig, %{informational_settings: %{update_available: false}})
|> Ecto.Changeset.apply_changes()
assert mut2.informational_settings.update_available == false
end
test "reports soc_temp" do
orig = BotStateNG.new()
mut =
BotStateNG.changeset(orig, %{informational_settings: %{soc_temp: 100}})
|> Ecto.Changeset.apply_changes()
assert mut.informational_settings.soc_temp == 100
end
test "reports disk_usage" do
orig = BotStateNG.new()
mut =
BotStateNG.changeset(orig, %{informational_settings: %{disk_usage: 100}})
|> Ecto.Changeset.apply_changes()
assert mut.informational_settings.disk_usage == 100
end
test "reports memory_usage" do
orig = BotStateNG.new()
mut =
BotStateNG.changeset(orig, %{informational_settings: %{memory_usage: 512}})
|> Ecto.Changeset.apply_changes()
assert mut.informational_settings.memory_usage == 512
end
test "reports uptime" do
orig = BotStateNG.new()
mut =
BotStateNG.changeset(orig, %{informational_settings: %{uptime: 5000}})
|> Ecto.Changeset.apply_changes()
assert mut.informational_settings.uptime == 5000
end
test "reports wifi_level" do
orig = BotStateNG.new()
mut =
BotStateNG.changeset(orig, %{informational_settings: %{wifi_level: 52}})
|> Ecto.Changeset.apply_changes()
assert mut.informational_settings.wifi_level == 52
end
end
end
| 27.00885 | 89 | 0.648427 |
93fa7d2f107b62ade36db5eda412ad5adec4d5b4 | 2,434 | ex | Elixir | lib/honeydew/queues.ex | kianmeng/honeydew | 7c0e825c70ef4b72c82d02ca95491e7365d6b2e8 | [
"MIT"
] | 717 | 2015-06-15T19:30:54.000Z | 2022-03-22T06:10:09.000Z | lib/honeydew/queues.ex | kianmeng/honeydew | 7c0e825c70ef4b72c82d02ca95491e7365d6b2e8 | [
"MIT"
] | 106 | 2015-06-25T05:38:05.000Z | 2021-12-08T23:17:19.000Z | lib/honeydew/queues.ex | kianmeng/honeydew | 7c0e825c70ef4b72c82d02ca95491e7365d6b2e8 | [
"MIT"
] | 60 | 2015-06-07T00:48:37.000Z | 2022-03-06T08:20:23.000Z | defmodule Honeydew.Queues do
@moduledoc false
use Supervisor
alias Honeydew.Queue
alias Honeydew.Queue.Mnesia
alias Honeydew.Dispatcher.LRUNode
alias Honeydew.Dispatcher.LRU
alias Honeydew.FailureMode.Abandon
alias Honeydew.Processes
@type name :: Honeydew.queue_name()
@type queue_spec_opt :: Honeydew.queue_spec_opt()
@spec queues() :: [name]
def queues do
__MODULE__
|> Supervisor.which_children
|> Enum.map(fn {queue, _, _, _} -> queue end)
|> Enum.sort
end
@spec stop_queue(name) :: :ok | {:error, :not_running}
def stop_queue(name) do
with :ok <- Supervisor.terminate_child(__MODULE__, name) do
Supervisor.delete_child(__MODULE__, name)
end
end
  @spec start_queue(name, [queue_spec_opt]) :: :ok | {:error, term()}
  # Starts a queue under this supervisor.
  #
  # Resolves the queue backend, dispatcher, failure mode and success mode from
  # `opts` (each accepts `nil`, a bare module, or a `{module, args}` tuple),
  # validates their arguments (raising on invalid ones), registers a process
  # group scope for the queue, and finally starts the queue child. Returns
  # `:ok`, or the `{:error, reason}` from `Supervisor.start_child/2`.
  def start_queue(name, opts) do
    # Backend defaults to the Mnesia queue with a RAM copy on this node.
    {module, args} =
      case opts[:queue] do
        nil -> {Mnesia, [ram_copies: [node()]]}
        module when is_atom(module) -> {module, []}
        {module, args} -> {module, args}
      end

    # Globally-named queues default to the least-recently-used-node
    # dispatcher; local queues to plain LRU.
    dispatcher =
      opts[:dispatcher] ||
        case name do
          {:global, _} -> {LRUNode, []}
          _ -> {LRU, []}
        end

    failure_mode =
      case opts[:failure_mode] do
        nil -> {Abandon, []}
        {module, args} -> {module, args}
        module when is_atom(module) -> {module, []}
      end

    {failure_module, failure_args} = failure_mode
    failure_module.validate_args!(failure_args)

    # Unlike failure_mode, success_mode has no default and may stay nil.
    success_mode =
      case opts[:success_mode] do
        nil -> nil
        {module, args} -> {module, args}
        module when is_atom(module) -> {module, []}
      end

    # `with` skips validation when success_mode is nil (the pattern fails and
    # nil falls through harmlessly).
    with {success_module, success_args} <- success_mode do
      success_module.validate_args!(success_args)
    end

    suspended = Keyword.get(opts, :suspended, false)

    module.validate_args!(args)

    # Positional argument list handed to Queue.child_spec/2 below.
    opts = [name, module, args, dispatcher, failure_mode, success_mode, suspended]

    # Backends that export rewrite_opts/1 get a chance to transform the
    # child-spec arguments before the queue starts.
    opts =
      :functions
      |> module.__info__
      |> Enum.member?({:rewrite_opts, 1})
      |> if do
        module.rewrite_opts(opts)
      else
        opts
      end

    Processes.start_process_group_scope(name)

    # Normalize the supervisor's {:ok, pid} success into a bare :ok;
    # errors pass through unchanged.
    with {:ok, _} <- Supervisor.start_child(__MODULE__, Queue.child_spec(name, opts)) do
      :ok
    end
  end
def start_link(args) do
Supervisor.start_link(__MODULE__, args, name: __MODULE__)
end
@impl true
def init(_args) do
Supervisor.init([], strategy: :one_for_one)
end
end
| 24.34 | 88 | 0.628595 |
93fa81624211cb80f62a0f4935f87900ef03a63a | 4,511 | ex | Elixir | clients/managed_identities/lib/google_api/managed_identities/v1/model/domain.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/managed_identities/lib/google_api/managed_identities/v1/model/domain.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/managed_identities/lib/google_api/managed_identities/v1/model/domain.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ManagedIdentities.V1.Model.Domain do
  @moduledoc """
  Represents a managed Microsoft Active Directory domain.

  ## Attributes

  *   `admin` (*type:* `String.t`, *default:* `nil`) - Optional. The name of delegated administrator account used to perform
      Active Directory operations. If not specified, `setupadmin` will be used.
  *   `authorizedNetworks` (*type:* `list(String.t)`, *default:* `nil`) - Optional. The full names of the Google Compute Engine
      [networks](/compute/docs/networks-and-firewalls#networks) the domain
      instance is connected to. Networks can be added using UpdateDomain.
      The domain is only available on networks listed in `authorized_networks`.
      If CIDR subnets overlap between networks, domain creation will fail.
  *   `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The time the instance was created.
  *   `fqdn` (*type:* `String.t`, *default:* `nil`) - Output only. The fully-qualified domain name of the exposed domain used by
      clients to connect to the service. Similar to what would be chosen for an
      Active Directory set up on an internal network.
  *   `labels` (*type:* `map()`, *default:* `nil`) - Optional. Resource labels that can contain user-provided metadata.
  *   `locations` (*type:* `list(String.t)`, *default:* `nil`) - Required. Locations where domain needs to be provisioned.
      regions
      e.g. us-west1 or us-east4
      Service supports up to 4 locations at once. Each location will use a /26
      block.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Required. The unique name of the domain using the form:
      `projects/{project_id}/locations/global/domains/{domain_name}`.
  *   `reservedIpRange` (*type:* `String.t`, *default:* `nil`) - Required. The CIDR range of internal addresses that are reserved for this
      domain. Reserved networks must be /24 or larger. Ranges must be
      unique and non-overlapping with existing subnets in
      [Domain].[authorized_networks].
  *   `state` (*type:* `String.t`, *default:* `nil`) - Output only. The current state of this domain.
  *   `statusMessage` (*type:* `String.t`, *default:* `nil`) - Output only. Additional information about the current status of this
      domain, if available.
  *   `trusts` (*type:* `list(GoogleApi.ManagedIdentities.V1.Model.Trust.t)`, *default:* `nil`) - Output only. The current trusts associated with the domain.
  *   `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The last update time.
  """

  # Generated model: GoogleApi.Gax.ModelBase supplies the struct, the field/2
  # macro, and decode/encode plumbing used by the Poison impls below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :admin => String.t(),
          :authorizedNetworks => list(String.t()),
          :createTime => DateTime.t(),
          :fqdn => String.t(),
          :labels => map(),
          :locations => list(String.t()),
          :name => String.t(),
          :reservedIpRange => String.t(),
          :state => String.t(),
          :statusMessage => String.t(),
          :trusts => list(GoogleApi.ManagedIdentities.V1.Model.Trust.t()),
          :updateTime => DateTime.t()
        }

  # `as:`/`type:` options tell ModelBase how to coerce nested values when
  # decoding JSON (e.g. timestamps into DateTime, trusts into Trust structs).
  field(:admin)
  field(:authorizedNetworks, type: :list)
  field(:createTime, as: DateTime)
  field(:fqdn)
  field(:labels, type: :map)
  field(:locations, type: :list)
  field(:name)
  field(:reservedIpRange)
  field(:state)
  field(:statusMessage)
  field(:trusts, as: GoogleApi.ManagedIdentities.V1.Model.Trust, type: :list)
  field(:updateTime, as: DateTime)
end
# Poison protocol implementations delegating to the decode/encode helpers
# generated by `use GoogleApi.Gax.ModelBase` above.
defimpl Poison.Decoder, for: GoogleApi.ManagedIdentities.V1.Model.Domain do
  def decode(value, options) do
    GoogleApi.ManagedIdentities.V1.Model.Domain.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.ManagedIdentities.V1.Model.Domain do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 46.989583 | 157 | 0.686101 |
93faaba22d06dc8e829f62317e1cc766f5c18196 | 3,216 | exs | Elixir | apps/admin_api/test/admin_api/v1/controllers/admin_auth/user_auth_controller_test.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/test/admin_api/v1/controllers/admin_auth/user_auth_controller_test.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/test/admin_api/v1/controllers/admin_auth/user_auth_controller_test.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | defmodule AdminAPI.V1.AdminAuth.UserAuthControllerTest do
use AdminAPI.ConnCase, async: true
alias EWalletDB.AuthToken
describe "/user.login" do
test "responds with a new auth token if id is valid" do
user = insert(:user)
response = admin_user_request("/user.login", %{id: user.id})
auth_token = get_last_inserted(AuthToken)
expected = %{
"version" => @expected_version,
"success" => true,
"data" => %{
"object" => "authentication_token",
"authentication_token" => auth_token.token
}
}
assert response == expected
end
test "responds with a new auth token if provider_user_id is valid" do
_user = insert(:user, %{provider_user_id: "1234"})
response = admin_user_request("/user.login", %{provider_user_id: "1234"})
auth_token = get_last_inserted(AuthToken)
expected = %{
"version" => @expected_version,
"success" => true,
"data" => %{
"object" => "authentication_token",
"authentication_token" => auth_token.token
}
}
assert response == expected
end
test "returns an error if provider_user_id does not match a user" do
response = admin_user_request("/user.login", %{provider_user_id: "not_a_user"})
expected = %{
"version" => @expected_version,
"success" => false,
"data" => %{
"object" => "error",
"code" => "user:provider_user_id_not_found",
"description" => "There is no user corresponding to the provided provider_user_id",
"messages" => nil
}
}
assert response == expected
end
test "returns :invalid_parameter if provider_user_id is nil" do
response = admin_user_request("/user.login", %{provider_user_id: nil})
expected = %{
"version" => @expected_version,
"success" => false,
"data" => %{
"object" => "error",
"code" => "client:invalid_parameter",
"description" => "Invalid parameter provided",
"messages" => nil
}
}
assert response == expected
end
test "returns :invalid_parameter if provider_user_id is not provided" do
response = admin_user_request("/user.login", %{wrong_attr: "user1234"})
expected = %{
"version" => @expected_version,
"success" => false,
"data" => %{
"object" => "error",
"code" => "client:invalid_parameter",
"description" => "Invalid parameter provided",
"messages" => nil
}
}
assert response == expected
end
end
describe "/user.logout" do
test "responds success with empty response if logout successfully" do
_user = insert(:user, %{provider_user_id: "1234"})
admin_user_request("/user.login", %{provider_user_id: "1234"})
auth_token = get_last_inserted(AuthToken)
response =
admin_user_request("/user.logout", %{
"auth_token" => auth_token.token
})
assert response["version"] == @expected_version
assert response["success"] == true
assert response["data"] == %{}
end
end
end
| 29.504587 | 93 | 0.589241 |
93fac12a96cdd0ce6f043e2c3b4b15a574d049ff | 945 | exs | Elixir | mix.exs | zdenal/vega_lite | e554d57c82d2c860839fee5176bba881c25d1d61 | [
"Apache-2.0"
] | 61 | 2021-05-20T09:54:34.000Z | 2021-07-11T12:50:32.000Z | mix.exs | zdenal/vega_lite | e554d57c82d2c860839fee5176bba881c25d1d61 | [
"Apache-2.0"
] | 13 | 2021-07-22T17:43:32.000Z | 2022-01-31T22:01:46.000Z | mix.exs | zdenal/vega_lite | e554d57c82d2c860839fee5176bba881c25d1d61 | [
"Apache-2.0"
] | 8 | 2021-08-09T20:23:05.000Z | 2022-02-12T03:10:43.000Z | defmodule VegaLite.MixProject do
use Mix.Project
@version "0.1.2"
@description "Elixir bindings to Vega-Lite"
def project do
[
app: :vega_lite,
version: @version,
description: @description,
name: "VegaLite",
elixir: "~> 1.7",
deps: deps(),
docs: docs(),
package: package(),
# Modules used by VegaLite.WxViewer if available
xref: [exclude: [:wx, :wx_object, :wxFrame, :wxWebView]]
]
end
def application do
[]
end
defp deps do
[
{:jason, "~> 1.2", only: [:dev, :test]},
{:ex_doc, "~> 0.24", only: :dev, runtime: false}
]
end
defp docs do
[
main: "VegaLite",
source_url: "https://github.com/elixir-nx/vega_lite",
source_ref: "v#{@version}"
]
end
def package do
[
licenses: ["Apache-2.0"],
links: %{
"GitHub" => "https://github.com/elixir-nx/vega_lite"
}
]
end
end
| 18.9 | 62 | 0.546032 |
93fae51df44cbc6d89eec74ac3631eb0e8ef9594 | 670 | exs | Elixir | test/connection_test.exs | rrrene/amqp | 46eb14f5d31f4cab3ec91984f5eae04f8066d5a2 | [
"MIT"
] | null | null | null | test/connection_test.exs | rrrene/amqp | 46eb14f5d31f4cab3ec91984f5eae04f8066d5a2 | [
"MIT"
] | null | null | null | test/connection_test.exs | rrrene/amqp | 46eb14f5d31f4cab3ec91984f5eae04f8066d5a2 | [
"MIT"
] | null | null | null | defmodule ConnectionTest do
use ExUnit.Case
alias AMQP.Connection
test "open connection with default settings" do
assert {:ok, conn} = Connection.open
assert :ok = Connection.close(conn)
end
test "open connection with host as binary" do
assert {:ok, conn} = Connection.open host: "localhost"
assert :ok = Connection.close(conn)
end
test "open connection with host as char list" do
assert {:ok, conn} = Connection.open host: 'localhost'
assert :ok = Connection.close(conn)
end
test "open connection using uri" do
assert {:ok, conn} = Connection.open "amqp://localhost"
assert :ok = Connection.close(conn)
end
end
| 24.814815 | 59 | 0.692537 |
93fb37ac80d0f83fed9cdcf3ffed1a7e08f01fd4 | 2,287 | ex | Elixir | clients/assured_workloads/lib/google_api/assured_workloads/v1beta1/model/google_cloud_assuredworkloads_v1_create_workload_operation_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/assured_workloads/lib/google_api/assured_workloads/v1beta1/model/google_cloud_assuredworkloads_v1_create_workload_operation_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/assured_workloads/lib/google_api/assured_workloads/v1beta1/model/google_cloud_assuredworkloads_v1_create_workload_operation_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AssuredWorkloads.V1beta1.Model.GoogleCloudAssuredworkloadsV1CreateWorkloadOperationMetadata do
@moduledoc """
Operation metadata to give request details of CreateWorkload.
## Attributes
* `complianceRegime` (*type:* `String.t`, *default:* `nil`) - Optional. Compliance controls that should be applied to the resources managed by the workload.
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Optional. Time when the operation was created.
* `displayName` (*type:* `String.t`, *default:* `nil`) - Optional. The display name of the workload.
* `parent` (*type:* `String.t`, *default:* `nil`) - Optional. The parent of the workload.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:complianceRegime => String.t() | nil,
:createTime => DateTime.t() | nil,
:displayName => String.t() | nil,
:parent => String.t() | nil
}
field(:complianceRegime)
field(:createTime, as: DateTime)
field(:displayName)
field(:parent)
end
defimpl Poison.Decoder,
for:
GoogleApi.AssuredWorkloads.V1beta1.Model.GoogleCloudAssuredworkloadsV1CreateWorkloadOperationMetadata do
def decode(value, options) do
GoogleApi.AssuredWorkloads.V1beta1.Model.GoogleCloudAssuredworkloadsV1CreateWorkloadOperationMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.AssuredWorkloads.V1beta1.Model.GoogleCloudAssuredworkloadsV1CreateWorkloadOperationMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.301587 | 160 | 0.72934 |
93fb3c512f69fd847ab2b027f2e55562138d9fd1 | 1,020 | ex | Elixir | lib/pixie/supervisor.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie/supervisor.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie/supervisor.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | defmodule Pixie.Supervisor do
use Pixie.GenericSupervisor
import Supervisor.Spec
def children do
backend_options = Pixie.backend_options
backend_name = Dict.get backend_options, :name
backend_options = Dict.delete backend_options, :name
children = [
worker(Pixie.ExtensionRegistry, [Pixie.configured_extensions]),
worker(Pixie.Monitor, [Pixie.configured_monitors]),
worker(Pixie.JsonEncoderCache, [])
]
children = case Application.get_env(:pixie, :start_backend, true) do
false -> children
true -> children ++ [supervisor(Pixie.Backend, [backend_name, backend_options])]
end
children = case Application.get_env(:pixie, :start_subscriptions, true) do
false -> children
true -> children ++ [supervisor(Pixie.LocalSubscriptionSupervisor, [])]
end
children = case Application.get_env(:pixie, :start_cowboy, false) do
false -> children
true -> [worker(Pixie.Server, []) | children]
end
children
end
end
| 30 | 87 | 0.692157 |
93fb4fb973eb50708e45e8279789a67c52cca127 | 580 | exs | Elixir | exercises/concept/newsletter/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/concept/newsletter/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 1,938 | 2019-12-12T08:07:10.000Z | 2021-01-29T12:56:13.000Z | exercises/concept/newsletter/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 239 | 2019-12-12T14:09:08.000Z | 2022-03-18T00:04:07.000Z | defmodule Newsletter.MixProject do
use Mix.Project
def project do
[
app: :newsletter,
version: "0.1.0",
# elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 20 | 87 | 0.57931 |
93fb78747b85f3481b354fcec73e96657ecd04ac | 95 | exs | Elixir | apps/rig/lib/result_test.exs | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | 518 | 2017-11-09T13:10:49.000Z | 2022-03-28T14:29:50.000Z | apps/rig/lib/result_test.exs | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | 270 | 2017-11-10T00:11:34.000Z | 2022-02-27T13:08:16.000Z | apps/rig/lib/result_test.exs | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | 67 | 2017-12-19T20:16:37.000Z | 2022-03-31T10:43:04.000Z | defmodule ResultTest do
@moduledoc false
use ExUnit.Case, async: true
doctest Result
end
| 15.833333 | 30 | 0.768421 |
93fb8ae3d786c71a0b7fa1e0d9309c82d20cc2e3 | 5,877 | exs | Elixir | test/metric_transaction_test.exs | VitorTrin/elixir_agent | 03b1261eeafef6c016550ad51d939e75d59deda9 | [
"Apache-2.0"
] | null | null | null | test/metric_transaction_test.exs | VitorTrin/elixir_agent | 03b1261eeafef6c016550ad51d939e75d59deda9 | [
"Apache-2.0"
] | null | null | null | test/metric_transaction_test.exs | VitorTrin/elixir_agent | 03b1261eeafef6c016550ad51d939e75d59deda9 | [
"Apache-2.0"
] | null | null | null | defmodule MetricTransactionTest do
use ExUnit.Case
use Plug.Test
alias NewRelic.Harvest.Collector
defmodule TestPlugAppForward do
import Plug.Conn
def init(opts), do: opts
def call(conn, _opts), do: send_resp(conn, 200, "ok")
end
defmodule Status do
use Plug.Router
plug(:match)
plug(:dispatch)
get("/check", do: send_resp(conn, 200, "ok"))
get("/info", do: send_resp(conn, 200, "ok"))
end
defmodule External do
use NewRelic.Tracer
@trace :make_queries
def make_queries do
External.call(span: true)
External.call()
Process.sleep(20)
end
@trace {:call, category: :external}
def call(span: true) do
NewRelic.set_span(:http, url: "http://domain.net", method: "GET", component: "HttpClient")
Process.sleep(40)
end
@trace {:call, category: :external}
def call do
Process.sleep(40)
end
end
defmodule TestPlugApp do
use Plug.Router
use NewRelic.Transaction
plug(:match)
plug(:dispatch)
get "/foo/:blah" do
External.make_queries()
Process.sleep(10)
send_resp(conn, 200, blah)
end
get "/fail" do
raise "FAIL"
send_resp(conn, 200, "won't get here")
end
get "/ordering/:one/test/:two/ok/:three" do
send_resp(conn, 200, "ok")
end
get "/custom_name" do
NewRelic.set_transaction_name("/very/unique/name")
send_resp(conn, 200, "ok")
end
get "/named_wildcard/*public_variable_name" do
send_resp(conn, 200, "ok")
end
get "/unnamed_wildcard/*_secret_variable_name" do
send_resp(conn, 200, "ok")
end
get "/fancy/:transaction/:_names/*supported" do
send_resp(conn, 200, "hello")
end
forward("/forward/a", to: TestPlugAppForward)
forward("/forward/b", to: TestPlugAppForward)
forward("/status", to: Status)
end
setup do
TestHelper.restart_harvest_cycle(NewRelic.Harvest.Collector.Metric.HarvestCycle)
on_exit(fn ->
TestHelper.pause_harvest_cycle(NewRelic.Harvest.Collector.Metric.HarvestCycle)
end)
end
test "Basic web transaction" do
TestHelper.request(TestPlugApp, conn(:get, "/foo/1"))
metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
assert TestHelper.find_metric(metrics, "WebTransaction/Plug/GET//foo/:blah")
refute TestHelper.find_metric(metrics, "WebFrontend/QueueTime")
assert TestHelper.find_metric(metrics, "Apdex")
assert TestHelper.find_metric(metrics, "HttpDispatcher")
end
test "External metrics" do
TestHelper.request(TestPlugApp, conn(:get, "/foo/1"))
metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
assert TestHelper.find_metric(metrics, "WebTransaction/Plug/GET//foo/:blah")
# Unscoped
assert TestHelper.find_metric(metrics, "External/domain.net/HttpClient/GET")
assert TestHelper.find_metric(metrics, "External/allWeb", 2)
assert TestHelper.find_metric(metrics, "External/all", 2)
# Scoped
assert TestHelper.find_metric(
metrics,
{"External/domain.net/HttpClient/GET", "WebTransaction/Plug/GET//foo/:blah"}
)
assert TestHelper.find_metric(
metrics,
{"External/MetricTransactionTest.External.call",
"WebTransaction/Plug/GET//foo/:blah"}
)
end
test "Function trace metrics" do
TestHelper.request(TestPlugApp, conn(:get, "/foo/1"))
metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
assert TestHelper.find_metric(metrics, "WebTransaction/Plug/GET//foo/:blah")
# Unscoped
assert TestHelper.find_metric(
metrics,
"Function/MetricTransactionTest.External.make_queries/0"
)
# Scoped
assert TestHelper.find_metric(
metrics,
{"Function/MetricTransactionTest.External.make_queries/0",
"WebTransaction/Plug/GET//foo/:blah"}
)
end
test "Request queueing transaction" do
request_start = "t=#{System.system_time(:millisecond) - 100}"
conn =
conn(:get, "/foo/1")
|> put_req_header("x-request-start", request_start)
TestHelper.request(TestPlugApp, conn)
metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
assert [_, [1, time, time, time, time, 0]] =
TestHelper.find_metric(metrics, "WebFrontend/QueueTime")
assert_in_delta time, 0.1, 0.02
end
test "Failed transaction" do
TestHelper.request(TestPlugApp, conn(:get, "/fail"))
metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
assert TestHelper.find_metric(metrics, "Errors/all")
apdex = TestHelper.find_metric(metrics, "Apdex", 0)
assert [_, [_, _, 1, _, _, _]] = apdex
end
test "Custom transaction names" do
TestHelper.request(TestPlugApp, conn(:get, "/custom_name"))
metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
assert TestHelper.find_metric(metrics, "WebTransaction/very/unique/name")
end
test "fancy transaction names" do
TestHelper.request(TestPlugApp, conn(:get, "/fancy/transaction/names/supported/here!"))
metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
assert TestHelper.find_metric(
metrics,
"WebTransaction/Plug/GET//fancy/:transaction/:_names/*supported"
)
end
test "Forwarding transaction names" do
TestHelper.request(TestPlugApp, conn(:get, "/status/check"))
TestHelper.request(TestPlugApp, conn(:get, "/status/check"))
TestHelper.request(TestPlugApp, conn(:get, "/status/info"))
metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
assert TestHelper.find_metric(metrics, "WebTransaction/Plug/GET//status/check", 2)
assert TestHelper.find_metric(metrics, "WebTransaction/Plug/GET//status/info")
end
end
| 27.591549 | 96 | 0.673813 |
93fbab61fdfa92b5e845d487bf2df289de2769ae | 323 | exs | Elixir | priv/repo/migrations/20210421112426_create_table_consoles.exs | rafaelcorazzi/nostalgic_games | 34734aa3a89194730d0cc5e137f3db5f597979d4 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210421112426_create_table_consoles.exs | rafaelcorazzi/nostalgic_games | 34734aa3a89194730d0cc5e137f3db5f597979d4 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210421112426_create_table_consoles.exs | rafaelcorazzi/nostalgic_games | 34734aa3a89194730d0cc5e137f3db5f597979d4 | [
"MIT"
] | null | null | null | defmodule NostalgicGames.Repo.Migrations.CreateTableConsoles do
use Ecto.Migration
def change do
create table(:console, primary_key: false) do
add :console_id, :uuid, primary_key: true
add :console_plataform_name, :string
add :console_plataform_code, :string
timestamps()
end
end
end
| 24.846154 | 63 | 0.724458 |
93fbf13ac00b3697bf227e909c356a3487f97ccd | 3,268 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/product.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/product.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/product.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Model.Product do
  @moduledoc """
  A Product contains ReferenceImages.

  ## Attributes

  *   `description` (*type:* `String.t`, *default:* `nil`) - User-provided metadata to be stored with this product. Must be at most 4096
      characters long.
  *   `displayName` (*type:* `String.t`, *default:* `nil`) - The user-provided name for this Product. Must not be empty. Must be at most
      4096 characters long.
  *   `name` (*type:* `String.t`, *default:* `nil`) - The resource name of the product.

      Format is:
      `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.

      This field is ignored when creating a product.
  *   `productCategory` (*type:* `String.t`, *default:* `nil`) - The category for the product identified by the reference image. This should
      be either "homegoods-v2", "apparel-v2", or "toys-v2". The legacy categories
      "homegoods", "apparel", and "toys" are still supported, but these should
      not be used for new products.

      This field is immutable.
  *   `productLabels` (*type:* `list(GoogleApi.Vision.V1.Model.KeyValue.t)`, *default:* `nil`) - Key-value pairs that can be attached to a product. At query time,
      constraints can be specified based on the product_labels.

      Note that integer values can be provided as strings, e.g. "1199". Only
      strings with integer values can match a range-based restriction which is
      to be supported soon.

      Multiple values can be assigned to the same key. One product may have up to
      500 product_labels.

      Notice that the total number of distinct product_labels over all products
      in one ProductSet cannot exceed 1M, otherwise the product search pipeline
      will refuse to work for that ProductSet.
  """

  # Generated model: ModelBase supplies the struct and decode/encode plumbing.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :description => String.t(),
          :displayName => String.t(),
          :name => String.t(),
          :productCategory => String.t(),
          :productLabels => list(GoogleApi.Vision.V1.Model.KeyValue.t())
        }

  field(:description)
  field(:displayName)
  field(:name)
  field(:productCategory)
  # Nested list coerced into KeyValue structs when decoding JSON.
  field(:productLabels, as: GoogleApi.Vision.V1.Model.KeyValue, type: :list)
end
# Poison protocol implementations delegating to the decode/encode helpers
# generated by `use GoogleApi.Gax.ModelBase` above.
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.Product do
  def decode(value, options) do
    GoogleApi.Vision.V1.Model.Product.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.Product do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 39.373494 | 162 | 0.704406 |
93fc13c64a182534e63d5106f37373b8520e3de1 | 1,981 | exs | Elixir | clients/service_management/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/service_management/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/service_management/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceManagement.Mixfile do
  use Mix.Project

  @version "0.58.5"

  def project() do
    [
      app: :google_api_service_management,
      version: @version,
      elixir: "~> 1.6",
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/service_management"
    ]
  end

  def application() do
    [extra_applications: [:logger]]
  end

  defp deps() do
    [
      # Shared Google API client runtime (HTTP + model plumbing).
      {:google_gax, "~> 0.4"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # Hex package description.
  defp description() do
    """
    Service Management API client library. Google Service Management allows service producers to publish their services on Google Cloud Platform so that they can be discovered and used by service consumers.
    """
  end

  # Hex package metadata.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/service_management",
        "Homepage" => "https://cloud.google.com/service-management/"
      }
    ]
  end
end
| 29.567164 | 206 | 0.672388 |
93fc4959e53725effa238ca26d6b52e31ac6c668 | 4,592 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/remarketing_list.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/remarketing_list.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/remarketing_list.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Model.RemarketingList do
  @moduledoc """
  Contains properties of a remarketing list. Remarketing enables you to create lists of users who have performed specific actions on a site, then target ads to members of those lists. This resource can be used to manage remarketing lists that are owned by your advertisers. To see all remarketing lists that are visible to your advertisers, including those that are shared to your advertiser or account, use the TargetableRemarketingLists resource.

  ## Attributes

  *   `accountId` (*type:* `String.t`, *default:* `nil`) - Account ID of this remarketing list. This is a read-only, auto-generated field that is only returned in GET requests.
  *   `active` (*type:* `boolean()`, *default:* `nil`) - Whether this remarketing list is active.
  *   `advertiserId` (*type:* `String.t`, *default:* `nil`) - Dimension value for the advertiser ID that owns this remarketing list. This is a required field.
  *   `advertiserIdDimensionValue` (*type:* `GoogleApi.DFAReporting.V34.Model.DimensionValue.t`, *default:* `nil`) - Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
  *   `description` (*type:* `String.t`, *default:* `nil`) - Remarketing list description.
  *   `id` (*type:* `String.t`, *default:* `nil`) - Remarketing list ID. This is a read-only, auto-generated field.
  *   `kind` (*type:* `String.t`, *default:* `dfareporting#remarketingList`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#remarketingList".
  *   `lifeSpan` (*type:* `String.t`, *default:* `nil`) - Number of days that a user should remain in the remarketing list without an impression. Acceptable values are 1 to 540, inclusive.
  *   `listPopulationRule` (*type:* `GoogleApi.DFAReporting.V34.Model.ListPopulationRule.t`, *default:* `nil`) - Rule used to populate the remarketing list with users.
  *   `listSize` (*type:* `String.t`, *default:* `nil`) - Number of users currently in the list. This is a read-only field.
  *   `listSource` (*type:* `String.t`, *default:* `nil`) - Product from which this remarketing list was originated.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Name of the remarketing list. This is a required field. Must be no greater than 128 characters long.
  *   `subaccountId` (*type:* `String.t`, *default:* `nil`) - Subaccount ID of this remarketing list. This is a read-only, auto-generated field that is only returned in GET requests.
  """

  # ModelBase supplies the struct definition plus the `field/1,2` macro and the
  # shared decode/encode plumbing used by every generated model.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :accountId => String.t(),
          :active => boolean(),
          :advertiserId => String.t(),
          :advertiserIdDimensionValue => GoogleApi.DFAReporting.V34.Model.DimensionValue.t(),
          :description => String.t(),
          :id => String.t(),
          :kind => String.t(),
          :lifeSpan => String.t(),
          :listPopulationRule => GoogleApi.DFAReporting.V34.Model.ListPopulationRule.t(),
          :listSize => String.t(),
          :listSource => String.t(),
          :name => String.t(),
          :subaccountId => String.t()
        }

  # Each field/1,2 call registers a JSON attribute; `as:` points at the nested
  # model used when decoding that attribute.
  field(:accountId)
  field(:active)
  field(:advertiserId)
  field(:advertiserIdDimensionValue, as: GoogleApi.DFAReporting.V34.Model.DimensionValue)
  field(:description)
  field(:id)
  field(:kind)
  field(:lifeSpan)
  field(:listPopulationRule, as: GoogleApi.DFAReporting.V34.Model.ListPopulationRule)
  field(:listSize)
  field(:listSource)
  field(:name)
  field(:subaccountId)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V34.Model.RemarketingList do
  # Delegate to the generated model's own decode/2 so nested models are built.
  def decode(json, opts) do
    GoogleApi.DFAReporting.V34.Model.RemarketingList.decode(json, opts)
  end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V34.Model.RemarketingList do
  # Encoding is identical for all generated models, so it lives in ModelBase.
  def encode(model, opts) do
    GoogleApi.Gax.ModelBase.encode(model, opts)
  end
end
| 55.325301 | 448 | 0.709277 |
93fc5e85c59c905a6fcb4b48a7ec7a62d1f9ef51 | 4,886 | exs | Elixir | test/oban/config_test.exs | wtfleming/oban | 1ffc4d1f25bb5dfb737b9e19e54ab0324e189ea5 | [
"Apache-2.0"
] | null | null | null | test/oban/config_test.exs | wtfleming/oban | 1ffc4d1f25bb5dfb737b9e19e54ab0324e189ea5 | [
"Apache-2.0"
] | null | null | null | test/oban/config_test.exs | wtfleming/oban | 1ffc4d1f25bb5dfb737b9e19e54ab0324e189ea5 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.ConfigTest do
use Oban.Case, async: true
alias Oban.Config
alias Oban.Plugins.FixedPruner
describe "start_link/1" do
test "a config struct is stored for retreival" do
conf = Config.new(repo: Repo)
{:ok, pid} = Config.start_link(conf: conf)
assert %Config{} = Config.get(pid)
end
end
  describe "new/1" do
    # Each test drives Config.new/1 through the conf/1 helper: rejected values
    # first (must raise ArgumentError), then accepted values.
    test ":circuit_backoff is validated as an integer" do
      assert_invalid(circuit_backoff: -1)
      assert_invalid(circuit_backoff: 0)
      assert_invalid(circuit_backoff: "5")
      assert_invalid(circuit_backoff: 1.0)

      assert_valid(circuit_backoff: 10)
    end

    test ":dispatch_cooldown is validated as a positive integer" do
      assert_invalid(dispatch_cooldown: -1)
      assert_invalid(dispatch_cooldown: 0)
      assert_invalid(dispatch_cooldown: "5")
      assert_invalid(dispatch_cooldown: 1.0)

      assert_valid(dispatch_cooldown: 500)
    end

    test ":crontab is validated as a list of cron job expressions" do
      assert_invalid(crontab: ["* * * * *"])
      assert_invalid(crontab: [["* * * * *", Fake]])
      assert_invalid(crontab: [Worker])

      # Valid entries are parsed into {parsed_expression, worker, opts} tuples.
      config = assert_valid(crontab: [{"* * * * *", Worker}])
      assert [{%_{minutes: _}, Worker, []}] = config.crontab

      config = assert_valid(crontab: [{"* * * * *", Worker, queue: "special"}])
      assert [{%_{minutes: _}, Worker, [queue: "special"]}] = config.crontab

      # `crontab: false` disables cron entirely and normalizes to an empty list.
      assert %Config{crontab: []} = conf(crontab: false)
    end

    test ":name is validated as a module" do
      assert_invalid(name: "Oban")
      assert_invalid(name: {:via, :whatever})

      assert_valid(name: MyOban)
    end

    test ":node is validated as a binary" do
      assert_invalid(node: nil)
      assert_invalid(node: '')
      assert_invalid(node: "")
      assert_invalid(node: MyNode)

      assert_valid(node: "MyNode")
    end

    test ":plugins are validated as modules or module/keyword tuples" do
      assert_invalid(plugins: ["Module"])
      assert_invalid(plugins: [FakeModule])
      assert_invalid(plugins: [FixedPruner, FakeModule])
      assert_invalid(plugins: [{Worker, nil}])
      assert_invalid(plugins: [{Worker, %{}}])

      assert_valid(plugins: false)
      assert_valid(plugins: [])
      assert_valid(plugins: [FixedPruner])
      assert_valid(plugins: [{FixedPruner, []}])
      assert_valid(plugins: [{FixedPruner, [name: "Something"]}])
    end

    test ":poll_interval is validated as an integer" do
      assert_invalid(poll_interval: -1)
      assert_invalid(poll_interval: 0)
      assert_invalid(poll_interval: "5")
      assert_invalid(poll_interval: 1.0)

      assert_valid(poll_interval: 10)
    end

    test ":prefix is validated as a binary" do
      assert_invalid(prefix: :private)
      assert_invalid(prefix: " private schema ")
      assert_invalid(prefix: "")

      assert_valid(prefix: "private")
    end

    test ":queues are validated as atom, integer pairs" do
      assert_invalid(queues: %{default: 25})
      assert_invalid(queues: [{"default", 25}])
      assert_invalid(queues: [default: 0])
      assert_invalid(queues: [default: 3.5])

      assert_valid(queues: [default: 1])

      # `queues: false` disables queues and normalizes to an empty list.
      assert %Config{queues: []} = conf(queues: false)
    end

    test ":shutdown_grace_period is validated as an integer" do
      assert_invalid(shutdown_grace_period: -1)
      assert_invalid(shutdown_grace_period: 0)
      assert_invalid(shutdown_grace_period: "5")
      assert_invalid(shutdown_grace_period: 1.0)

      assert_valid(shutdown_grace_period: 10)
    end

    test ":timezone is validated as a known timezone" do
      assert_invalid(timezone: "")
      assert_invalid(timezone: nil)
      assert_invalid(timezone: "america")
      assert_invalid(timezone: "america/chicago")

      assert_valid(timezone: "Etc/UTC")
      assert_valid(timezone: "Europe/Copenhagen")
      assert_valid(timezone: "America/Chicago")
    end

    test ":verbose is validated as `false` or a valid log level" do
      assert_invalid(verbose: 1)
      assert_invalid(verbose: "false")
      assert_invalid(verbose: nil)
      assert_invalid(verbose: :warning)
      assert_invalid(verbose: true)

      assert_valid(verbose: false)
      assert_valid(verbose: :warn)
    end
  end
  describe "node_name/1" do
    test "the system's DYNO value is favored when available" do
      # The env is passed as a plain map, so no real environment mutation needed.
      assert Config.node_name(%{"DYNO" => "worker.1"}) == "worker.1"
    end

    test "the local hostname is used without a DYNO variable" do
      hostname = Config.node_name()

      # Only shape is asserted, since the hostname differs per machine.
      assert is_binary(hostname)
      assert String.length(hostname) > 1
    end
  end
defp assert_invalid(opts) do
assert_raise ArgumentError, fn -> conf(opts) end
end
defp assert_valid(opts) do
assert %Config{} = conf(opts)
end
defp conf(opts) do
opts
|> Keyword.put(:repo, Repo)
|> Config.new()
end
end
| 29.257485 | 79 | 0.65923 |
93fc6a59f7e2fbb10fe261671461769b1c2202aa | 311 | exs | Elixir | priv/repo/migrations/20210128220303_make_webhook_key_not_nullable.exs | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 83 | 2018-05-31T14:49:10.000Z | 2022-03-27T16:49:49.000Z | priv/repo/migrations/20210128220303_make_webhook_key_not_nullable.exs | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 267 | 2018-05-22T23:19:02.000Z | 2022-03-31T04:31:06.000Z | priv/repo/migrations/20210128220303_make_webhook_key_not_nullable.exs | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 18 | 2018-11-20T05:15:54.000Z | 2022-03-28T08:20:13.000Z | defmodule Console.Repo.Migrations.MakeWebhookKeyNotNullable do
use Ecto.Migration
  def up do
    # Tighten the column to NOT NULL.
    # NOTE(review): assumes no existing row has a NULL webhook_key — the
    # database will reject the constraint otherwise; confirm before deploying.
    alter table("organizations") do
      modify :webhook_key, :string, null: false
    end
  end
  def down do
    # Revert: allow NULL webhook keys again.
    alter table("organizations") do
      modify :webhook_key, :string, null: true
    end
  end
end
| 19.4375 | 62 | 0.697749 |
93fc70642edfd5e218467ca91f0769864c872b88 | 3,667 | exs | Elixir | test/elixir_lokalise_api/endpoints/screenshots_test.exs | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 3 | 2021-06-24T14:30:31.000Z | 2021-09-06T11:30:17.000Z | test/elixir_lokalise_api/endpoints/screenshots_test.exs | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 8 | 2021-09-15T07:30:59.000Z | 2022-02-01T17:40:17.000Z | test/elixir_lokalise_api/endpoints/screenshots_test.exs | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 2 | 2021-09-07T11:10:51.000Z | 2021-09-26T07:37:39.000Z | defmodule ElixirLokaliseApi.ScreenshotsTest do
use ExUnit.Case, async: true
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
alias ElixirLokaliseApi.Pagination
alias ElixirLokaliseApi.Screenshots
alias ElixirLokaliseApi.Model.Screenshot, as: ScreenshotModel
alias ElixirLokaliseApi.Collection.Screenshots, as: ScreenshotsCollection
  setup_all do
    # HTTP responses are replayed from ExVCR cassettes (see `use ExVCR.Mock`
    # above), but the Hackney-backed client still has to be started once.
    HTTPoison.start()
  end

  doctest Screenshots

  # Fixed Lokalise project id used by every recorded cassette below.
  @project_id "803826145ba90b42d5d860.46800099"
  test "lists all screenshots" do
    # Replays a recorded response containing three screenshots.
    use_cassette "screenshots_all" do
      {:ok, %ScreenshotsCollection{} = screenshots} = Screenshots.all(@project_id)

      assert Enum.count(screenshots.items) == 3
      assert screenshots.project_id == @project_id

      screenshot = hd(screenshots.items)
      assert screenshot.screenshot_id == 189_266
    end
  end

  test "lists paginated screenshots" do
    # Page 2 with a limit of 1 out of three total items, so we are neither on
    # the first nor the last page.
    use_cassette "screenshots_all_paginated" do
      {:ok, %ScreenshotsCollection{} = screenshots} =
        Screenshots.all(@project_id, page: 2, limit: 1)

      assert Enum.count(screenshots.items) == 1
      assert screenshots.project_id == @project_id
      assert screenshots.total_count == 3
      assert screenshots.page_count == 3
      assert screenshots.per_page_limit == 1
      assert screenshots.current_page == 2

      refute screenshots |> Pagination.first_page?()
      refute screenshots |> Pagination.last_page?()
      assert screenshots |> Pagination.next_page?()
      assert screenshots |> Pagination.prev_page?()

      screenshot = hd(screenshots.items)
      assert screenshot.screenshot_id == 757_672
    end
  end
  test "finds a screenshot" do
    # Fetch a single screenshot and assert every field of the decoded model.
    use_cassette "screenshot_find" do
      screenshot_id = 757_672

      {:ok, %ScreenshotModel{} = screenshot} = Screenshots.find(@project_id, screenshot_id)

      assert screenshot.screenshot_id == screenshot_id
      assert screenshot.key_ids == []
      assert String.starts_with?(screenshot.url, "https://s3-eu-west-1")
      assert screenshot.title == "1"
      assert screenshot.description == ""
      assert screenshot.screenshot_tags == []
      assert screenshot.width == 307
      assert screenshot.height == 97
      assert screenshot.created_at == "2021-03-16 17:55:00 (Etc/UTC)"
      assert screenshot.created_at_timestamp == 1_615_917_300
    end
  end

  test "creates screenshots" do
    # Uploads a base64-encoded image fixture and checks the created item.
    use_cassette "screenshot_create" do
      {:ok, base64} =
        Path.expand("test/fixtures/screenshot_base64.txt")
        |> File.read()

      data = %{
        screenshots: [
          %{
            data: base64,
            title: "Elixir screen"
          }
        ]
      }

      {:ok, %ScreenshotsCollection{} = screenshots} = Screenshots.create(@project_id, data)

      assert screenshots.project_id == @project_id
      assert screenshots.errors == []

      screenshot = hd(screenshots.items)
      assert screenshot.title == "Elixir screen"
    end
  end
  test "updates a screenshot" do
    # Patch title/description and assert the updated model round-trips.
    use_cassette "screenshot_update" do
      screenshot_id = 757_683

      data = %{
        title: "Elixir updated",
        description: "Mix test"
      }

      {:ok, %ScreenshotModel{} = screenshot} =
        Screenshots.update(@project_id, screenshot_id, data)

      assert screenshot.title == "Elixir updated"
      assert screenshot.screenshot_id == screenshot_id
      assert screenshot.description == "Mix test"
    end
  end

  test "deletes a screenshot" do
    # The delete endpoint answers with a confirmation map, not a model struct.
    use_cassette "screenshot_delete" do
      screenshot_id = 757_683

      {:ok, %{} = resp} = Screenshots.delete(@project_id, screenshot_id)

      assert resp.project_id == @project_id
      assert resp.screenshot_deleted
    end
  end
end
| 30.057377 | 91 | 0.677393 |
93fc74430c6437c51b5af919f539a1f281c7163a | 2,079 | ex | Elixir | clients/container/lib/google_api/container/v1/model/master_authorized_networks_config.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/master_authorized_networks_config.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/master_authorized_networks_config.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Container.V1.Model.MasterAuthorizedNetworksConfig do
  @moduledoc """
  Configuration options for the master authorized networks feature. Enabled master authorized networks will disallow all external traffic to access Kubernetes master through HTTPS except traffic from the given CIDR blocks, Google Compute Engine Public IPs and Google Prod IPs.

  ## Attributes

  - cidrBlocks ([CidrBlock]): cidr_blocks define up to 10 external networks that could access Kubernetes master through HTTPS. Defaults to: `null`.
  - enabled (boolean()): Whether or not master authorized networks is enabled. Defaults to: `null`.
  """

  # ModelBase supplies the struct plus the `field/1,2` macro and shared
  # decode/encode plumbing used by every generated model.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :cidrBlocks => list(GoogleApi.Container.V1.Model.CidrBlock.t()),
          :enabled => any()
        }

  # `type: :list` decodes the JSON array into a list of CidrBlock models.
  field(:cidrBlocks, as: GoogleApi.Container.V1.Model.CidrBlock, type: :list)
  field(:enabled)
end
defimpl Poison.Decoder, for: GoogleApi.Container.V1.Model.MasterAuthorizedNetworksConfig do
  # Delegate to the generated model's own decode/2 so nested models are built.
  def decode(json, opts) do
    GoogleApi.Container.V1.Model.MasterAuthorizedNetworksConfig.decode(json, opts)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Container.V1.Model.MasterAuthorizedNetworksConfig do
  # Encoding is identical for all generated models, so it lives in ModelBase.
  def encode(model, opts) do
    GoogleApi.Gax.ModelBase.encode(model, opts)
  end
end
| 40.764706 | 276 | 0.763829 |
93fc88991d79ddfbbfa0588b2cff4e4da4b3dd22 | 2,143 | exs | Elixir | config/dev.exs | d-led/d-zen | 25aec2d78e3053df055c0be9cdfac5673dc94f0f | [
"Unlicense"
] | null | null | null | config/dev.exs | d-led/d-zen | 25aec2d78e3053df055c0be9cdfac5673dc94f0f | [
"Unlicense"
] | null | null | null | config/dev.exs | d-led/d-zen | 25aec2d78e3053df055c0be9cdfac5673dc94f0f | [
"Unlicense"
] | null | null | null | import Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with esbuild to bundle .js and .css sources.
config :dzen, DzenWeb.Endpoint,
  # Binding to loopback ipv4 address prevents access from other machines.
  # Change to `ip: {0, 0, 0, 0}` to allow access from other machines.
  http: [ip: {127, 0, 0, 1}, port: 4000],
  check_origin: false,
  code_reloader: true,
  debug_errors: true,
  # Dev-only secret; production must use its own secret from the environment,
  # never a value committed to the repository.
  secret_key_base: "V5bpKsYkqh2lAJN1ylLbch9fSgDA++gcRiT1/xXEzHdngnUgo3qvGbhCOHDqCcKD",
  watchers: [
    # Start the esbuild watcher by calling Esbuild.install_and_run(:default, args)
    esbuild: {Esbuild, :install_and_run, [:default, ~w(--sourcemap=inline --watch)]}
  ]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static assets and templates for browser live-reloading; any file
# matching these patterns triggers a reload in connected browsers.
config :dzen, DzenWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
      ~r"priv/gettext/.*(po)$",
      ~r"lib/dzen_web/(live|views)/.*(ex)$",
      ~r"lib/dzen_web/templates/.*(eex)$"
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 32.469697 | 86 | 0.70462 |
93fca3b57c8f443370c4026c5770633f85977d51 | 473 | ex | Elixir | lib/hub_web/router.ex | austenmadden/hub | e9cd481a9d604230197790eb9672aa2b94716bc9 | [
"MIT"
] | null | null | null | lib/hub_web/router.ex | austenmadden/hub | e9cd481a9d604230197790eb9672aa2b94716bc9 | [
"MIT"
] | null | null | null | lib/hub_web/router.ex | austenmadden/hub | e9cd481a9d604230197790eb9672aa2b94716bc9 | [
"MIT"
] | null | null | null | defmodule HubWeb.Router do
use HubWeb, :router
  # Plug pipeline for browser-facing routes: HTML negotiation, session/flash,
  # CSRF protection, and secure response headers.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end

  # Minimal pipeline for JSON API routes.
  pipeline :api do
    plug :accepts, ["json"]
  end

  scope "/", HubWeb do
    pipe_through :browser

    # Root page.
    get "/", PageController, :index
  end
# Other scopes may use custom stacks.
# scope "/api", HubWeb do
# pipe_through :api
# end
end
| 17.518519 | 39 | 0.661734 |
93fca5e085bb5310b4e626e384f1dad1bc74076d | 7,569 | ex | Elixir | lib/extended_types/types.ex | eksperimental/extended_types | 23c060c8e42fca448354d4394004bee9921131a1 | [
"CC0-1.0",
"MIT-0"
] | 6 | 2022-01-12T19:15:46.000Z | 2022-01-13T07:40:29.000Z | lib/extended_types/types.ex | eksperimental/extended_types | 23c060c8e42fca448354d4394004bee9921131a1 | [
"CC0-1.0",
"MIT-0"
] | null | null | null | lib/extended_types/types.ex | eksperimental/extended_types | 23c060c8e42fca448354d4394004bee9921131a1 | [
"CC0-1.0",
"MIT-0"
] | null | null | null | defmodule ExtendedTypes.Types do
  @moduledoc """
  This module lists all the types available in `ExtendedTypes`.
  """

  # Accumulate {name, arity, quoted_typedef} tuples; they are evaluated into
  # real @type definitions at the bottom of the module.
  Module.register_attribute(__MODULE__, :types, accumulate: true)
  @types {:non_pos_integer, 0,
          quote do
            @typedoc """
            A non-positive integer.

            That is, any integer `<= 0`.
            """
            @type non_pos_integer :: 0 | neg_integer()
          end}

  @types {:keyword, 2,
          quote do
            @typedoc """
            A keyword list with `key_type` specified.

            For example: `keyword(version :: atom(), map())`
            """
            @type keyword(key_type, value_type) :: list({key_type, value_type})
          end}

  @types {:nonempty_keyword, 1,
          quote do
            @typedoc """
            A non-empty keyword list.
            """
            @type nonempty_keyword(value_type) :: nonempty_list({atom(), value_type})
          end}

  @types {:nonempty_keyword, 2,
          quote do
            @typedoc """
            A non-empty keyword list with `key_type` specified.

            For example: `nonempty_keyword(version :: atom(), map())`
            """
            @type nonempty_keyword(key_type, value_type) :: nonempty_list({key_type, value_type})
          end}
  @types {:falsy, 0,
          quote do
            @typedoc """
            Falsy. Any value that is `nil` or `false`.
            """
            @type falsy :: nil | false
          end}
  @types {:string_map, 0,
          quote do
            @typedoc """
            Map with UTF-8 string keys.
            """
            @type string_map :: %{String.t() => any()}
          end}

  @types {:string_map, 1,
          quote do
            @typedoc """
            Map with UTF-8 string keys and values of `value_type`.
            """
            @type string_map(value_type) :: %{String.t() => value_type}
          end}

  @types {:atom_map, 0,
          quote do
            @typedoc """
            Map with atom keys.
            """
            @type atom_map :: %{atom => any()}
          end}

  @types {:atom_map, 1,
          quote do
            @typedoc """
            Map with atom keys and values of `value_type`.
            """
            @type atom_map(value_type) :: %{atom => value_type}
          end}
  @types {:atom_map, 2,
          quote do
            @typedoc """
            Map with keys of `key_type` (intended to be atoms) and values of `value_type`.

            Note: `key_type` is not constrained here; when `key_type` is `atom`,
            this matches `t:ExtendedTypes.Types.atom_map/1`.
            """
            @type atom_map(key_type, value_type) :: %{key_type => value_type}
          end}
  @types {:struct, 1,
          quote do
            @typedoc """
            Struct `name` with all fields of any type.

            `name` is expected to be an atom.
            """
            @type struct(name) :: %{
                    :__struct__ => name,
                    optional(atom()) => any()
                  }
          end}

  @types {:struct, 2,
          quote do
            @typedoc """
            Struct `name` with all fields of `value_type`.

            `name` is expected to be an atom.
            """
            @type struct(name, value_type) :: %{
                    :__struct__ => name,
                    optional(atom()) => value_type
                  }
          end}
# @types {:nonempty_bitstring, 0,
# quote do
# @typedoc """
# Non-empty bitstring.
# Note: this type will be available in Elixir when OTP24+ is supported exclusively.
# """
# @type nonempty_bitstring :: <<_::1, _::_*1>>
# end}
# @types {:nonempty_binary, 0,
# quote do
# @typedoc """
# Non-empty binary.
# Note: this type will be available in Elixir when OTP24+ is supported exclusively.
# """
# @type nonempty_binary :: <<_::8, _::_*8>>
# end}
  @types {:all, 0,
          quote do
            @typedoc """
            All types.

            A broken-down list akin to `t:any/0` or `t:term/0`.

            This is particularly useful when you want to manually create a type that excludes certain elements.
            """
            @type all ::
                    atom
                    | bitstring
                    | pid
                    | port
                    | reference
                    | tuple
                    | fun
                    | map
                    | no_return()

                    # numbers
                    | float
                    | integer

                    # lists
                    | list()
                    | nonempty_improper_list(any, any_but_list)
          end}
  @types {:any_but_list, 0,
          quote do
            @typedoc """
            Any type but a list.

            Useful to use as the termination type of improper lists.

            ## Examples

                @type your_list :: nonempty_improper_list(any, any_but_list)

            """
            @type any_but_list ::
                    atom
                    | bitstring
                    | float
                    | fun
                    | integer
                    | map
                    | pid
                    | port
                    | reference
                    | tuple
          end}
# Aliases
  @types {:empty_bitstring, 0,
          quote do
            @typedoc """
            Empty bitstring.

            Alias of `<<>>`. This is to bring typespecs mentally closer to pattern matching, while pattern-matching `<<>>` matches any type of bitstring.
            """
            @type empty_bitstring :: <<>>
          end}
  @types {:empty_binary, 0,
          quote do
            @typedoc """
            Empty binary.

            Alias of `<<>>`. This is to bring typespecs mentally closer to pattern matching, while pattern-matching `<<>>` matches any type of binary.
            """
            @type empty_binary :: <<>>
          end}
  @types {:empty_map, 0,
          quote do
            @typedoc """
            Empty map.

            Alias of `%{}`. This is to bring typespecs mentally closer to pattern matching, while pattern-matching `%{}` matches any type of map.
            """
            @type empty_map :: %{}
          end}

  @types {:empty_list, 0,
          quote do
            @typedoc """
            Empty list.

            Alias of `[]`.
            """
            @type empty_list :: []
          end}
  @types {:empty_tuple, 0,
          quote do
            @typedoc """
            Empty tuple.

            Alias of `{}`.
            """
            @type empty_tuple :: {}
          end}
  @types {:improper_list, 0,
          quote do
            @typedoc """
            Improper list.

            Alias of `nonempty_improper_list(any, any_but_list)`.
            """
            @type improper_list :: nonempty_improper_list(any, any_but_list)
          end}
  @types {:improper_list, 2,
          quote do
            @typedoc """
            Improper list.

            Alias of `nonempty_improper_list(content_type, termination_type)`.
            """
            @type improper_list(content_type, termination_type) ::
                    nonempty_improper_list(content_type, termination_type)
          end}
  # Evaluate every accumulated quoted typedef (and its @typedoc) into this
  # module, turning the @types data into real @type definitions.
  for {_type, _arity, quoted} <- @types do
    Module.eval_quoted(__MODULE__, quoted)
  end
  # All registered types as {name, arity, quoted_ast} tuples.
  @spec types() :: [{ExtendedTypes.type_name(), arity(), Macro.t() | improper_list()}]
  def types(), do: @types
@doc false
def types_kw() do
for {type_name, arity, _} <- @types do
{type_name, arity}
end
|> Enum.sort()
end
end
| 26.745583 | 152 | 0.47034 |
93fcfc29c50d73f9e813adf8541bb8f086e96a45 | 344 | ex | Elixir | lib/supabase_surface/components/icons/icon_aperture.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | 5 | 2021-06-08T08:02:43.000Z | 2022-02-09T23:13:46.000Z | lib/supabase_surface/components/icons/icon_aperture.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | null | null | null | lib/supabase_surface/components/icons/icon_aperture.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | 1 | 2021-07-14T05:20:31.000Z | 2021-07-14T05:20:31.000Z | defmodule SupabaseSurface.Components.Icons.IconAperture do
use SupabaseSurface.Components.Icon
@impl true
def render(assigns) do
icon_size = IconContainer.get_size(assigns.size)
~F"""
<IconContainer assigns={assigns}>
{Feathericons.aperture(width: icon_size, height: icon_size)}
</IconContainer>
"""
end
end
| 22.933333 | 66 | 0.72093 |
93fd0ae1d6df8e8d943ae5a4299d24a75c48513d | 5,880 | exs | Elixir | test/rels_test.exs | kianmeng/microformats2-elixir | 5a48e7bc8bb82ca4b43986f8497a066b23baf6a4 | [
"MIT"
] | 18 | 2016-02-20T12:19:08.000Z | 2021-05-26T07:57:09.000Z | test/rels_test.exs | kianmeng/microformats2-elixir | 5a48e7bc8bb82ca4b43986f8497a066b23baf6a4 | [
"MIT"
] | 17 | 2018-07-20T19:10:58.000Z | 2022-02-24T04:07:37.000Z | test/rels_test.exs | kianmeng/microformats2-elixir | 5a48e7bc8bb82ca4b43986f8497a066b23baf6a4 | [
"MIT"
] | 7 | 2016-11-06T23:13:55.000Z | 2021-02-20T09:36:49.000Z | defmodule Microformats2.RelsTest do
use ExUnit.Case
  test "parse successfully parses rels" do
    # A single rel link yields one entry in both "rels" and "rel-urls".
    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://blub" => %{"rels" => ["me"], "text" => "blub"}},
        "rels" => %{"me" => ["http://blub"]}
      } = Microformats2.parse("<a rel=\"me\" href=\"http://blub\">blub</a>", "http://localhost")
    )
  end

  test "parse successfully parses multiple rels" do
    # Two different hrefs with the same rel collect under one "rels" key.
    assert(
      %{
        "items" => _,
        "rel-urls" => %{
          "http://blub" => %{"rels" => ["me"], "text" => "blub"},
          "http://blah" => %{"rels" => ["me"], "text" => "blub"}
        },
        "rels" => %{"me" => ["http://blub", "http://blah"]}
      } =
        Microformats2.parse(
          """
          <a rel=\"me\" href=\"http://blub\">blub</a>
          <a rel=\"me\" href=\"http://blah\">blub</a>
          """,
          "http://localhost"
        )
    )
  end

  test "parse only saves one URL" do
    # The same href appearing twice is deduplicated.
    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://blub" => %{"rels" => ["me"], "text" => "blub"}},
        "rels" => %{"me" => ["http://blub"]}
      } =
        Microformats2.parse(
          """
          <a rel=\"me\" href=\"http://blub\">blub</a>
          <a rel=\"me\" href=\"http://blub\">blub</a>
          """,
          "http://localhost"
        )
    )
  end

  test "parse saves all rels" do
    # One URL with two distinct rels is listed under both rel keys.
    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://blub" => %{"rels" => ["me", "moo"], "text" => "blub"}},
        "rels" => %{"me" => ["http://blub"], "moo" => ["http://blub"]}
      } =
        Microformats2.parse(
          """
          <a rel=\"me\" href=\"http://blub\">blub</a>
          <a rel=\"moo\" href=\"http://blub\">blub</a>
          """,
          "http://localhost"
        )
    )
  end
  test "parse successfully parses rels with attributes" do
    # media, hreflang, title and type attributes are carried into "rel-urls",
    # first one at a time, then all combined on a single anchor.
    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://blub" => %{"rels" => ["me"], "media" => "video", "text" => "blub"}},
        "rels" => %{"me" => ["http://blub"]}
      } = Microformats2.parse("<a rel=\"me\" media=\"video\" href=\"http://blub\">blub</a>", "http://localhost")
    )

    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://blub" => %{"rels" => ["me"], "hreflang" => "de", "text" => "blub"}},
        "rels" => %{"me" => ["http://blub"]}
      } = Microformats2.parse("<a rel=\"me\" hreflang=\"de\" href=\"http://blub\">blub</a>", "http://localhost")
    )

    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://blub" => %{"rels" => ["me"], "title" => "blub", "text" => "blub"}},
        "rels" => %{"me" => ["http://blub"]}
      } = Microformats2.parse("<a rel=\"me\" title=\"blub\" href=\"http://blub\">blub</a>", "http://localhost")
    )

    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://blub" => %{"rels" => ["me"], "type" => "text/html", "text" => "blub"}},
        "rels" => %{"me" => ["http://blub"]}
      } = Microformats2.parse("<a rel=\"me\" type=\"text/html\" href=\"http://blub\">blub</a>", "http://localhost")
    )

    assert(
      %{
        "items" => _,
        "rel-urls" => %{
          "http://blub" => %{
            "rels" => ["me"],
            "media" => "video",
            "title" => "blub",
            "hreflang" => "de",
            "type" => "text/html",
            "text" => "blub"
          }
        },
        "rels" => %{"me" => ["http://blub"]}
      } =
        Microformats2.parse(
          "<a rel=\"me\" hreflang=\"de\" media=\"video\" title=\"blub\" type=\"text/html\" href=\"http://blub\">blub</a>",
          "http://localhost"
        )
    )
  end
  test "duplicate value doesn't overwrite the first one" do
    # When the same URL appears twice with conflicting attribute values, the
    # first occurrence (hreflang "de") wins.
    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://blub" => %{"rels" => ["me"], "text" => "blub", "hreflang" => "de"}},
        "rels" => %{"me" => ["http://blub"]}
      } =
        Microformats2.parse(
          """
          <a rel="me" hreflang="de" href="http://blub">blub</a>
          <a rel="me" hreflang="en" href="http://blub">blah</a>
          """,
          "http://localhost"
        )
    )
  end

  test "parse ignores template elements" do
    # Links inside <template> must not contribute rels (here: no "moo").
    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://blub" => %{"rels" => ["me"], "text" => "blub"}},
        "rels" => %{"me" => ["http://blub"]}
      } =
        Microformats2.parse(
          """
          <a rel="me" href="http://blub">blub</a>
          <template><a rel="moo" href="http://blub">blub</a></template>
          """,
          "http://localhost"
        )
    )
  end

  test "parse generates an absolute URL" do
    # Relative hrefs are resolved against the base URL: absolute path, bare
    # relative path, and resolution relative to a base with a path component.
    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://localhost/blub" => %{"rels" => ["me"], "text" => "blub"}},
        "rels" => %{"me" => ["http://localhost/blub"]}
      } = Microformats2.parse("<a rel=\"me\" href=\"/blub\">blub</a>", "http://localhost")
    )

    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://localhost/blub" => %{"rels" => ["me"], "text" => "blub"}},
        "rels" => %{"me" => ["http://localhost/blub"]}
      } = Microformats2.parse("<a rel=\"me\" href=\"blub\">blub</a>", "http://localhost")
    )

    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://localhost/blah/foo/blub" => %{"rels" => ["me"], "text" => "blub"}},
        "rels" => %{"me" => ["http://localhost/blah/foo/blub"]}
      } = Microformats2.parse("<a rel=\"me\" href=\"blub\">blub</a>", "http://localhost/blah/foo")
    )

    assert(
      %{
        "items" => _,
        "rel-urls" => %{"http://localhost/blub" => %{"rels" => ["me"], "text" => "blub"}},
        "rels" => %{"me" => ["http://localhost/blub"]}
      } = Microformats2.parse("<a rel=\"me\" href=\"/blub\">blub</a>", "http://localhost/blah/foo")
    )
  end
end
| 30.78534 | 122 | 0.415816 |
93fd0e7cbc6bccbc194c12801a9ea5d6e52e9131 | 32,409 | ex | Elixir | lib/elixir/lib/system.ex | Sae497/elixir | 666a0bca8daac6a9dffb80db837c883805a6a22e | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/system.ex | Sae497/elixir | 666a0bca8daac6a9dffb80db837c883805a6a22e | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/system.ex | Sae497/elixir | 666a0bca8daac6a9dffb80db837c883805a6a22e | [
"Apache-2.0"
] | null | null | null | defmodule System do
@moduledoc """
The `System` module provides functions that interact directly
with the VM or the host system.
## Time
The `System` module also provides functions that work with time,
returning different times kept by the system with support for
different time units.
One of the complexities in relying on system times is that they
may be adjusted. For example, when you enter and leave daylight
saving time, the system clock will be adjusted, often adding
or removing one hour. We call such changes "time warps". In
order to understand how such changes may be harmful, imagine
the following code:
## DO NOT DO THIS
prev = System.os_time()
# ... execute some code ...
next = System.os_time()
diff = next - prev
If, while the code is executing, the system clock changes,
some code that executed in 1 second may be reported as taking
over 1 hour! To address such concerns, the VM provides a
monotonic time via `System.monotonic_time/0` which never
decreases and does not leap:
## DO THIS
prev = System.monotonic_time()
# ... execute some code ...
next = System.monotonic_time()
diff = next - prev
Generally speaking, the VM provides three time measurements:
* `os_time/0` - the time reported by the operating system (OS). This time may be
adjusted forwards or backwards in time with no limitation;
* `system_time/0` - the VM view of the `os_time/0`. The system time and operating
system time may not match in case of time warps although the VM works towards
aligning them. This time is not monotonic (i.e., it may decrease)
as its behaviour is configured [by the VM time warp
mode](http://www.erlang.org/doc/apps/erts/time_correction.html#Time_Warp_Modes);
* `monotonic_time/0` - a monotonically increasing time provided
by the Erlang VM.
The time functions in this module work in the `:native` unit
(unless specified otherwise), which is operating system dependent. Most of
the time, all calculations are done in the `:native` unit, to
avoid loss of precision, with `convert_time_unit/3` being
invoked at the end to convert to a specific time unit like
`:millisecond` or `:microsecond`. See the `t:time_unit/0` type for
more information.
For a more complete rundown on the VM support for different
times, see the [chapter on time and time
correction](http://www.erlang.org/doc/apps/erts/time_correction.html)
in the Erlang docs.
"""
@typedoc """
The time unit to be passed to functions like `monotonic_time/1` and others.
The `:second`, `:millisecond`, `:microsecond` and `:nanosecond` time
units controls the return value of the functions that accept a time unit.
A time unit can also be a strictly positive integer. In this case, it
represents the "parts per second": the time will be returned in `1 /
parts_per_second` seconds. For example, using the `:millisecond` time unit
is equivalent to using `1000` as the time unit (as the time will be returned
in 1/1000 seconds - milliseconds).
"""
@type time_unit ::
:second
| :millisecond
| :microsecond
| :nanosecond
| pos_integer
@base_dir :filename.join(__DIR__, "../../..")
@version_file :filename.join(@base_dir, "VERSION")
# Trims ASCII whitespace (space, \r, \n, \t) from both ends of `iodata`
# and returns a binary. Uses :re directly so it also accepts charlists,
# such as the output of :os.cmd/1 in get_revision.
defp strip(iodata) do
  :re.replace(iodata, "^[\s\r\n\t]+|[\s\r\n\t]+$", "", [:global, return: :binary])
end
# Reads the file at `path` and returns its whitespace-trimmed contents,
# or "" when the file cannot be read for any reason.
defp read_stripped(path) do
  with {:ok, contents} <- :file.read_file(path) do
    strip(contents)
  else
    _ -> ""
  end
end
# Read and strip the version from the VERSION file.
defmacrop get_version do
case read_stripped(@version_file) do
"" -> raise "could not read the version number from VERSION"
data -> data
end
end
# Returns OTP version that Elixir was compiled with.
defmacrop get_otp_release do
:erlang.list_to_binary(:erlang.system_info(:otp_release))
end
# Tries to run "git rev-parse --short=7 HEAD". In the case of success returns
# the short revision hash. If that fails, returns an empty string.
defmacrop get_revision do
null =
case :os.type() do
{:win32, _} -> 'NUL'
_ -> '/dev/null'
end
'git rev-parse --short=7 HEAD 2> '
|> Kernel.++(null)
|> :os.cmd()
|> strip
end
defp revision, do: get_revision()
# Get the date at compilation time.
# Follows https://reproducible-builds.org/specs/source-date-epoch/
defmacrop get_date do
unix_epoch =
if source_date_epoch = :os.getenv('SOURCE_DATE_EPOCH') do
try do
List.to_integer(source_date_epoch)
rescue
_ -> nil
end
end
unix_epoch = unix_epoch || :os.system_time(:second)
{{year, month, day}, {hour, minute, second}} =
:calendar.gregorian_seconds_to_datetime(unix_epoch + 62_167_219_200)
"~4..0b-~2..0b-~2..0bT~2..0b:~2..0b:~2..0bZ"
|> :io_lib.format([year, month, day, hour, minute, second])
|> :erlang.iolist_to_binary()
end
@doc """
Returns the endianness.
"""
@spec endianness() :: :little | :big
def endianness, do: :erlang.system_info(:endian)
@doc """
Returns the endianness the system was compiled with.
"""
@endianness :erlang.system_info(:endian)
@spec compiled_endianness() :: :little | :big
def compiled_endianness do
@endianness
end
@doc """
Elixir version information.
Returns Elixir's version as binary.
"""
@spec version() :: String.t()
def version, do: get_version()
@doc """
Elixir build information.
Returns a map with the Elixir version, the Erlang/OTP release it was compiled
with, a short Git revision hash and the date and time it was built.
Every value in the map is a string, and these are:
* `:build` - the Elixir version, short Git revision hash and
Erlang/OTP release it was compiled with
* `:date` - a string representation of the ISO8601 date and time it was built
* `:opt_release` - OTP release it was compiled with
* `:revision` - short Git revision hash. If Git was not available at building
time, it is set to `""`
* `:version` - the Elixir version
One should not rely on the specific formats returned by each of those fields.
Instead one should use specialized functions, such as `version/0` to retrieve
the Elixir version and `otp_release/0` to retrieve the Erlang/OTP release.
## Examples
iex> System.build_info()
%{
build: "1.9.0-dev (772a00a0c) (compiled with Erlang/OTP 21)",
date: "2018-12-24T01:09:21Z",
otp_release: "21",
revision: "772a00a0c",
version: "1.9.0-dev"
}
"""
@spec build_info() :: %{
build: String.t(),
date: String.t(),
revision: String.t(),
version: String.t(),
otp_release: String.t()
}
def build_info do
%{
build: build(),
date: get_date(),
revision: revision(),
version: version(),
otp_release: get_otp_release()
}
end
# Returns a string of the build info
defp build do
{:ok, v} = Version.parse(version())
revision_string = if v.pre != [] and revision() != "", do: " (#{revision()})", else: ""
otp_version_string = " (compiled with Erlang/OTP #{get_otp_release()})"
version() <> revision_string <> otp_version_string
end
@doc """
Lists command line arguments.
Returns the list of command line arguments passed to the program.
"""
@spec argv() :: [String.t()]
def argv do
:elixir_config.get(:argv)
end
@doc """
Modifies command line arguments.
Changes the list of command line arguments. Use it with caution,
as it destroys any previous argv information.
"""
@spec argv([String.t()]) :: :ok
def argv(args) do
:elixir_config.put(:argv, args)
end
@doc """
Marks if the system should halt or not at the end of ARGV processing.
"""
@spec no_halt(boolean) :: :ok
def no_halt(boolean) when is_boolean(boolean) do
:elixir_config.put(:no_halt, boolean)
end
@doc """
Checks if the system will halt or not at the end of ARGV processing.
"""
@spec no_halt() :: boolean
def no_halt() do
:elixir_config.get(:no_halt)
end
@doc """
Current working directory.
Returns the current working directory or `nil` if one
is not available.
"""
@deprecated "Use File.cwd/0 instead"
@spec cwd() :: String.t() | nil
def cwd do
case File.cwd() do
{:ok, cwd} -> cwd
_ -> nil
end
end
@doc """
Current working directory, exception on error.
Returns the current working directory or raises `RuntimeError`.
"""
@deprecated "Use File.cwd!/0 instead"
@spec cwd!() :: String.t()
def cwd! do
case File.cwd() do
{:ok, cwd} ->
cwd
_ ->
raise "could not get a current working directory, the current location is not accessible"
end
end
@doc """
User home directory.
Returns the user home directory (platform independent).
"""
@spec user_home() :: String.t() | nil
def user_home do
:elixir_config.get(:home)
end
@doc """
User home directory, exception on error.
Same as `user_home/0` but raises `RuntimeError`
instead of returning `nil` if no user home is set.
"""
@spec user_home!() :: String.t()
def user_home! do
user_home() || raise "could not find the user home, please set the HOME environment variable"
end
@doc ~S"""
Writable temporary directory.
Returns a writable temporary directory.
Searches for directories in the following order:
1. the directory named by the TMPDIR environment variable
2. the directory named by the TEMP environment variable
3. the directory named by the TMP environment variable
4. `C:\TMP` on Windows or `/tmp` on Unix
5. as a last resort, the current working directory
Returns `nil` if none of the above are writable.
"""
@spec tmp_dir() :: String.t() | nil
def tmp_dir do
  # Candidates are tried in the documented priority order; `||`
  # short-circuits at the first candidate that is a writable directory.
  write_env_tmp_dir('TMPDIR') || write_env_tmp_dir('TEMP') || write_env_tmp_dir('TMP') ||
    write_tmp_dir('/tmp') || write_cwd_tmp_dir()
end
# Last-resort tmp dir candidate: the current working directory. Returns
# nil when the cwd itself cannot be determined.
defp write_cwd_tmp_dir do
  with {:ok, cwd} <- File.cwd() do
    write_tmp_dir(cwd)
  else
    _ -> nil
  end
end
@doc """
Writable temporary directory, exception on error.
Same as `tmp_dir/0` but raises `RuntimeError`
instead of returning `nil` if no temp dir is set.
"""
@spec tmp_dir!() :: String.t()
def tmp_dir! do
tmp_dir() ||
raise "could not get a writable temporary directory, please set the TMPDIR environment variable"
end
# Looks up the environment variable `env` (a charlist name) and, when it
# is set, validates its value as a writable tmp directory; nil otherwise.
defp write_env_tmp_dir(env) do
  value = :os.getenv(env)
  # :os.getenv/1 returns false when unset; any charlist (even '') is truthy.
  if value, do: write_tmp_dir(value)
end
# Returns `dir` as a binary when it exists, is a directory and is
# writable for the current user; nil in every other case (including
# stat errors for nonexistent paths).
defp write_tmp_dir(dir) do
  case File.stat(dir) do
    {:ok, %File.Stat{type: :directory, access: access}} when access in [:read_write, :write] ->
      IO.chardata_to_string(dir)

    _ ->
      nil
  end
end
@doc """
Registers a program exit handler function.
Registers a function that will be invoked at the end of program execution.
Useful for invoking a hook in "script" mode.
The handler always executes in a different process from the one it was
registered in. As a consequence, any resources managed by the calling process
(ETS tables, open files, etc.) won't be available by the time the handler
function is invoked.
The function must receive the exit status code as an argument.
"""
@spec at_exit((non_neg_integer -> any)) :: :ok
def at_exit(fun) when is_function(fun, 1) do
  # Handlers are prepended, so they run in reverse registration order.
  :elixir_config.update(:at_exit, &[fun | &1])
  :ok
end
@doc """
Locates an executable on the system.
This function looks up an executable program given
its name using the environment variable PATH on Unix
and Windows. It also considers the proper executable
extension for each operating system, so for Windows it will try to
lookup files with `.com`, `.cmd` or similar extensions.
"""
@spec find_executable(binary) :: binary | nil
def find_executable(program) when is_binary(program) do
  # A null byte would truncate the name at the OS boundary; reject early.
  assert_no_null_byte!(program, "System.find_executable/1")

  # :os.find_executable/1 returns false when nothing matches on PATH.
  found = program |> String.to_charlist() |> :os.find_executable()
  if found, do: List.to_string(found)
end
@doc """
Returns all system environment variables.
The returned value is a map containing name-value pairs.
Variable names and their values are strings.
"""
@spec get_env() :: %{optional(String.t()) => String.t()}
def get_env do
  # Each entry from :os.getenv/0 is a "NAME=value" charlist; split on the
  # first "=" only, since values may themselves contain "=".
  Map.new(:os.getenv(), fn entry ->
    [name, value] =
      entry
      |> IO.chardata_to_string()
      |> String.split("=", parts: 2)

    {name, value}
  end)
end
@doc """
Returns the value of the given environment variable.
The returned value of the environment variable
`varname` is a string. If the environment variable
is not set, returns the string specified in `default` or
`nil` if none is specified.
## Examples
iex> System.get_env("PORT")
"4000"
iex> System.get_env("NOT_SET")
nil
iex> System.get_env("NOT_SET", "4001")
"4001"
"""
@doc since: "1.9.0"
@spec get_env(String.t(), String.t() | nil) :: String.t() | nil
def get_env(varname, default \\ nil)
    when is_binary(varname) and
           (is_binary(default) or is_nil(default)) do
  # :os.getenv/1 returns false when the variable is unset; any charlist
  # (including the empty one for "VAR=") is truthy and converted.
  value = :os.getenv(String.to_charlist(varname))
  if value, do: List.to_string(value), else: default
end
@doc """
Returns the value of the given environment variable or `:error` if not found.
If the environment variable `varname` is set, then `{:ok, value}` is returned
where `value` is a string. If `varname` is not set, `:error` is returned.
## Examples
iex> System.fetch_env("PORT")
{:ok, "4000"}
iex> System.fetch_env("NOT_SET")
:error
"""
@doc since: "1.9.0"
@spec fetch_env(String.t()) :: {:ok, String.t()} | :error
def fetch_env(varname) when is_binary(varname) do
  # :os.getenv/1 returns false when unset; wrap set values in {:ok, _}.
  value = :os.getenv(String.to_charlist(varname))
  if value, do: {:ok, List.to_string(value)}, else: :error
end
@doc """
Returns the value of the given environment variable or raises if not found.
Same as `get_env/1` but raises instead of returning `nil` when the variable is
not set.
## Examples
iex> System.fetch_env!("PORT")
"4000"
iex> System.fetch_env!("NOT_SET")
** (ArgumentError) could not fetch environment variable "NOT_SET" because it is not set
"""
@doc since: "1.9.0"
@spec fetch_env!(String.t()) :: String.t()
def fetch_env!(varname) when is_binary(varname) do
  case get_env(varname) do
    nil ->
      raise ArgumentError,
            "could not fetch environment variable #{inspect(varname)} because it is not set"

    value ->
      value
  end
end
@doc """
Erlang VM process identifier.
Returns the process identifier of the current Erlang emulator
in the format most commonly used by the operating system environment.
For more information, see `:os.getpid/0`.
"""
@spec get_pid() :: binary
def get_pid, do: IO.iodata_to_binary(:os.getpid())
@doc """
Sets an environment variable value.
Sets a new `value` for the environment variable `varname`.
"""
@spec put_env(binary, binary) :: :ok
def put_env(varname, value) when is_binary(varname) and is_binary(value) do
  # "=" is the NAME/value separator in the environment block, so a key
  # containing it cannot be represented and is rejected.
  if String.contains?(varname, "=") do
    raise ArgumentError,
          "cannot execute System.put_env/2 for key with \"=\", got: #{inspect(varname)}"
  else
    :os.putenv(String.to_charlist(varname), String.to_charlist(value))
    :ok
  end
end
@doc """
Sets multiple environment variables.
Sets a new value for each environment variable corresponding
to each `{key, value}` pair in `enum`.
"""
@spec put_env(Enumerable.t()) :: :ok
def put_env(enum) do
Enum.each(enum, fn {key, val} -> put_env(key, val) end)
end
@doc """
Deletes an environment variable.
Removes the variable `varname` from the environment.
"""
@spec delete_env(String.t()) :: :ok
def delete_env(varname) do
  varname
  |> String.to_charlist()
  |> :os.unsetenv()

  :ok
end
@doc """
Deprecated mechanism to retrieve the last exception stacktrace.
Accessing the stacktrace outside of a rescue/catch is deprecated.
If you want to support only Elixir v1.7+, you must access
`__STACKTRACE__/0` inside a rescue/catch. If you want to support
earlier Elixir versions, move `System.stacktrace/0` inside a rescue/catch.
Note that the Erlang VM (and therefore this function) does not
return the current stacktrace but rather the stacktrace of the
latest exception. To retrieve the stacktrace of the current process,
use `Process.info(self(), :current_stacktrace)` instead.
"""
# TODO: Fully deprecate it on Elixir v1.11 via @deprecated
# It is currently partially deprecated in elixir_dispatch.erl
def stacktrace do
apply(:erlang, :get_stacktrace, [])
end
@doc """
Immediately halts the Erlang runtime system.
Terminates the Erlang runtime system without properly shutting down
applications and ports. Please see `stop/1` for a careful shutdown of the
system.
`status` must be a non-negative integer, the atom `:abort` or a binary.
* If an integer, the runtime system exits with the integer value which
is returned to the operating system.
* If `:abort`, the runtime system aborts producing a core dump, if that is
enabled in the operating system.
* If a string, an Erlang crash dump is produced with status as slogan,
and then the runtime system exits with status code 1.
Note that on many platforms, only the status codes 0-255 are supported
by the operating system.
For more information, see `:erlang.halt/1`.
## Examples
System.halt(0)
System.halt(1)
System.halt(:abort)
"""
@spec halt(non_neg_integer | binary | :abort) :: no_return
def halt(status \\ 0)
def halt(status) when is_integer(status) or status == :abort do
:erlang.halt(status)
end
def halt(status) when is_binary(status) do
:erlang.halt(String.to_charlist(status))
end
@doc """
Returns the operating system PID for the current Erlang runtime system instance.
Returns a string containing the (usually) numerical identifier for a process.
On UNIX, this is typically the return value of the `getpid()` system call.
On Windows, the process ID as returned by the `GetCurrentProcessId()` system
call is used.
## Examples
System.pid()
"""
@doc since: "1.9.0"
@spec pid :: String.t()
def pid, do: List.to_string(:os.getpid())
@doc """
Restarts all applications in the Erlang runtime system.
All applications are taken down smoothly, all code is unloaded, and all ports
are closed before the system starts all applications once again.
## Examples
System.restart()
"""
@doc since: "1.9.0"
@spec restart :: :ok
defdelegate restart(), to: :init
@doc """
Carefully stops the Erlang runtime system.
All applications are taken down smoothly, all code is unloaded, and all ports
are closed before the system terminates by calling `halt/1`.
`status` must be a non-negative integer value which is returned by the
runtime system to the operating system.
Note that on many platforms, only the status codes 0-255 are supported
by the operating system.
## Examples
System.stop(0)
System.stop(1)
"""
@doc since: "1.5.0"
@spec stop(non_neg_integer | binary) :: no_return
def stop(status \\ 0)
def stop(status) when is_integer(status) do
:init.stop(status)
end
def stop(status) when is_binary(status) do
:init.stop(String.to_charlist(status))
end
@doc ~S"""
Executes the given `command` with `args`.
`command` is expected to be an executable available in PATH
unless an absolute path is given.
`args` must be a list of binaries which the executable will receive
as its arguments as is. This means that:
* environment variables will not be interpolated
* wildcard expansion will not happen (unless `Path.wildcard/2` is used
explicitly)
* arguments do not need to be escaped or quoted for shell safety
This function returns a tuple containing the collected result
and the command exit status.
Internally, this function uses a `Port` for interacting with the
outside world. However, if you plan to run a long-running program,
ports guarantee stdin/stdout devices will be closed but it does not
automatically terminate the program. The documentation for the
`Port` module describes this problem and possible solutions under
the "Zombie processes" section.
## Examples
iex> System.cmd("echo", ["hello"])
{"hello\n", 0}
iex> System.cmd("echo", ["hello"], env: [{"MIX_ENV", "test"}])
{"hello\n", 0}
iex> System.cmd("echo", ["hello"], into: IO.stream(:stdio, :line))
hello
{%IO.Stream{}, 0}
## Options
* `:into` - injects the result into the given collectable, defaults to `""`
* `:cd` - the directory to run the command in
* `:env` - an enumerable of tuples containing environment key-value as binary
* `:arg0` - sets the command arg0
* `:stderr_to_stdout` - redirects stderr to stdout when `true`
* `:parallelism` - when `true`, the VM will schedule port tasks to improve
parallelism in the system. If set to `false`, the VM will try to perform
commands immediately, improving latency at the expense of parallelism.
The default can be set on system startup by passing the "+spp" argument
to `--erl`.
## Error reasons
If invalid arguments are given, `ArgumentError` is raised by
`System.cmd/3`. `System.cmd/3` also expects a strict set of
options and will raise if unknown or invalid options are given.
Furthermore, `System.cmd/3` may fail with one of the POSIX reasons
detailed below:
* `:system_limit` - all available ports in the Erlang emulator are in use
* `:enomem` - there was not enough memory to create the port
* `:eagain` - there are no more available operating system processes
* `:enametoolong` - the external command given was too long
* `:emfile` - there are no more available file descriptors
(for the operating system process that the Erlang emulator runs in)
* `:enfile` - the file table is full (for the entire operating system)
* `:eacces` - the command does not point to an executable file
* `:enoent` - the command does not point to an existing file
## Shell commands
If you desire to execute a trusted command inside a shell, with pipes,
redirecting and so on, please check `:os.cmd/1`.
"""
@spec cmd(binary, [binary], keyword) :: {Collectable.t(), exit_status :: non_neg_integer}
def cmd(command, args, opts \\ []) when is_binary(command) and is_list(args) do
  # A null byte would silently truncate the command name at the OS level.
  assert_no_null_byte!(command, "System.cmd/3")

  unless Enum.all?(args, &is_binary/1) do
    raise ArgumentError, "all arguments for System.cmd/3 must be binaries"
  end

  cmd = String.to_charlist(command)

  # Absolute paths are used as-is; anything else is resolved via PATH,
  # raising :enoent (carrying the original call args) when not found.
  cmd =
    if Path.type(cmd) == :absolute do
      cmd
    else
      :os.find_executable(cmd) || :erlang.error(:enoent, [command, args, opts])
    end

  # Split user options into the collectable target (:into, default "")
  # and the validated port options.
  {into, opts} = cmd_opts(opts, [:use_stdio, :exit_status, :binary, :hide, args: args], "")
  {initial, fun} = Collectable.into(into)

  # Stream port output into the collectable. On any error, halt the
  # collectable first, then re-raise preserving the original stacktrace.
  try do
    do_cmd(Port.open({:spawn_executable, cmd}, opts), initial, fun)
  catch
    kind, reason ->
      fun.(initial, :halt)
      :erlang.raise(kind, reason, __STACKTRACE__)
  else
    {acc, status} -> {fun.(acc, :done), status}
  end
end
# Receive loop for the spawned port: folds each :data chunk into the
# collectable accumulator and returns {acc, status} once the port
# delivers its exit status.
defp do_cmd(port, acc, fun) do
  receive do
    {^port, {:data, data}} ->
      do_cmd(port, fun.(acc, {:cont, data}), fun)

    {^port, {:exit_status, status}} ->
      {acc, status}
  end
end
# Normalizes and validates the options given to cmd/3, accumulating port
# options in `opts` and threading the :into collectable (last one wins).
# Unknown options raise ArgumentError. Returns {into, port_opts}.
defp cmd_opts([{:into, any} | t], opts, _into), do: cmd_opts(t, opts, any)

defp cmd_opts([{:cd, bin} | t], opts, into) when is_binary(bin),
  do: cmd_opts(t, [{:cd, bin} | opts], into)

defp cmd_opts([{:arg0, bin} | t], opts, into) when is_binary(bin),
  do: cmd_opts(t, [{:arg0, bin} | opts], into)

defp cmd_opts([{:stderr_to_stdout, true} | t], opts, into),
  do: cmd_opts(t, [:stderr_to_stdout | opts], into)

# A false :stderr_to_stdout is a no-op rather than a port option.
defp cmd_opts([{:stderr_to_stdout, false} | t], opts, into), do: cmd_opts(t, opts, into)

defp cmd_opts([{:parallelism, bool} | t], opts, into) when is_boolean(bool),
  do: cmd_opts(t, [{:parallelism, bool} | opts], into)

defp cmd_opts([{:env, enum} | t], opts, into),
  do: cmd_opts(t, [{:env, validate_env(enum)} | opts], into)

defp cmd_opts([{key, val} | _], _opts, _into),
  do: raise(ArgumentError, "invalid option #{inspect(key)} with value #{inspect(val)}")

defp cmd_opts([], opts, into), do: {into, opts}
# Converts an enumerable of {key, value} pairs into the charlist-based
# format expected by ports. A nil value maps to `false`, which unsets
# the variable in the child process environment.
defp validate_env(enum) do
  Enum.map(enum, fn pair ->
    case pair do
      {key, nil} -> {String.to_charlist(key), false}
      {key, value} -> {String.to_charlist(key), String.to_charlist(value)}
      other -> raise ArgumentError, "invalid environment key-value #{inspect(other)}"
    end
  end)
end
@doc """
Returns the current monotonic time in the `:native` time unit.
This time is monotonically increasing and starts in an unspecified
point in time.
Inlined by the compiler.
"""
@spec monotonic_time() :: integer
def monotonic_time do
:erlang.monotonic_time()
end
@doc """
Returns the current monotonic time in the given time unit.
This time is monotonically increasing and starts in an unspecified
point in time.
"""
@spec monotonic_time(time_unit) :: integer
def monotonic_time(unit) do
:erlang.monotonic_time(normalize_time_unit(unit))
end
@doc """
Returns the current system time in the `:native` time unit.
It is the VM view of the `os_time/0`. They may not match in
case of time warps although the VM works towards aligning
them. This time is not monotonic.
Inlined by the compiler.
"""
@spec system_time() :: integer
def system_time do
:erlang.system_time()
end
@doc """
Returns the current system time in the given time unit.
It is the VM view of the `os_time/0`. They may not match in
case of time warps although the VM works towards aligning
them. This time is not monotonic.
"""
@spec system_time(time_unit) :: integer
def system_time(unit) do
:erlang.system_time(normalize_time_unit(unit))
end
@doc """
Converts `time` from time unit `from_unit` to time unit `to_unit`.
The result is rounded via the floor function.
`convert_time_unit/3` accepts an additional time unit (other than the
ones in the `t:time_unit/0` type) called `:native`. `:native` is the time
unit used by the Erlang runtime system. It's determined when the runtime
starts and stays the same until the runtime is stopped. To determine what
the `:native` unit amounts to in a system, you can call this function to
convert 1 second to the `:native` time unit (i.e.,
`System.convert_time_unit(1, :second, :native)`).
"""
@spec convert_time_unit(integer, time_unit | :native, time_unit | :native) :: integer
def convert_time_unit(time, from_unit, to_unit) do
:erlang.convert_time_unit(time, normalize_time_unit(from_unit), normalize_time_unit(to_unit))
end
@doc """
Returns the current time offset between the Erlang VM monotonic
time and the Erlang VM system time.
The result is returned in the `:native` time unit.
See `time_offset/1` for more information.
Inlined by the compiler.
"""
@spec time_offset() :: integer
def time_offset do
:erlang.time_offset()
end
@doc """
Returns the current time offset between the Erlang VM monotonic
time and the Erlang VM system time.
The result is returned in the given time unit `unit`. The returned
offset, added to an Erlang monotonic time (e.g., obtained with
`monotonic_time/1`), gives the Erlang system time that corresponds
to that monotonic time.
"""
@spec time_offset(time_unit) :: integer
def time_offset(unit) do
:erlang.time_offset(normalize_time_unit(unit))
end
@doc """
Returns the current operating system (OS) time.
The result is returned in the `:native` time unit.
This time may be adjusted forwards or backwards in time
with no limitation and is not monotonic.
Inlined by the compiler.
"""
@spec os_time() :: integer
def os_time do
:os.system_time()
end
@doc """
Returns the current operating system (OS) time in the given time `unit`.
This time may be adjusted forwards or backwards in time
with no limitation and is not monotonic.
"""
@spec os_time(time_unit) :: integer
def os_time(unit) do
:os.system_time(normalize_time_unit(unit))
end
@doc """
Returns the Erlang/OTP release number.
"""
@spec otp_release :: String.t()
def otp_release do
:erlang.list_to_binary(:erlang.system_info(:otp_release))
end
@doc """
Returns the number of schedulers in the VM.
"""
@spec schedulers :: pos_integer
def schedulers do
:erlang.system_info(:schedulers)
end
@doc """
Returns the number of schedulers online in the VM.
"""
@spec schedulers_online :: pos_integer
def schedulers_online do
:erlang.system_info(:schedulers_online)
end
@doc """
Generates and returns an integer that is unique in the current runtime
instance.
"Unique" means that this function, called with the same list of `modifiers`,
will never return the same integer more than once on the current runtime
instance.
If `modifiers` is `[]`, then a unique integer (that can be positive or negative) is returned.
Other modifiers can be passed to change the properties of the returned integer:
* `:positive` - the returned integer is guaranteed to be positive.
* `:monotonic` - the returned integer is monotonically increasing. This
means that, on the same runtime instance (but even on different
processes), integers returned using the `:monotonic` modifier will always
be strictly less than integers returned by successive calls with the
`:monotonic` modifier.
All modifiers listed above can be combined; repeated modifiers in `modifiers`
will be ignored.
Inlined by the compiler.
"""
@spec unique_integer([:positive | :monotonic]) :: integer
def unique_integer(modifiers \\ [])
def unique_integer(modifiers), do: :erlang.unique_integer(modifiers)
# Raises ArgumentError when `binary` contains a null byte (which would
# truncate the program name at the OS boundary); otherwise returns
# `binary` unchanged.
defp assert_no_null_byte!(binary, operation) do
  if :binary.match(binary, "\0") == :nomatch do
    binary
  else
    raise ArgumentError,
          "cannot execute #{operation} for program with null byte, got: #{inspect(binary)}"
  end
end
# Translates user-facing time units into the representation accepted by
# the :erlang time BIFs. :native and the singular atoms pass through; a
# positive integer means "parts per second".
defp normalize_time_unit(:native), do: :native

defp normalize_time_unit(:second), do: :second
defp normalize_time_unit(:millisecond), do: :millisecond
defp normalize_time_unit(:microsecond), do: :microsecond
defp normalize_time_unit(:nanosecond), do: :nanosecond

# Deprecated spellings are still accepted, but each emits a deprecation
# warning via warn/2 and maps to its singular replacement.
defp normalize_time_unit(:seconds), do: warn(:seconds, :second)
defp normalize_time_unit(:milliseconds), do: warn(:milliseconds, :millisecond)
defp normalize_time_unit(:microseconds), do: warn(:microseconds, :microsecond)
defp normalize_time_unit(:nanoseconds), do: warn(:nanoseconds, :nanosecond)

defp normalize_time_unit(:milli_seconds), do: warn(:milli_seconds, :millisecond)
defp normalize_time_unit(:micro_seconds), do: warn(:micro_seconds, :microsecond)
defp normalize_time_unit(:nano_seconds), do: warn(:nano_seconds, :nanosecond)

defp normalize_time_unit(unit) when is_integer(unit) and unit > 0, do: unit

# Anything else is rejected with a descriptive error.
defp normalize_time_unit(other) do
  raise ArgumentError,
        "unsupported time unit. Expected :second, :millisecond, " <>
          ":microsecond, :nanosecond, or a positive integer, " <> "got #{inspect(other)}"
end
# Emits a deprecation warning for an old time-unit spelling, deduplicated
# per unit by :elixir_config.warn/2, then returns the replacement unit so
# normalization can proceed.
defp warn(unit, replacement_unit) do
  {:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
  # Drop the first three frames (internal System frames) so the warning
  # points closer to the caller of the time function.
  stacktrace = Enum.drop(stacktrace, 3)

  # :elixir_config.warn/2 returns a falsy value once this {System, unit}
  # pair has already warned, so && suppresses repeated IO.warn output.
  :elixir_config.warn({System, unit}, stacktrace) &&
    IO.warn(
      "deprecated time unit: #{inspect(unit)}. A time unit should be " <>
        ":second, :millisecond, :microsecond, :nanosecond, or a positive integer",
      stacktrace
    )

  replacement_unit
end
end
| 30.091922 | 102 | 0.674442 |
93fd55cc865f4b8d8bcb22a27546bbc65229f2e7 | 1,504 | ex | Elixir | lib/dialyxir/warnings/missing_range.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | 1,455 | 2015-01-03T02:53:19.000Z | 2022-03-12T00:31:25.000Z | lib/dialyxir/warnings/missing_range.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | 330 | 2015-05-14T13:53:13.000Z | 2022-03-29T17:12:23.000Z | lib/dialyxir/warnings/missing_range.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | 146 | 2015-02-03T18:19:43.000Z | 2022-03-07T10:05:20.000Z | defmodule Dialyxir.Warnings.MissingRange do
@moduledoc """
Function spec declares a list of types, but function returns value
outside stated range.
This error only appears with the :overspecs flag.
## Example
defmodule Example do
@spec foo(any()) :: :ok
def foo(:ok) do
:ok
end
def foo(_) do
:error
end
end
"""
@behaviour Dialyxir.Warning
@impl Dialyxir.Warning
@spec warning() :: :missing_range
def warning(), do: :missing_range
@impl Dialyxir.Warning
@spec format_short([String.t()]) :: String.t()
def format_short([module, function, arity | _]) do
  module_name = Erlex.pretty_print(module)
  "The type specification is missing types returned by #{module_name}.#{function}/#{arity}."
end
@impl Dialyxir.Warning
@spec format_long([String.t()]) :: String.t()
def format_long([module, function, arity, extra_ranges, contract_range]) do
  module_name = Erlex.pretty_print(module)
  contract_types = Erlex.pretty_print_type(contract_range)
  missing_types = Erlex.pretty_print_type(extra_ranges)

  """
  The type specification is missing types returned by function.
  Function:
  #{module_name}.#{function}/#{arity}
  Type specification return types:
  #{contract_types}
  Missing from spec:
  #{missing_types}
  """
end
@impl Dialyxir.Warning
@spec explain() :: String.t()
def explain() do
@moduledoc
end
end
| 23.873016 | 96 | 0.672207 |
93fd5f24ebd6eff36f51582c778ff9e12799c536 | 1,947 | exs | Elixir | test/sanbase_web/graphql/signal/api_signal_metadata_test.exs | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | test/sanbase_web/graphql/signal/api_signal_metadata_test.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | test/sanbase_web/graphql/signal/api_signal_metadata_test.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule SanbaseWeb.Graphql.Clickhouse.ApiSignalMetadataTest do
use SanbaseWeb.ConnCase, async: false
import Sanbase.Factory, only: [rand_str: 0]
import SanbaseWeb.Graphql.TestHelpers
alias Sanbase.Signal
test "returns data for all available signal", %{conn: conn} do
signals = Signal.available_signals()
aggregations = Signal.available_aggregations()
aggregations =
aggregations |> Enum.map(fn aggr -> aggr |> Atom.to_string() |> String.upcase() end)
for signal <- signals do
%{"data" => %{"getSignal" => %{"metadata" => metadata}}} = get_signal_metadata(conn, signal)
assert metadata["signal"] == signal
assert match?(
%{"signal" => _, "defaultAggregation" => _, "minInterval" => _, "dataType" => _},
metadata
)
assert metadata["defaultAggregation"] in aggregations
assert metadata["minInterval"] in ["1m"]
assert metadata["dataType"] in ["TIMESERIES"]
assert length(metadata["availableAggregations"]) > 0
end
end
test "returns error for unavailable signal", %{conn: conn} do
rand_signals = Enum.map(1..100, fn _ -> rand_str() end)
rand_signals = rand_signals -- Signal.available_signals()
# Do not mock the `histogram_data` function because it's the one that rejects
for signal <- rand_signals do
%{
"errors" => [
%{"message" => error_message}
]
} = get_signal_metadata(conn, signal)
assert error_message == "The signal '#{signal}' is not supported or is mistyped."
end
end
defp get_signal_metadata(conn, signal) do
query = """
{
getSignal(signal: "#{signal}"){
metadata{
minInterval
defaultAggregation
availableAggregations
dataType
signal
}
}
}
"""
conn
|> post("/graphql", query_skeleton(query))
|> json_response(200)
end
end
| 28.217391 | 98 | 0.620955 |
93fd8018404ca325fdf0219e29d43bb5034322e4 | 11,065 | ex | Elixir | lib/aws/cloud_trail.ex | ttyerl/aws-elixir | 48f6360fccee5dd587fab7a6efb109a399ff9a46 | [
"Apache-2.0"
] | 223 | 2015-05-29T17:45:35.000Z | 2021-06-29T08:37:14.000Z | lib/aws/cloud_trail.ex | ttyerl/aws-elixir | 48f6360fccee5dd587fab7a6efb109a399ff9a46 | [
"Apache-2.0"
] | 33 | 2015-11-20T20:56:43.000Z | 2021-07-09T20:13:34.000Z | lib/aws/cloud_trail.ex | ttyerl/aws-elixir | 48f6360fccee5dd587fab7a6efb109a399ff9a46 | [
"Apache-2.0"
] | 62 | 2015-06-14T20:53:24.000Z | 2021-12-13T07:20:15.000Z | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/jkakar/aws-codegen for more details.
defmodule AWS.CloudTrail do
@moduledoc """
AWS CloudTrail
This is the CloudTrail API Reference. It provides descriptions of actions,
data types, common parameters, and common errors for CloudTrail.
CloudTrail is a web service that records AWS API calls for your AWS account
and delivers log files to an Amazon S3 bucket. The recorded information
includes the identity of the user, the start time of the AWS API call, the
source IP address, the request parameters, and the response elements
returned by the service.
<note> As an alternative to the API, you can use one of the AWS SDKs, which
consist of libraries and sample code for various programming languages and
platforms (Java, Ruby, .NET, iOS, Android, etc.). The SDKs provide a
convenient way to create programmatic access to AWSCloudTrail. For example,
the SDKs take care of cryptographically signing requests, managing errors,
and retrying requests automatically. For information about the AWS SDKs,
including how to download and install them, see the [Tools for Amazon Web
Services page](http://aws.amazon.com/tools/).
</note> See the [AWS CloudTrail User
Guide](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-user-guide.html)
for information about the data that is included with each AWS API call
listed in the log files.
"""
@doc """
Adds one or more tags to a trail, up to a limit of 50. Tags must be unique
per trail. Overwrites an existing tag's value when a new value is specified
for an existing tag key. If you specify a key without a value, the tag will
be created with the specified key and a value of null. You can tag a trail
that applies to all regions only from the region in which the trail was
created (that is, from its home region).
"""
def add_tags(client, input, options \\ []) do
request(client, "AddTags", input, options)
end
@doc """
Creates a trail that specifies the settings for delivery of log data to an
Amazon S3 bucket. A maximum of five trails can exist in a region,
irrespective of the region in which they were created.
"""
def create_trail(client, input, options \\ []) do
request(client, "CreateTrail", input, options)
end
@doc """
Deletes a trail. This operation must be called from the region in which the
trail was created. `DeleteTrail` cannot be called on the shadow trails
(replicated trails in other regions) of a trail that is enabled in all
regions.
"""
def delete_trail(client, input, options \\ []) do
request(client, "DeleteTrail", input, options)
end
@doc """
Retrieves settings for the trail associated with the current region for
your account.
"""
def describe_trails(client, input, options \\ []) do
request(client, "DescribeTrails", input, options)
end
@doc """
Describes the settings for the event selectors that you configured for your
trail. The information returned for your event selectors includes the
following:
<ul> <li> The S3 objects that you are logging for data events.
</li> <li> If your event selector includes management events.
</li> <li> If your event selector includes read-only events, write-only
events, or all.
</li> </ul> For more information, see [Configuring Event Selectors for
Trails](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/create-event-selectors-for-a-trail.html)
in the *AWS CloudTrail User Guide*.
"""
def get_event_selectors(client, input, options \\ []) do
request(client, "GetEventSelectors", input, options)
end
@doc """
Returns a JSON-formatted list of information about the specified trail.
Fields include information on delivery errors, Amazon SNS and Amazon S3
errors, and start and stop logging times for each trail. This operation
returns trail status from a single region. To return trail status from all
regions, you must call the operation on each region.
"""
def get_trail_status(client, input, options \\ []) do
request(client, "GetTrailStatus", input, options)
end
@doc """
Returns all public keys whose private keys were used to sign the digest
files within the specified time range. The public key is needed to validate
digest files that were signed with its corresponding private key.
<note> CloudTrail uses different private/public key pairs per region. Each
digest file is signed with a private key unique to its region. Therefore,
when you validate a digest file from a particular region, you must look in
the same region for its corresponding public key.
</note>
"""
def list_public_keys(client, input, options \\ []) do
request(client, "ListPublicKeys", input, options)
end
@doc """
Lists the tags for the trail in the current region.
"""
def list_tags(client, input, options \\ []) do
request(client, "ListTags", input, options)
end
@doc """
Looks up API activity events captured by CloudTrail that create, update, or
delete resources in your account. Events for a region can be looked up for
the times in which you had CloudTrail turned on in that region during the
last seven days. Lookup supports the following attributes:
<ul> <li> Event ID
</li> <li> Event name
</li> <li> Resource name
</li> <li> Resource type
</li> <li> User name
</li> </ul> All attributes are optional. The default number of results
returned is 10, with a maximum of 50 possible. The response includes a
token that you can use to get the next page of results.
<important> The rate of lookup requests is limited to one per second per
account. If this limit is exceeded, a throttling error occurs.
</important> <important> Events that occurred during the selected time
range will not be available for lookup if CloudTrail logging was not
enabled when the events occurred.
</important>
"""
def lookup_events(client, input, options \\ []) do
request(client, "LookupEvents", input, options)
end
@doc """
Configures an event selector for your trail. Use event selectors to specify
the type of events that you want your trail to log. When an event occurs in
your account, CloudTrail evaluates the event selectors in all trails. For
each trail, if the event matches any event selector, the trail processes
and logs the event. If the event doesn't match any event selector, the
trail doesn't log the event.
Example
<ol> <li> You create an event selector for a trail and specify that you
want write-only events.
</li> <li> The EC2 `GetConsoleOutput` and `RunInstances` API operations
occur in your account.
</li> <li> CloudTrail evaluates whether the events match your event
selectors.
</li> <li> The `RunInstances` is a write-only event and it matches your
event selector. The trail logs the event.
</li> <li> The `GetConsoleOutput` is a read-only event but it doesn't match
your event selector. The trail doesn't log the event.
</li> </ol> The `PutEventSelectors` operation must be called from the
region in which the trail was created; otherwise, an
`InvalidHomeRegionException` is thrown.
You can configure up to five event selectors for each trail. For more
information, see [Configuring Event Selectors for
Trails](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/create-event-selectors-for-a-trail.html)
in the *AWS CloudTrail User Guide*.
"""
def put_event_selectors(client, input, options \\ []) do
request(client, "PutEventSelectors", input, options)
end
@doc """
Removes the specified tags from a trail.
"""
def remove_tags(client, input, options \\ []) do
request(client, "RemoveTags", input, options)
end
@doc """
Starts the recording of AWS API calls and log file delivery for a trail.
For a trail that is enabled in all regions, this operation must be called
from the region in which the trail was created. This operation cannot be
called on the shadow trails (replicated trails in other regions) of a trail
that is enabled in all regions.
"""
def start_logging(client, input, options \\ []) do
request(client, "StartLogging", input, options)
end
@doc """
Suspends the recording of AWS API calls and log file delivery for the
specified trail. Under most circumstances, there is no need to use this
action. You can update a trail without stopping it first. This action is
the only way to stop recording. For a trail enabled in all regions, this
operation must be called from the region in which the trail was created, or
an `InvalidHomeRegionException` will occur. This operation cannot be called
on the shadow trails (replicated trails in other regions) of a trail
enabled in all regions.
"""
def stop_logging(client, input, options \\ []) do
request(client, "StopLogging", input, options)
end
@doc """
Updates the settings that specify delivery of log files. Changes to a trail
do not require stopping the CloudTrail service. Use this action to
designate an existing bucket for log delivery. If the existing bucket has
previously been a target for CloudTrail log files, an IAM policy exists for
the bucket. `UpdateTrail` must be called from the region in which the trail
was created; otherwise, an `InvalidHomeRegionException` is thrown.
"""
def update_trail(client, input, options \\ []) do
request(client, "UpdateTrail", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "cloudtrail"}
host = get_host("cloudtrail", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "com.amazonaws.cloudtrail.v20131101.CloudTrail_20131101.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
| 39.377224 | 108 | 0.717488 |
93fd8798e93f60f87495cd55939a9ae473797192 | 1,179 | exs | Elixir | mix.exs | supersimple/elixir_bme680 | 76232b1ea9c472dc4d87fe35970381a6abbe35ca | [
"Apache-2.0"
] | null | null | null | mix.exs | supersimple/elixir_bme680 | 76232b1ea9c472dc4d87fe35970381a6abbe35ca | [
"Apache-2.0"
] | null | null | null | mix.exs | supersimple/elixir_bme680 | 76232b1ea9c472dc4d87fe35970381a6abbe35ca | [
"Apache-2.0"
] | null | null | null | defmodule ElixirBme680.MixProject do
use Mix.Project
def project do
[
app: :elixir_bme680,
version: "0.1.4",
elixir: "~> 1.7",
compilers: [:elixir_make] ++ Mix.compilers,
aliases: aliases(),
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package(),
source_url: "https://github.com/lucaong/elixir_bme680",
docs: [
main: "Bme680",
extras: ["README.md"]
]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
defp package() do
[
description: "An Elixir library to interface with the BME680 gas sensor",
files: ["lib", "LICENSE", "mix.exs", "README.md", "src/*.c", "src/*.h", "src/linux/i2c-dev.h", "Makefile"],
maintainers: ["Luca Ongaro"],
licenses: ["Apache-2.0"],
links: %{}
]
end
defp aliases do
[clean: ["clean", "clean.make"]]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:elixir_make, "~> 0.4", runtime: false},
{:ex_doc, "~> 0.19", only: :dev, runtime: false}
]
end
end
| 23.117647 | 113 | 0.564885 |
93fd96de761fe77b848268905ab49a0334c1e263 | 256 | exs | Elixir | apps/ui/config/test.exs | Soimil/Igthorn | 6187a94d7a75a28f3c42b357fa7cc211cfe4bafe | [
"MIT"
] | 3 | 2019-01-25T21:51:06.000Z | 2020-01-12T21:52:29.000Z | ui/config/test.exs | kpanic/nerves_morse | 0400179d0571dd92c2aa469b039c63e192a6b92c | [
"Apache-2.0"
] | null | null | null | ui/config/test.exs | kpanic/nerves_morse | 0400179d0571dd92c2aa469b039c63e192a6b92c | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :ui, UiWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 23.272727 | 56 | 0.726563 |
93fda9c4fd8f5dd263c0c2a081df6d8275262765 | 738 | ex | Elixir | lib/the_great_machine_web/channels/lobby_channel.ex | kroucis/TheGreatMachine | 07808d4df792739e580d50f6ee6d489608445af5 | [
"MIT"
] | 1 | 2022-01-15T17:53:49.000Z | 2022-01-15T17:53:49.000Z | lib/the_great_machine_web/channels/lobby_channel.ex | kroucis/the_great_machine | 07808d4df792739e580d50f6ee6d489608445af5 | [
"MIT"
] | null | null | null | lib/the_great_machine_web/channels/lobby_channel.ex | kroucis/the_great_machine | 07808d4df792739e580d50f6ee6d489608445af5 | [
"MIT"
] | null | null | null | defmodule TheGreatMachineWeb.LobbyChannel do
use TheGreatMachineWeb, :channel
# def join("lobby:lobby", payload, socket) do
# {:ok, socket}
# end
# def match_state(match) do
# %{ id: match.id
# }
# end
# Channels can be used in a request/response fashion
# by sending replies to requests from the client
# def handle_in("ready", _payload, socket) do
# match_id = :rand.uniform(1000000)
# {:ok, server} = TheGreatMachine.start(match_id, [socket])
# match = TheGreatMachine.get_match(server)
# socket = assign(socket, :match_id, match.id)
# reply = %{ id: :rand.uniform(10000),
# match: match_state(match)
# }
# {:reply, {:ok, reply}, socket}
# end
end
| 27.333333 | 63 | 0.627371 |
93fde23445b0b4aed16dcf76f5ed703ebeb25832 | 5,951 | exs | Elixir | test/adapter_test.exs | genesisblockhq/spandex_datadog | 2ab773d8215136ad87c4d02bcf2ce21a65e6c67b | [
"MIT"
] | 43 | 2018-10-01T16:29:52.000Z | 2022-03-14T05:25:26.000Z | test/adapter_test.exs | genesisblockhq/spandex_datadog | 2ab773d8215136ad87c4d02bcf2ce21a65e6c67b | [
"MIT"
] | 42 | 2018-08-23T05:25:23.000Z | 2022-01-31T19:54:34.000Z | test/adapter_test.exs | genesisblockhq/spandex_datadog | 2ab773d8215136ad87c4d02bcf2ce21a65e6c67b | [
"MIT"
] | 31 | 2018-08-29T17:17:04.000Z | 2022-03-02T21:07:09.000Z | defmodule SpandexDatadog.Test.AdapterTest do
use ExUnit.Case, async: true
alias Spandex.SpanContext
alias SpandexDatadog.{
Adapter,
Test.TracedModule,
Test.Util
}
test "a complete trace sends spans" do
TracedModule.trace_one_thing()
spans = Util.sent_spans()
Enum.each(spans, fn span ->
assert span.service == :spandex_test
assert span.meta.env == "test"
end)
end
test "a trace can specify additional attributes" do
TracedModule.trace_with_special_name()
assert(Util.find_span("special_name").service == :special_service)
end
test "a span can specify additional attributes" do
TracedModule.trace_with_special_name()
assert(Util.find_span("special_name_span").service == :special_span_service)
end
test "a complete trace sends a top level span" do
TracedModule.trace_one_thing()
span = Util.find_span("trace_one_thing/0")
refute is_nil(span)
assert span.service == :spandex_test
assert span.meta.env == "test"
end
test "a complete trace sends the internal spans as well" do
TracedModule.trace_one_thing()
assert(Util.find_span("do_one_thing/0") != nil)
end
test "the parent_id for a child span is correct" do
TracedModule.trace_one_thing()
assert(Util.find_span("trace_one_thing/0").span_id == Util.find_span("do_one_thing/0").parent_id)
end
test "a span is correctly notated as an error if an excepton occurs" do
Util.can_fail(fn -> TracedModule.trace_one_error() end)
assert(Util.find_span("trace_one_error/0").error == 1)
end
test "spans all the way up are correctly notated as an error" do
Util.can_fail(fn -> TracedModule.error_two_deep() end)
assert(Util.find_span("error_two_deep/0").error == 1)
assert(Util.find_span("error_one_deep/0").error == 1)
end
test "successful sibling spans are not marked as failures when sibling fails" do
Util.can_fail(fn -> TracedModule.two_fail_one_succeeds() end)
assert(Util.find_span("error_one_deep/0", 0).error == 1)
assert(Util.find_span("do_one_thing/0").error == 0)
assert(Util.find_span("error_one_deep/0", 1).error == 1)
end
describe "distributed_context/2 with Plug.Conn" do
test "returns a SpanContext struct" do
conn =
:get
|> Plug.Test.conn("/")
|> Plug.Conn.put_req_header("x-datadog-trace-id", "123")
|> Plug.Conn.put_req_header("x-datadog-parent-id", "456")
|> Plug.Conn.put_req_header("x-datadog-sampling-priority", "2")
assert {:ok, %SpanContext{} = span_context} = Adapter.distributed_context(conn, [])
assert span_context.trace_id == 123
assert span_context.parent_id == 456
assert span_context.priority == 2
end
test "priority defaults to 1 (i.e. we currently assume all distributed traces should be kept)" do
conn =
:get
|> Plug.Test.conn("/")
|> Plug.Conn.put_req_header("x-datadog-trace-id", "123")
|> Plug.Conn.put_req_header("x-datadog-parent-id", "456")
assert {:ok, %SpanContext{priority: 1}} = Adapter.distributed_context(conn, [])
end
test "returns an error when it cannot detect both a Trace ID and a Span ID" do
conn = Plug.Test.conn(:get, "/")
assert {:error, :no_distributed_trace} = Adapter.distributed_context(conn, [])
end
end
describe "distributed_context/2 with Spandex.headers()" do
test "returns a SpanContext struct when headers is a list" do
headers = [{"x-datadog-trace-id", "123"}, {"x-datadog-parent-id", "456"}, {"x-datadog-sampling-priority", "2"}]
assert {:ok, %SpanContext{} = span_context} = Adapter.distributed_context(headers, [])
assert span_context.trace_id == 123
assert span_context.parent_id == 456
assert span_context.priority == 2
end
test "returns a SpanContext struct when headers is a map" do
headers = %{
"x-datadog-trace-id" => "123",
"x-datadog-parent-id" => "456",
"x-datadog-sampling-priority" => "2"
}
assert {:ok, %SpanContext{} = span_context} = Adapter.distributed_context(headers, [])
assert span_context.trace_id == 123
assert span_context.parent_id == 456
assert span_context.priority == 2
end
test "priority defaults to 1 (i.e. we currently assume all distributed traces should be kept)" do
headers = %{
"x-datadog-trace-id" => "123",
"x-datadog-parent-id" => "456"
}
assert {:ok, %SpanContext{priority: 1}} = Adapter.distributed_context(headers, [])
end
test "returns an error when it cannot detect both a Trace ID and a Span ID" do
headers = %{}
assert {:error, :no_distributed_trace} = Adapter.distributed_context(headers, [])
end
end
describe "inject_context/3" do
test "Prepends distributed tracing headers to an existing list of headers" do
span_context = %SpanContext{trace_id: 123, parent_id: 456, priority: 10}
headers = [{"header1", "value1"}, {"header2", "value2"}]
result = Adapter.inject_context(headers, span_context, [])
assert result == [
{"x-datadog-trace-id", "123"},
{"x-datadog-parent-id", "456"},
{"x-datadog-sampling-priority", "10"},
{"header1", "value1"},
{"header2", "value2"}
]
end
test "Merges distributed tracing headers with an existing map of headers" do
span_context = %SpanContext{trace_id: 123, parent_id: 456, priority: 10}
headers = %{"header1" => "value1", "header2" => "value2"}
result = Adapter.inject_context(headers, span_context, [])
assert result == %{
"x-datadog-trace-id" => "123",
"x-datadog-parent-id" => "456",
"x-datadog-sampling-priority" => "10",
"header1" => "value1",
"header2" => "value2"
}
end
end
end
| 33.621469 | 117 | 0.64611 |
93fdee888a6ce6e2fd246fc1da7222eb81ab501c | 5,290 | ex | Elixir | lib/smppsend/esme_helpers.ex | savonarola/smppsend | f7a4784dfa2cfa8a37876877eab1212b3b37bfc5 | [
"Apache-2.0"
] | 11 | 2017-06-09T06:55:31.000Z | 2019-09-17T07:05:37.000Z | lib/smppsend/esme_helpers.ex | savonarola/smppsend | f7a4784dfa2cfa8a37876877eab1212b3b37bfc5 | [
"Apache-2.0"
] | 11 | 2017-07-14T10:03:58.000Z | 2019-11-15T09:22:30.000Z | lib/smppsend/esme_helpers.ex | savonarola/smppsend | f7a4784dfa2cfa8a37876877eab1212b3b37bfc5 | [
"Apache-2.0"
] | 9 | 2017-06-19T07:10:06.000Z | 2019-11-14T11:03:56.000Z | defmodule SMPPSend.ESMEHelpers do
alias SMPPEX.Pdu
alias SMPPEX.Pdu.Factory
alias SMPPEX.Pdu.PP
alias :timer, as: Timer
require Logger
def connect(host, port, bind_pdu, session_opts \\ [], esme_mod \\ SMPPEX.ESME.Sync) do
Logger.info "Connecting to #{host}:#{port}"
case esme_mod.start_link(host, port, session_opts) do
{:ok, esme} ->
Logger.info "Connected"
bind(esme, bind_pdu, esme_mod)
{:error, reason} -> {:error, "error connecting: #{inspect reason}"}
end
end
defp bind(esme, bind_pdu, esme_mod) do
Logger.info("Binding #{PP.format(bind_pdu)}")
response = esme_mod.request(esme, bind_pdu)
case response do
{:ok, pdu} ->
Logger.info("Bind response:#{PP.format(pdu)}")
case Pdu.command_status(pdu) do
0 ->
Logger.info("Bound successfully")
{:ok, esme}
status -> {:error, "bind failed, status: #{status}"}
end
:timeout -> {:error, "bind failed, timeout"}
:stop -> {:error, "bind failed, esme stopped"}
{:error, error} -> {:error, "bind failed, error: #{inspect error}"}
end
end
def send_messages(_esme, _submit_sms, _esme_mod \\ SMPPEX.ESME.Sync, _message_ids \\ [])
def send_messages(_esme, [], _esme_mod, message_ids), do: {:ok, Enum.reverse(message_ids)}
def send_messages(esme, [submit_sm | submit_sms], esme_mod, message_ids) do
Logger.info("Sending submit_sm#{PP.format(submit_sm)}")
case esme_mod.request(esme, submit_sm) do
{:ok, resp} ->
Logger.info("Got response#{PP.format(resp)}")
case Pdu.command_status(resp) do
0 -> send_messages(esme, submit_sms, esme_mod, [Pdu.field(resp, :message_id) | message_ids])
status ->
{:error, "message status: #{status}"}
end
:timeout -> {:error, "timeout"}
:stop -> {:error, "esme stopped"}
{:error, reason} -> {:error, "error: #{inspect reason}"}
end
end
def wait_dlrs(_esme, _message_ids, _timeout, _esme_mod \\ SMPPEX.ESME.Sync)
def wait_dlrs(_esme, [], _timeout, _esme_mod), do: :ok
def wait_dlrs(_esme, _message_ids, timeout, _esme_mod) when timeout <= 0, do: {:error, "timeout"}
def wait_dlrs(esme, message_ids, timeout, esme_mod) do
case wait_for_pdus(esme, esme_mod, timeout) do
{_, :stop} -> {:error, "ESME stopped while waiting for dlrs"}
{_, :timeout} -> {:error, "timeout while waiting for dlrs"}
{time, pdus} ->
receipted_message_ids = handle_async_results(esme, pdus)
case message_ids -- receipted_message_ids do
[] -> :ok
remaining_message_ids ->
wait_dlrs(esme, remaining_message_ids, timeout - div(time, 1000), esme_mod)
end
end
end
defp wait_for_pdus(esme, esme_mod, timeout) do
Timer.tc(fn() ->
esme_mod.wait_for_pdus(esme, timeout)
end)
end
def wait_infinitely(esme, esme_mod \\ SMPPEX.ESME.Sync, next \\ &wait_infinitely/3)
def wait_infinitely(esme, esme_mod, next) do
Logger.info("Waiting...")
case esme_mod.wait_for_pdus(esme) do
:stop -> {:error, "esme stopped"}
:timeout -> next.(esme, esme_mod, next)
wait_result ->
handle_async_results(esme, wait_result)
next.(esme, esme_mod, next)
end
end
defp handle_async_results(esme, pdus, message_ids \\ [])
defp handle_async_results(_esme, [], message_ids), do: message_ids
defp handle_async_results(esme, [{:pdu, pdu} | rest_pdus], message_ids) do
Logger.info("Pdu received:#{PP.format pdu}")
case Pdu.command_name(pdu) do
:deliver_sm ->
receipted_message_id = Pdu.field(pdu, :receipted_message_id)
deliver_sm_resp(esme,pdu)
handle_async_results(esme, rest_pdus, [ receipted_message_id | message_ids ])
_ ->
handle_async_results(esme, rest_pdus, message_ids)
end
end
defp handle_async_results(esme, [{:resp, pdu, _original_pdu} | rest_pdus], message_ids) do
Logger.info("Response received:#{PP.format pdu}")
handle_async_results(esme, rest_pdus, message_ids)
end
defp handle_async_results(esme, [{:timeout, pdu} | rest_pdus], message_ids) do
Logger.info("Pdu timeout:#{PP.format pdu}")
handle_async_results(esme, rest_pdus, message_ids)
end
defp handle_async_results(esme, [{:ok, pdu} | rest_pdus], message_ids) do
Logger.info("Pdu sent:#{PP.format pdu}")
handle_async_results(esme, rest_pdus, message_ids)
end
defp handle_async_results(esme, [{:error, pdu, error} | rest_pdus], message_ids) do
Logger.info("Pdu send error(#{inspect error}):#{PP.format pdu}")
handle_async_results(esme, rest_pdus, message_ids)
end
def unbind(esme, esme_mod \\ SMPPEX.ESME.Sync) do
unbind_pdu = Factory.unbind
Logger.info("Sending unbind:#{PP.format unbind_pdu}")
response = esme_mod.request(esme, unbind_pdu)
case response do
{:ok, pdu} ->
Logger.info("Unbind resp received:#{PP.format pdu}")
:ok
:timeout -> {:error, "timeout"}
:stop -> {:error, "esme stopped"}
{:error, error} -> {:error, inspect(error)}
end
end
defp deliver_sm_resp(esme, pdu) do
resp = Factory.deliver_sm_resp |> Pdu.as_reply_to(pdu)
SMPPEX.Session.send_pdu(esme, resp)
end
end
| 34.802632 | 102 | 0.651229 |
93fdf168022961c808a3f38d923d15077be782f7 | 377 | ex | Elixir | tests/init_api/NimlerWrapper.ex | pilotier/nimler | bc80503d9e300ba59889f533b79dc51962083ba7 | [
"MIT"
] | null | null | null | tests/init_api/NimlerWrapper.ex | pilotier/nimler | bc80503d9e300ba59889f533b79dc51962083ba7 | [
"MIT"
] | null | null | null | tests/init_api/NimlerWrapper.ex | pilotier/nimler | bc80503d9e300ba59889f533b79dc51962083ba7 | [
"MIT"
] | null | null | null | defmodule NimlerInitApi do
@on_load :init
def init(),
do:
:erlang.load_nif(
to_charlist(Path.join(Path.dirname(__ENV__.file), 'nif')),
123
)
def test(), do: :erlang.nif_error(:nif_library_not_loaded)
def test_priv(), do: :erlang.nif_error(:nif_library_not_loaded)
def test_dirty(), do: :erlang.nif_error(:nif_library_not_loaded)
end
| 25.133333 | 66 | 0.681698 |
93fdf78df15b181911a886641f75a46a0ad853bb | 13,842 | ex | Elixir | lib/ex_force.ex | kianmeng/ex_force | d2f8179d61dce6771d787fc08651071e0f2be448 | [
"MIT"
] | 28 | 2017-09-23T01:05:06.000Z | 2022-03-05T09:59:12.000Z | lib/ex_force.ex | kianmeng/ex_force | d2f8179d61dce6771d787fc08651071e0f2be448 | [
"MIT"
] | 66 | 2017-09-28T17:08:17.000Z | 2022-03-04T21:04:30.000Z | lib/ex_force.ex | kianmeng/ex_force | d2f8179d61dce6771d787fc08651071e0f2be448 | [
"MIT"
] | 18 | 2017-09-23T01:07:37.000Z | 2022-02-21T04:12:26.000Z | defmodule ExForce do
@moduledoc """
Simple wrapper for Salesforce REST API.
## Installation
The package can be installed by adding `ex_force` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:ex_force, "~> 0.3"}
]
end
```
Check out [Choosing a Tesla Adapter](https://github.com/chulkilee/ex_force/wiki/Choosing-a-Tesla-Adapter).
## Usage
```elixir
{:ok, %{instance_url: instance_url} = oauth_response} =
ExForce.OAuth.get_token(
"https://login.salesforce.com",
grant_type: "password",
client_id: "client_id",
client_secret: "client_secret",
username: "username",
password: "password" <> "security_token"
)
{:ok, version_maps} = ExForce.versions(instance_url)
latest_version = version_maps |> Enum.map(&Map.fetch!(&1, "version")) |> List.last()
client = ExForce.build_client(oauth_response, api_version: latest_version)
names =
ExForce.query_stream(client, "SELECT Name FROM Account")
|> Stream.map(&Map.fetch!(&1.data, "Name"))
|> Stream.take(50)
|> Enum.to_list()
```
Note that streams emit `ExForce.SObject` or an error tuple.
"""
alias ExForce.{
Client,
QueryResult,
Request,
Response,
SObject
}
  # Type aliases used throughout the @spec declarations below.

  # Opaque client handle produced by `build_client/1,2` (see `ExForce.Client`).
  @type client :: Client.t()
  # ID of a SObject record, treated as an opaque string.
  @type sobject_id :: String.t()
  # API name of a SObject type, e.g. "Account".
  @type sobject_name :: String.t()
  # Name of a field on a SObject.
  @type field_name :: String.t()
  # A SOQL query string.
  @type soql :: String.t()
  # Identifier of a previously started query (used for fetching further pages).
  @type query_id :: String.t()
  # Minimal map shape describing a SObject reference.
  @type sobject :: %{id: String.t(), attributes: %{type: String.t()}}
  # Client construction is delegated to `ExForce.Client`; see that module for
  # the accepted `instance_url` values and options (e.g. `:api_version`).
  defdelegate build_client(instance_url), to: Client
  defdelegate build_client(instance_url, opts), to: Client
@doc """
Lists available REST API versions at an instance.
See [Versions](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_versions.htm)
"""
@spec versions(String.t()) :: {:ok, list(map)} | {:error, any}
def versions(instance_url) do
case instance_url
|> Client.build_client()
|> Client.request(%Request{method: :get, url: "/services/data"}) do
{:ok, %Response{status: 200, body: body}} when is_list(body) -> {:ok, body}
{:ok, %Response{body: body}} -> {:error, body}
{:error, _} = other -> other
end
end
@doc """
Lists available resources for the specific API version.
See [Resources by Version](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_discoveryresource.htm)
"""
@spec resources(client, String.t()) :: {:ok, map} | {:error, any}
def resources(client, version) do
case Client.request(client, %Request{method: :get, url: "/services/data/v#{version}"}) do
{:ok, %Response{status: 200, body: body}} -> {:ok, body}
{:ok, %Response{body: body}} -> {:error, body}
{:error, _} = other -> other
end
end
@doc """
Lists the available objects.
See [Describe Global](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_describeGlobal.htm)
"""
@spec describe_global(client) :: {:ok, map} | {:error, any}
def describe_global(client) do
case Client.request(client, %Request{method: :get, url: "sobjects"}) do
{:ok, %Response{status: 200, body: body}} -> {:ok, body}
{:ok, %Response{body: body}} -> {:error, body}
{:error, _} = other -> other
end
end
@doc """
Retrieves extended metadata for the specified SObject.
See [SObject Describe](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_describe.htm)
"""
@spec describe_sobject(client, sobject_name) :: {:ok, map} | {:error, any}
def describe_sobject(client, name) do
case Client.request(client, %Request{method: :get, url: "sobjects/#{name}/describe"}) do
{:ok, %Response{status: 200, body: body}} -> {:ok, body}
{:ok, %Response{body: body}} -> {:error, body}
{:error, _} = other -> other
end
end
@doc """
Retrieves basic metadata for the specific SObject.

On success the `"recentItems"` entries in the body are each converted to
`SObject` structs; the rest of the body is returned untouched.

See [SObject Basic Information](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_basic_info.htm)
"""
@spec basic_info(client, sobject_name) :: {:ok, map} | {:error, any}
def basic_info(client, name) do
  case Client.request(client, %Request{method: :get, url: "sobjects/#{name}"}) do
    # Rebuild the body with recentItems parsed into SObject structs.
    # NOTE(review): a 200 response *without* "recentItems" falls through to
    # the error clause below — confirm that is intended.
    {:ok, %Response{status: 200, body: %{"recentItems" => recent_items} = body}} ->
      {:ok, Map.put(body, "recentItems", Enum.map(recent_items, &SObject.build/1))}

    {:ok, %Response{body: body}} ->
      {:error, body}

    {:error, _} = other ->
      other
  end
end
@doc """
Retrieves a SObject by ID.

`fields` restricts the returned fields (an empty list returns them all).

See [SObject Rows](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_retrieve.htm)
"""
@spec get_sobject(client, sobject_id, sobject_name, list) :: {:ok, SObject.t()} | {:error, any}
def get_sobject(client, id, name, fields),
  do: do_get_sobject(client, "sobjects/#{name}/#{id}", fields)
@doc """
Retrieves a SObject based on the value of a specified external ID field.

`field_value` is converted to a string and URI-encoded before being placed
in the request path, so non-binary values (e.g. integers) are accepted.

See [SObject Rows by External ID](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_upsert.htm)
"""
@spec get_sobject_by_external_id(client, any, field_name, sobject_name) ::
        {:ok, SObject.t()} | {:error, any}
def get_sobject_by_external_id(client, field_value, field_name, sobject_name) do
  # `to_string/1` keeps the previous behaviour for binaries while making the
  # `any`-typed `field_value` (integers, atoms, ...) actually work with
  # URI.encode/1, which only accepts binaries.
  encoded_value = field_value |> to_string() |> URI.encode()
  do_get_sobject(client, "sobjects/#{sobject_name}/#{field_name}/#{encoded_value}")
end
@doc """
Retrieves a SObject by relationship field.

Depending on the relationship cardinality the API returns either a single
record (body has `"attributes"`) or a query-result page (body has
`"records"`); the return type differs accordingly.

See [SObject Relationships](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_relationships.htm)
"""
@spec get_sobject_by_relationship(
        client,
        sobject_id,
        sobject_name,
        field_name,
        list(field_name)
      ) :: {:ok, SObject.t() | QueryResult.t()} | {:error, any}
def get_sobject_by_relationship(client, id, sobject_name, field_name, fields) do
  path = "sobjects/#{sobject_name}/#{id}/#{field_name}"

  case Client.request(client, %Request{
         method: :get,
         url: path,
         query: build_fields_query(fields)
       }) do
    # Single related record: build one SObject.
    {:ok, %Response{status: 200, body: %{"attributes" => _} = body}} ->
      {:ok, SObject.build(body)}

    # Collection of related records: build a QueryResult page.
    {:ok, %Response{status: 200, body: %{"records" => _} = body}} ->
      {:ok, build_result_set(body)}

    {:ok, %Response{body: body}} ->
      {:error, body}

    {:error, _} = other ->
      other
  end
end
# Shared GET helper: fetches `path` (optionally restricted to `fields`) and
# builds the response body into an SObject on HTTP 200.
defp do_get_sobject(client, path, fields \\ []) do
  case Client.request(client, %Request{
         method: :get,
         url: path,
         query: build_fields_query(fields)
       }) do
    {:ok, %Response{status: 200, body: body}} -> {:ok, SObject.build(body)}
    {:ok, %Response{body: body}} -> {:error, body}
    {:error, _} = other -> other
  end
end
# Translates a list of field names into the `fields` query parameter;
# an empty list yields no query parameters at all.
defp build_fields_query(field_names)
defp build_fields_query([]), do: []

defp build_fields_query(field_names) do
  [fields: Enum.join(field_names, ",")]
end
@doc """
Updates a SObject.

Returns `:ok` on a successful update (HTTP 204), `{:error, reason}` otherwise.

See [SObject Rows](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_retrieve.htm)
"""
@spec update_sobject(client, sobject_id, sobject_name, map) :: :ok | {:error, any}
def update_sobject(client, id, name, attrs) do
  request = %Request{method: :patch, url: "sobjects/#{name}/#{id}", body: attrs}

  case Client.request(client, request) do
    {:ok, %Response{status: 204, body: ""}} -> :ok
    {:ok, %Response{body: error_body}} -> {:error, error_body}
    {:error, _reason} = error -> error
  end
end
@doc """
Updates multiple SObjects using the Composite API.

It uses the Composite API to update multiple records (up to 200) in one call, returning a list of SaveResult objects.
You can choose whether to roll back the entire request when an error occurs.

If more than 200 records need to be updated at once, try using the Bulk API.

See [Update Multiple Records with Fewer Round-Trips](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_composite_sobjects_collections_update.htm)
"""
@spec update_sobjects(client, records :: list(sobject), all_or_none :: boolean) ::
        {:ok, any} | {:error, any}
def update_sobjects(client, records, all_or_none \\ false) do
  # allOrNone: true rolls back every record in the batch if any update fails.
  body = %{records: records, allOrNone: all_or_none}

  case Client.request(client, %Request{method: :patch, url: "composite/sobjects", body: body}) do
    {:ok, %Response{status: 200, body: body}} -> {:ok, body}
    {:ok, %Response{body: body}} -> {:error, body}
    {:error, _} = other -> other
  end
end
@doc """
Creates a SObject.

Returns `{:ok, id}` with the ID of the newly created record, or
`{:error, reason}`.

See [SObject Rows](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_basic_info.htm)
"""
@spec create_sobject(client, sobject_name, map) :: {:ok, sobject_id} | {:error, any}
def create_sobject(client, name, attrs) do
  request = %Request{method: :post, url: "sobjects/#{name}/", body: attrs}

  case Client.request(client, request) do
    {:ok, %Response{status: 201, body: %{"id" => new_id, "success" => true}}} -> {:ok, new_id}
    {:ok, %Response{body: error_body}} -> {:error, error_body}
    {:error, _reason} = error -> error
  end
end
@doc """
Deletes a SObject.

Returns `:ok` on a successful deletion (HTTP 204) and `{:error, body}` for
any error response, mirroring the other functions in this module.

[SObject Rows](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_retrieve.htm)
"""
@spec delete_sobject(client, sobject_id, sobject_name) :: :ok | {:error, any}
def delete_sobject(client, id, name) do
  case Client.request(client, %Request{method: :delete, url: "sobjects/#{name}/#{id}"}) do
    {:ok, %Response{status: 204, body: ""}} -> :ok
    # Previously only status 404 was matched here, so any other error status
    # (400, 401, 500, ...) raised a CaseClauseError. Match every non-204
    # response like the sibling functions do.
    {:ok, %Response{body: body}} -> {:error, body}
    {:error, _} = other -> other
  end
end
@doc """
Executes the SOQL query and gets the result of it.

[Query](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_query.htm)
"""
@spec query(client, soql) :: {:ok, QueryResult.t()} | {:error, any}
def query(client, soql) do
  # The SOQL statement is passed as the `q` query-string parameter.
  case Client.request(client, %Request{method: :get, url: "query", query: [q: soql]}) do
    {:ok, %Response{status: 200, body: body}} -> {:ok, build_result_set(body)}
    {:ok, %Response{body: body}} -> {:error, body}
    {:error, _} = other -> other
  end
end
@doc """
Executes the SOQL query and returns a lazy stream of all result records,
following pagination transparently via `query_retrieve/2`.
"""
@spec query_stream(client, soql) :: Enumerable.t()
def query_stream(client, soql), do: start_query_stream(client, &query/2, soql)
@doc """
Retrieves additional query results for the specified query ID.

[Query](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_query.htm)
"""
@spec query_retrieve(client, query_id | String.t()) :: {:ok, QueryResult.t()} | {:error, any}
def query_retrieve(client, query_id_or_url) do
  # Accept either a bare query ID or a complete "/services/data/..." URL,
  # as returned in a QueryResult's next_records_url.
  path = if full_path?(query_id_or_url), do: query_id_or_url, else: "query/#{query_id_or_url}"

  case Client.request(client, %Request{method: :get, url: path}) do
    {:ok, %Response{status: 200, body: result_body}} -> {:ok, build_result_set(result_body)}
    {:ok, %Response{body: error_body}} -> {:error, error_body}
    {:error, _reason} = error -> error
  end
end
@doc """
Executes the SOQL query and gets the result of it, including deleted or archived objects.

[QueryAll](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_queryall.htm)
"""
@spec query_all(client, soql) :: {:ok, QueryResult.t()} | {:error, any}
def query_all(client, soql) do
  case Client.request(client, %Request{method: :get, url: "queryAll", query: [q: soql]}) do
    {:ok, %Response{status: 200, body: body}} -> {:ok, build_result_set(body)}
    {:ok, %Response{body: body}} -> {:error, body}
    {:error, _} = other -> other
  end
end
@doc """
Like `query_all/2` but returns a lazy stream of all result records,
following pagination transparently.
"""
@spec query_all_stream(client, soql) :: Enumerable.t()
def query_all_stream(client, soql), do: start_query_stream(client, &query_all/2, soql)
# Converts a raw query-response body into a %QueryResult{}. The page's
# records are always built into SObject structs; when the page is not the
# last one ("done" => false) the URL of the next page is carried along.
defp build_result_set(%{"records" => records, "totalSize" => total_size} = resp) do
  built_records = Enum.map(records, &SObject.build/1)

  case resp do
    %{"done" => true} ->
      %QueryResult{done: true, total_size: total_size, records: built_records}

    %{"done" => false, "nextRecordsUrl" => next_records_url} ->
      %QueryResult{
        done: false,
        next_records_url: next_records_url,
        total_size: total_size,
        records: built_records
      }
  end
end
# Starts a Stream.resource/3 that lazily pages through query results.
# The initial accumulator holds the first page produced by `func`
# (query/2 or query_all/2); stream_next/1 then drives pagination.
@spec start_query_stream(
        client,
        (client, soql -> {:ok, QueryResult.t()} | {:error, any}),
        soql
      ) :: Enumerable.t()
defp start_query_stream(client, func, soql) do
  Stream.resource(
    fn -> {client, func.(client, soql)} end,
    &stream_next/1,
    # Nothing to clean up after the stream is done.
    fn _acc -> nil end
  )
end
@doc """
Returns `Enumerable.t` from the `QueryResult`.

Useful when you already hold the first page (e.g. from `query/2`) and want
to lazily stream that page's records plus all remaining pages.
"""
@spec stream_query_result(client, QueryResult.t()) :: Enumerable.t()
def stream_query_result(client, %QueryResult{} = qr) do
  Stream.resource(
    fn -> {client, {:ok, qr}} end,
    &stream_next/1,
    fn _acc -> nil end
  )
end
# State machine driving the query streams. The accumulator is
# {client, state} where state is one of:
#   :halt              — terminate the stream
#   {:error, _}        — emit the error tuple itself as an element, then halt
#   {:ok, QueryResult} — emit this page's records; halt if done, otherwise
#                        remember the next page's URL
#   {:retrieve, url}   — fetch the next page via query_retrieve/2
defp stream_next({client, :halt}), do: {:halt, client}

defp stream_next({client, {:error, _} = error_tuple}), do: {[error_tuple], {client, :halt}}

defp stream_next({client, {:ok, %QueryResult{records: records, done: true}}}),
  do: {records, {client, :halt}}

defp stream_next(
       {client, {:ok, %QueryResult{records: records, done: false, next_records_url: url}}}
     ),
     do: {records, {client, {:retrieve, url}}}

defp stream_next({client, {:retrieve, next_records_url}}),
  do: {[], {client, query_retrieve(client, next_records_url)}}
# Distinguishes a full REST path (as found in next_records_url) from a bare
# query ID: full paths always begin with "/services/data/v".
defp full_path?(path) do
  String.starts_with?(path, "/services/data/v")
end
end
| 34.954545 | 180 | 0.63748 |
93fe42d792383d01bafb8316a00817710082fa8a | 2,839 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/filter_view.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/filter_view.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/filter_view.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.FilterView do
  @moduledoc """
  A filter view.

  ## Attributes

  *   `criteria` (*type:* `%{optional(String.t) => GoogleApi.Sheets.V4.Model.FilterCriteria.t}`, *default:* `nil`) - The criteria for showing/hiding values per column.
      The map's key is the column index, and the value is the criteria for
      that column.
  *   `filterViewId` (*type:* `integer()`, *default:* `nil`) - The ID of the filter view.
  *   `namedRangeId` (*type:* `String.t`, *default:* `nil`) - The named range this filter view is backed by, if any.
      When writing, only one of range or named_range_id
      may be set.
  *   `range` (*type:* `GoogleApi.Sheets.V4.Model.GridRange.t`, *default:* `nil`) - The range this filter view covers.
      When writing, only one of range or named_range_id
      may be set.
  *   `sortSpecs` (*type:* `list(GoogleApi.Sheets.V4.Model.SortSpec.t)`, *default:* `nil`) - The sort order per column. Later specifications are used when values
      are equal in the earlier specifications.
  *   `title` (*type:* `String.t`, *default:* `nil`) - The name of the filter view.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :criteria => %{optional(String.t()) => GoogleApi.Sheets.V4.Model.FilterCriteria.t()},
          :filterViewId => integer(),
          :namedRangeId => String.t(),
          :range => GoogleApi.Sheets.V4.Model.GridRange.t(),
          :sortSpecs => list(GoogleApi.Sheets.V4.Model.SortSpec.t()),
          :title => String.t()
        }

  # Field declarations drive ModelBase's JSON (de)serialization.
  field(:criteria, as: GoogleApi.Sheets.V4.Model.FilterCriteria, type: :map)
  field(:filterViewId)
  field(:namedRangeId)
  field(:range, as: GoogleApi.Sheets.V4.Model.GridRange)
  field(:sortSpecs, as: GoogleApi.Sheets.V4.Model.SortSpec, type: :list)
  field(:title)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.FilterView do
  # Delegate decoding to the model's generated decode/2.
  def decode(value, options), do: GoogleApi.Sheets.V4.Model.FilterView.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.FilterView do
  # All generated models encode through the shared Gax ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.985915 | 167 | 0.693202 |
93fe79cad38021bd06204206c49d0d8f649645f1 | 8,646 | exs | Elixir | test/rexbug/translator_test.exs | NAR/rexbug | 3b662b2b56f9bb97eea098fd0a9be6d04c30f383 | [
"MIT"
] | null | null | null | test/rexbug/translator_test.exs | NAR/rexbug | 3b662b2b56f9bb97eea098fd0a9be6d04c30f383 | [
"MIT"
] | null | null | null | test/rexbug/translator_test.exs | NAR/rexbug | 3b662b2b56f9bb97eea098fd0a9be6d04c30f383 | [
"MIT"
] | null | null | null | defmodule Rexbug.TranslatorTest do
use ExUnit.Case
import Rexbug.Translator
doctest Rexbug.Translator
describe "Translator.translate/1" do
test "translates Foo.Bar.baz right" do
assert {:ok, '\'Elixir.Foo.Bar\':\'abc\'()'} == translate("Foo.Bar.abc")
assert {:ok, '\'Elixir.Foo.Bar\':\'abc\'()'} == translate("Foo.Bar.abc()")
end
test "a simple erlang module.fun right" do
assert {:ok, '\'redbug\':\'help\'()'} == translate(":redbug.help()")
assert {:ok, '\'redbug\':\'help\'()'} == translate(":redbug.help")
end
test "errors out in situations when fragments are duplicated (?)" do
assert {:error, {:invalid_module, _}} = translate(":redbug.one.two()")
assert {:error, _} = translate(":redbug.one(:foo)(:bar)")
end
test "just an erlang module" do
assert {:ok, '\'cowboy\''} == translate(":cowboy")
end
test "just an elixir module" do
assert {:ok, '\'Elixir.Foo.Bar\''} == translate("Foo.Bar")
end
test "actions" do
assert {:ok, '\'cowboy\' -> return'} == translate(":cowboy :: return")
assert {:ok, '\'cowboy\':\'fun\'() -> return;stack'} == translate(":cowboy.fun() :: return;stack")
end
test "parsing rubbish" do
assert {:error, _} = translate("ldkjf 'dkf ls;lf sjdkf 4994{}")
end
test "literal arity" do
assert {:ok, '\'cowboy\':\'do_sth\'/5'} == translate(":cowboy.do_sth/5")
end
test "whatever arity" do
assert {:ok, '\'cowboy\':\'do_sth\''} == translate(":cowboy.do_sth/x")
assert {:ok, '\'cowboy\':\'do_sth\''} == translate(":cowboy.do_sth/really_whatever")
end
test "invalid arity" do
assert {:error, _} = translate(":cowboy.do_sth/(1 + 1)")
end
test "both arity and function args provided" do
assert {:error, _} = translate(":cowboy.foo(1, 2)/3")
end
test "arity without a function" do
assert {:error, _} = translate(":cowboy/3")
end
test "args without a function" do
assert {:error, _} = translate(":cowboy(:wat)")
end
test "invalid arg" do
assert {:error, _} = translate(":cowboy.do_sth(2 + 3)")
end
test "errors out on really unexpected input" do
assert {:error, :invalid_trace_pattern_type} = translate(:wat)
assert {:error, :invalid_trace_pattern_type} = translate(%{})
assert {:error, :invalid_trace_pattern_type} = translate({:foo, "bar"})
end
test "translates send and receive correctly" do
assert {:ok, :send} = translate(:send)
assert {:ok, :send} = translate("send")
assert {:ok, :receive} = translate(:receive)
assert {:ok, :receive} = translate("receive")
end
test "translates multiple trace patterns correctly" do
assert {:ok, [:send, '\'ets\'']} = translate([:send, ":ets"])
assert {:error, :invalid_trace_pattern_type} = translate([:send, ":ets", :wat])
end
end
describe "Translator.translate/1 translating args" do
test "atoms" do
assert_args('\'foo\'', ":foo")
assert_args('\'foo\', \'bar baz\'', ":foo, :\"bar baz\"")
end
test "integer literals" do
assert_args('-5, 255', "-5, 0xFF")
end
test "floats aren't handled" do
assert_args_error("3.14159")
end
test "booleans" do
assert_args('true, false', "true, false")
end
test "strings" do
assert_args('<<"wat">>', "\"wat\"")
assert_args('<<119, 97, 116, 0>>', "\"wat\0\"")
end
test "binaries" do
assert_args('<<>>', "<<>>")
assert_args('<<0>>', "<<0>>")
assert_args('<<119, 97, 116>>', "<<\"wat\">>")
assert_args('<<1, 119, 97, 116, 0>>', "<<1, \"wat\", 0>>")
assert_args('<<119, 97, 116, 0>>', "<<\"wat\0\">>")
end
test "nil" do
assert_args('nil', "nil")
end
test "variables" do
assert_args('Foo, _, _els', "foo, _, _els")
end
test "lists" do
assert_args('[1, X], 3', "[1, x], 3")
end
test "tuples" do
assert_args('{}', "{}")
assert_args('{A, B}', "{a, b}")
assert_args('{_, X, 1}', "{_, x, 1}")
end
test "invalid argument in a list" do
assert_args_error("[3, -a]")
assert_args_error("[3, -:foo.bar()]")
end
test "maps" do
assert_args('\#{1 => One, \'two\' => 2}', "%{1 => one, :two => 2}")
assert_args('\#{\'name\' => _}', "%{name: _}")
assert_args('\#{}', "%{}")
assert_args('\#{\'foo\' => \#{1 => _}}', "%{foo: %{1 => _}}")
end
test "maps with invalid matching of variable in the key" do
assert_args_error("%{name => _}")
end
end
describe "Translator.translate_options/1" do
test "returns empty list for an empty list" do
assert {:ok, []} == translate_options([])
end
test "passes through irrelevant options" do
assert {:ok, [abc: :def, foo: :bar]} == translate_options([abc: :def, foo: :bar])
end
test "returns an error for invalid options" do
assert {:error, :invalid_options} == translate_options(:foo)
assert {:error, :invalid_options} == translate_options([:foo])
end
test "translates the file options right" do
assert {:ok, [file: 'a.txt', print_file: 'b.txt']} == translate_options(file: "a.txt", print_file: "b.txt")
end
end
describe "Translator.split_to_mfag_and_actions!/1" do
test "a full case" do
code = "Foo.Bar.xyz(1, :foo, \"bar\") :: return;stack "
assert {"Foo.Bar.xyz(1, :foo, \"bar\")", "return;stack"} == split_to_mfag_and_actions!(code)
end
test "most basic case" do
assert {":foo", ""} == split_to_mfag_and_actions!(":foo")
end
end
describe "Translator.translate/1 translating guards" do
test "a simple is_integer()" do
res = translate(":erlang.term_to_binary(x) when is_integer(x)")
assert {:ok, '\'erlang\':\'term_to_binary\'(X) when is_integer(X)'} == res
end
test "a simple is_integer() with a helper function" do
assert_guards('is_integer(X)', "is_integer(x)")
end
test "a simple guard negation is_integer() with a helper function" do
assert_guards('not is_integer(X)', "not is_integer(x)")
end
test "alternative of two guards" do
assert_guards('(is_integer(X) orelse is_float(X))', "is_integer(x) or is_float(x)")
end
test "comparison in guards" do
assert_guards('X =< Y', "x <= y")
end
test "complex case" do
assert_guards('(X =< Y andalso not is_float(X))', "x <= y and not is_float(x)")
end
test "another complex case" do
assert_guards('map_size(X) < 1', "map_size(x) < 1")
end
test "invalid guard argument" do
assert_guards_error("is_integer(x + y)")
end
test "invalid guard function" do
assert_guards_error("not_a_guard_function(x)")
end
test "invalid argument for in-guard comparison" do
assert_guards_error("foo(x) < y")
assert_guards_error("x >= bar(y)")
end
test "invalid guard in multiple guards" do
assert_guards_error("foo(x) and is_integer(y)")
assert_guards_error("is_binary(x) and bar(y)")
end
test "operator precedence" do
assert_guards('((is_nil(X) andalso is_nil(Y)) orelse is_nil(Z))', "is_nil(x) and is_nil(y) or is_nil(z)")
assert_guards('(is_nil(X) orelse (is_nil(Y) andalso is_nil(Z)))', "is_nil(x) or is_nil(y) and is_nil(z)")
end
test "nil translation" do
assert_guards('X /= nil', "x != nil")
assert_guards('not is_nil(X)', "not is_nil(x)")
end
end
defp assert_args(expected, input) do
input = ":a.b(#{input}, 0)"
assert {:ok, '\'a\':\'b\'(' ++ expected ++ ', 0)'} == translate(input)
end
defp assert_args_error(input) do
input = ":a.b(#{input}, 0)"
assert {:error, _} = translate(input)
end
defp assert_guards(expected, input) do
input = ":a.b(x, y, z) when #{input}"
assert {:ok, '\'a\':\'b\'(X, Y, Z) when ' ++ expected} == translate(input)
end
defp assert_guards_error(input) do
input = ":a.b(x, y, z) when #{input}"
assert {:error, _} = translate(input)
end
test "author's assumptions" do
assert {:{}, _line, []} = Code.string_to_quoted!("{}")
assert {:{}, _line, [1]} = Code.string_to_quoted!("{1}")
assert {1, 2} = Code.string_to_quoted!("{1, 2}")
assert {:{}, _line, [1, 2, 3]} = Code.string_to_quoted!("{1, 2, 3}")
assert {:{}, _line, [1, 2, 3, 4]} = Code.string_to_quoted!("{1, 2, 3, 4}")
assert {:{}, _line, [1, 2, 3, 4, 5]} = Code.string_to_quoted!("{1, 2, 3, 4, 5}")
assert {:{}, _line, [1, 2, 3, 4, 5, 6]} = Code.string_to_quoted!("{1, 2, 3, 4, 5, 6}")
end
end
| 30.551237 | 113 | 0.5805 |
93fe8ee1ba95c00199426794735751c0355d7dd0 | 1,187 | exs | Elixir | mix.exs | mikan/elixir-practice | 624525605eb2324e0c55a4ddcb68388c0d2ecefc | [
"Apache-2.0"
] | null | null | null | mix.exs | mikan/elixir-practice | 624525605eb2324e0c55a4ddcb68388c0d2ecefc | [
"Apache-2.0"
] | 1 | 2020-01-28T00:19:53.000Z | 2020-01-28T00:19:53.000Z | mix.exs | mikan/elixir-practice | 624525605eb2324e0c55a4ddcb68388c0d2ecefc | [
"Apache-2.0"
] | null | null | null | defmodule ElixirPractice.Mixfile do
use Mix.Project
def project do
[
app: :elixir_practice,
version: "0.1.0",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: ["coveralls": :test, "coveralls.detail": :test, "coveralls.post": :test, "coveralls.html": :test]
]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger, :httpotion]]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:json, "~> 1.0"},
{:httpotion, "~> 3.0.2"}, # Replaced from httpoison, See https://elixirforum.com/t/programming-elixir-chapter-13-problem/5793/10
{:poison, "~> 2.2"},
{:excoveralls, "~> 0.7", only: :test}
]
end
end
| 27.604651 | 134 | 0.613311 |
93feadfc016d2f9b7849e7f307245d9a65f99730 | 1,412 | ex | Elixir | server/apps/boardr_api/lib/boardr_api/utils/method_not_allowed_controller.ex | AlphaHydrae/boardr | 98eed02801f88c065a24bf13051c5cf96270a5f7 | [
"MIT"
] | 1 | 2021-04-08T17:26:27.000Z | 2021-04-08T17:26:27.000Z | server/apps/boardr_api/lib/boardr_api/utils/method_not_allowed_controller.ex | AlphaHydrae/boardr | 98eed02801f88c065a24bf13051c5cf96270a5f7 | [
"MIT"
] | 1 | 2022-02-13T05:50:46.000Z | 2022-02-13T05:50:46.000Z | server/apps/boardr_api/lib/boardr_api/utils/method_not_allowed_controller.ex | AlphaHydrae/boardr | 98eed02801f88c065a24bf13051c5cf96270a5f7 | [
"MIT"
] | null | null | null | defmodule BoardrApi.MethodNotAllowedController do
use BoardrApi, :controller
def match(%Plug.Conn{path_info: path_info} = conn, _) do
routes =
BoardrApi.Router.__routes__()
|> Enum.map(&({&1.verb, &1.path}))
|> Enum.uniq()
|> Enum.map(&({elem(&1, 0), elem(&1, 1) |> String.split("/") |> Enum.drop(1)}))
matching_routes = Enum.filter(routes, fn route -> path_info_matches_route(path_info, route) end)
if Enum.empty?(matching_routes) do
raise %Phoenix.Router.NoRouteError{}
else
raise %BoardrApi.Errors.MethodNotAllowed{
allowed_methods: Enum.map(matching_routes, fn route -> elem(route, 0) end),
conn: conn,
router: BoardrApi.Router
}
end
end
defp path_info_matches_route(path_info, route) when is_list(path_info) and is_tuple(route) do
path_info_matches_route_path(path_info, elem(route, 1))
end
defp path_info_matches_route_path([], []) do
true
end
defp path_info_matches_route_path(path_info, route_path) when is_list(path_info) and is_list(route_path) and length(path_info) != length(route_path) do
false
end
defp path_info_matches_route_path([ path_info_head | path_info_rest ], [ route_path_head | route_path_rest ]) do
(String.starts_with?(route_path_head, ":") or path_info_head === route_path_head) and path_info_matches_route_path(path_info_rest, route_path_rest)
end
end
| 35.3 | 153 | 0.706799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.