hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7991f6aae19c8e372e1afae217fdf0decfd7b431 | 71 | ex | Elixir | chatourius/lib/chatourius/repo.ex | Dmdv/ElixirPlayground | 02d9e8a7fdd6e8742e200430debc9f0ec7fd28a1 | [
"Apache-2.0"
] | null | null | null | chatourius/lib/chatourius/repo.ex | Dmdv/ElixirPlayground | 02d9e8a7fdd6e8742e200430debc9f0ec7fd28a1 | [
"Apache-2.0"
] | null | null | null | chatourius/lib/chatourius/repo.ex | Dmdv/ElixirPlayground | 02d9e8a7fdd6e8742e200430debc9f0ec7fd28a1 | [
"Apache-2.0"
] | null | null | null | defmodule Chatourius.Repo do
use Ecto.Repo, otp_app: :chatourius
end
| 17.75 | 37 | 0.788732 |
79920546f6f5734622df1973a2deccfac457fdbd | 1,514 | ex | Elixir | bank_api/lib/bank_api_web/endpoint.ex | HectorIFC/bank_api | 5c1b59d6c8c27bca4cf6c45f5d309152de089436 | [
"MIT"
] | 1 | 2020-07-07T14:23:14.000Z | 2020-07-07T14:23:14.000Z | bank_api/lib/bank_api_web/endpoint.ex | HectorIFC/bank_api | 5c1b59d6c8c27bca4cf6c45f5d309152de089436 | [
"MIT"
] | null | null | null | bank_api/lib/bank_api_web/endpoint.ex | HectorIFC/bank_api | 5c1b59d6c8c27bca4cf6c45f5d309152de089436 | [
"MIT"
] | null | null | null | defmodule BankApiWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :bank_api
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
store: :cookie,
key: "_bank_api_key",
signing_salt: "thIXYAou"
]
socket "/socket", BankApiWeb.UserSocket,
websocket: true,
longpoll: false
socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :bank_api,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
plug Phoenix.CodeReloader
plug Phoenix.Ecto.CheckRepoStatus, otp_app: :bank_api
end
plug Phoenix.LiveDashboard.RequestLogger,
param_key: "request_logger",
cookie_key: "request_logger"
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug BankApiWeb.Router
end
| 28.566038 | 97 | 0.715324 |
799227ad30ce4f0ddc687ea09fb35ab12b895717 | 1,913 | ex | Elixir | apps/dockup_ui/lib/dockup_ui/endpoint.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | apps/dockup_ui/lib/dockup_ui/endpoint.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | apps/dockup_ui/lib/dockup_ui/endpoint.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | defmodule DockupUi.Endpoint do
use Phoenix.Endpoint, otp_app: :dockup_ui
socket "/socket", DockupUi.UserSocket
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/", from: :dockup_ui, gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt icons)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Poison
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session,
store: :cookie,
key: "_dockup_ui_key",
signing_salt: "vhxBVc+D"
plug DockupUi.Router
@doc """
Callback invoked for dynamically configuring the endpoint.
It receives the endpoint configuration and checks if
configuration should be loaded from the system environment.
"""
def init(_key, config) do
if config[:load_from_system_env] do
config_from_env_vars =
config
|> load_port_from_system_env
|> load_host_from_system_env
{:ok, config_from_env_vars}
else
{:ok, config}
end
end
defp load_port_from_system_env(config) do
port = System.get_env("PORT") ||
raise "expected the PORT environment variable to be set"
Keyword.put(config, :http, [:inet6, port: port])
end
defp load_host_from_system_env(config) do
dockup_url = Application.get_env(:dockup_ui, :dockup_url_host) ||
raise "expected DOCKUP_URL_HOST env var to be set"
put_in(config, [:url, :host], dockup_url)
end
end
| 26.569444 | 69 | 0.703084 |
799234c0b76898fb90e0205b592a6be1c748ed15 | 3,988 | exs | Elixir | mix.exs | podlove/radiator | 3f92973700a218d95a7eed178f96f7c1d74e39a6 | [
"MIT"
] | 92 | 2019-01-03T11:46:23.000Z | 2022-02-19T21:28:44.000Z | mix.exs | podlove/radiator | 3f92973700a218d95a7eed178f96f7c1d74e39a6 | [
"MIT"
] | 350 | 2019-04-11T07:55:51.000Z | 2021-08-03T11:19:05.000Z | mix.exs | podlove/radiator | 3f92973700a218d95a7eed178f96f7c1d74e39a6 | [
"MIT"
] | 10 | 2019-04-18T12:47:27.000Z | 2022-01-25T20:49:15.000Z | defmodule Radiator.MixProject do
use Mix.Project
@version "0.3.0"
def project do
[
app: :radiator,
version: @version,
elixir: "~> 1.9",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
package: package(),
# Docs
name: "Radiator",
docs: docs()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Radiator.Application, []},
extra_applications: [:logger, :runtime_tools, :bamboo, :bamboo_smtp, :inet_cidr]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.4.0"},
{:phoenix_pubsub, "~> 1.1"},
{:phoenix_ecto, "~> 4.0"},
{:ecto_sql, "~> 3.0"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:hal, "~> 1.1"},
{:httpoison, "~> 1.5"},
{:arc, "~> 0.11"},
{:arc_ecto, github: "podlove/arc_ecto"},
{:ex_aws, "~> 2.1"},
{:ex_aws_s3, "~> 2.0"},
# {:ecto_autoslug_field, "~> 2.0"},
# for ecto 3.2 compatiblity without warnings (switch back to release as soon as they release)
{:ecto_autoslug_field,
git: "https://github.com/sobolevn/ecto_autoslug_field.git",
ref: "49dc8d53919bb80e9e188fcb3aa9dcfb34a8dea3"},
# for ex_aws
{:hackney, "~> 1.15"},
{:sweet_xml, "~> 0.6.5"},
# for feed import
{:metalove, "~> 0.3"},
{:xml_builder, "~> 2.1", override: true},
{:ex_machina, "~> 2.3", only: :test},
{:elixir_uuid, "~> 1.2"},
{:absinthe_plug, "~> 1.4"},
{:absinthe_phoenix, "~> 1.4.0"},
{:timex, "~> 3.5"},
{:cors_plug, "~> 2.0"},
# audio file metadata extraction and modifying
{:ffmpex, "~> 0.7.0"},
# authentication
{:guardian, "~> 2.0"},
{:argon2_elixir, "~> 2.0"},
# mail
{:bamboo_smtp, "~> 2.0"},
# for documentation
{:earmark, "~> 1.2", only: :dev},
{:ex_doc, "~> 0.19", only: :dev},
{:chapters, "~> 1.0"},
{:dataloader, "~> 1.0"},
{:mime, "~> 1.2"},
# tracking / analytics
{:remote_ip, "~> 0.2.0"},
{:ua_inspector, "~> 2.2.0"},
# job processor
{:oban, "~> 0.8"},
# cron-like job scheduler
{:quantum, "~> 2.3"},
# create and use temporary files/directories
{:temp, "~> 0.4"}
]
end
defp package do
[
maintainers: ["Eric Teubert", "Dominik Wagner"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/podlove/radiator"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
defp docs do
[
main: "readme",
source_ref: "v#{@version}",
source_url: "https://github.com/podlove/radiator",
logo: "guides/images/podlove-radiator-logo.svg",
extras: [
"README.md",
"guides/data_and_structure.md",
"guides/rest_api.md",
"guides/graphql_api.md",
"guides/Download Tracking.md",
"guides/Files.md",
"guides/Users and Permissions.md"
]
]
end
end
| 28.485714 | 103 | 0.537362 |
79923f0475a07fdbbf990b1d5c1123ca903c7aa1 | 22,342 | exs | Elixir | lib/ex_unit/test/ex_unit/assertions_test.exs | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/ex_unit/test/ex_unit/assertions_test.exs | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2019-04-25T12:52:49.000Z | 2019-04-25T13:27:31.000Z | lib/ex_unit/test/ex_unit/assertions_test.exs | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule ExUnit.AssertionsTest.Value do
def tuple, do: {2, 1}
def falsy, do: nil
def truthy, do: :truthy
end
defmodule ExUnit.AssertionsTest.BrokenError do
defexception [:message]
@impl true
def message(_) do
raise "error"
end
end
alias ExUnit.AssertionsTest.{BrokenError, Value}
defmodule ExUnit.AssertionsTest do
use ExUnit.Case, async: true
defmacro sigil_l({:<<>>, _, [string]}, _), do: Code.string_to_quoted!(string, [])
defmacro argless_macro(), do: raise("should not be invoked")
defmacrop assert_ok(arg) do
quote do
assert {:ok, val} = ok(unquote(arg))
end
end
require Record
Record.defrecordp(:vec, x: 0, y: 0, z: 0)
defguardp is_zero(zero) when zero == 0
test "assert inside macro" do
assert_ok(42)
end
test "assert with truthy value" do
:truthy = assert Value.truthy()
end
test "assert with message when value is falsy" do
try do
"This should never be tested" = assert Value.falsy(), "This should be truthy"
rescue
error in [ExUnit.AssertionError] ->
"This should be truthy" = error.message
end
end
test "assert when value evaluates to falsy" do
try do
"This should never be tested" = assert Value.falsy()
rescue
error in [ExUnit.AssertionError] ->
"assert(Value.falsy())" = error.expr |> Macro.to_string()
"Expected truthy, got nil" = error.message
end
end
test "assert arguments in special form" do
true =
assert (case :ok do
:ok -> true
end)
end
test "assert arguments semantics on function call" do
x = 1
true = assert not_equal(x = 2, x)
2 = x
end
test "assert arguments are not kept for operators" do
try do
"This should never be tested" = assert !Value.truthy()
rescue
error in [ExUnit.AssertionError] ->
false = is_list(error.args)
end
end
test "assert with equality" do
try do
"This should never be tested" = assert 1 + 1 == 1
rescue
error in [ExUnit.AssertionError] ->
1 = error.right
2 = error.left
"assert(1 + 1 == 1)" = error.expr |> Macro.to_string()
end
end
test "assert with equality in reverse" do
try do
"This should never be tested" = assert 1 == 1 + 1
rescue
error in [ExUnit.AssertionError] ->
1 = error.left
2 = error.right
"assert(1 == 1 + 1)" = error.expr |> Macro.to_string()
end
end
test "assert exposes nested macro variables in matches" do
assert ~l(a) = 1
assert a == 1
assert {~l(b), ~l(c)} = {2, 3}
assert b == 2
assert c == 3
end
test "assert does not expand variables" do
assert argless_macro = 1
assert argless_macro == 1
end
test "refute when value is falsy" do
false = refute false
nil = refute Value.falsy()
end
test "refute when value evaluates to truthy" do
try do
refute Value.truthy()
raise "refute was supposed to fail"
rescue
error in [ExUnit.AssertionError] ->
"refute(Value.truthy())" = Macro.to_string(error.expr)
"Expected false or nil, got :truthy" = error.message
end
end
test "assert match when equal" do
{2, 1} = assert {2, 1} = Value.tuple()
end
test "assert match expands argument in match context" do
{x, y, z} = {1, 2, 3}
assert vec(x: ^x, y: ^y) = vec(x: x, y: y, z: z)
end
test "assert match with pinned variable" do
a = 1
{2, 1} = assert {2, ^a} = Value.tuple()
try do
assert {^a, 1} = Value.tuple()
rescue
error in [ExUnit.AssertionError] ->
"match (=) failed\n" <> "The following variables were pinned:\n" <> " a = 1" =
error.message
"assert({^a, 1} = Value.tuple())" = Macro.to_string(error.expr)
end
end
test "assert match with pinned variable from another context" do
var!(a, Elixir) = 1
{2, 1} = assert {2, ^var!(a, Elixir)} = Value.tuple()
try do
assert {^var!(a, Elixir), 1} = Value.tuple()
rescue
error in [ExUnit.AssertionError] ->
"match (=) failed" = error.message
"assert({^var!(a, Elixir), 1} = Value.tuple())" = Macro.to_string(error.expr)
end
end
test "assert match?" do
true = assert match?({2, 1}, Value.tuple())
try do
"This should never be tested" = assert match?({:ok, _}, error(true))
rescue
error in [ExUnit.AssertionError] ->
"match (match?) failed" = error.message
"assert(match?({:ok, _}, error(true)))" = Macro.to_string(error.expr)
"{:error, true}" = Macro.to_string(error.right)
end
end
test "refute match?" do
false = refute match?({1, 1}, Value.tuple())
try do
"This should never be tested" = refute match?({:error, _}, error(true))
rescue
error in [ExUnit.AssertionError] ->
"match (match?) succeeded, but should have failed" = error.message
"refute(match?({:error, _}, error(true)))" = Macro.to_string(error.expr)
"{:error, true}" = Macro.to_string(error.right)
end
end
test "assert match? with pinned variable" do
a = 1
try do
"This should never be tested" = assert(match?({^a, 1}, Value.tuple()))
rescue
error in [ExUnit.AssertionError] ->
"match (match?) failed\nThe following variables were pinned:\n a = 1" = error.message
"assert(match?({^a, 1}, Value.tuple()))" = Macro.to_string(error.expr)
end
end
test "refute match? with pinned variable" do
a = 2
try do
"This should never be tested" = refute(match?({^a, 1}, Value.tuple()))
rescue
error in [ExUnit.AssertionError] ->
"""
match (match?) succeeded, but should have failed
The following variables were pinned:
a = 2\
""" = error.message
"refute(match?({^a, 1}, Value.tuple()))" = Macro.to_string(error.expr)
end
end
test "assert receive waits" do
parent = self()
spawn(fn -> send(parent, :hello) end)
:hello = assert_receive :hello
end
@string "hello"
test "assert receive with interpolated compile-time string" do
parent = self()
spawn(fn -> send(parent, "string: hello") end)
"string: #{@string}" = assert_receive "string: #{@string}"
end
test "assert receive accepts custom failure message" do
send(self(), :hello)
assert_receive message, 0, "failure message"
:hello = message
end
test "assert receive with message in mailbox after timeout, but before reading mailbox tells user to increase timeout" do
parent = self()
# This is testing a race condition, so it's not
# guaranteed this works under all loads of the system
timeout = 100
spawn(fn -> Process.send_after(parent, :hello, timeout) end)
try do
assert_receive :hello, timeout
rescue
error in [ExUnit.AssertionError] ->
true =
error.message =~ "Found message matching :hello after 100ms" or
error.message =~ "No message matching :hello after 100ms"
end
end
test "assert_receive exposes nested macro variables" do
send(self(), {:hello})
assert_receive {~l(a)}, 0, "failure message"
assert a == :hello
end
test "assert_receive raises on invalid timeout" do
timeout = ok(1)
try do
assert_receive {~l(a)}, timeout
rescue
error in [ArgumentError] ->
"timeout must be a non-negative integer, got: {:ok, 1}" = error.message
end
end
test "assert_receive expands argument in match context" do
{x, y, z} = {1, 2, 3}
send(self(), vec(x: x, y: y, z: z))
assert_receive vec(x: ^x, y: ^y)
end
test "assert_receive expands argument in guard context" do
send(self(), {:ok, 0, :other})
assert_receive {:ok, val, atom} when is_zero(val) and is_atom(atom)
end
test "assert received does not wait" do
send(self(), :hello)
:hello = assert_received :hello
end
@received :hello
test "assert received with module attribute" do
send(self(), :hello)
:hello = assert_received @received
end
test "assert received with pinned variable" do
status = :valid
send(self(), {:status, :invalid})
try do
"This should never be tested" = assert_received {:status, ^status}
rescue
error in [ExUnit.AssertionError] ->
"""
No message matching {:status, ^status} after 0ms.
The following variables were pinned:
status = :valid
Process mailbox:
{:status, :invalid}\
""" = error.message
end
end
test "assert received with multiple identical pinned variables" do
status = :valid
send(self(), {:status, :invalid, :invalid})
try do
"This should never be tested" = assert_received {:status, ^status, ^status}
rescue
error in [ExUnit.AssertionError] ->
"""
No message matching {:status, ^status, ^status} after 0ms.
The following variables were pinned:
status = :valid
Process mailbox:
{:status, :invalid, :invalid}\
""" = error.message
end
end
test "assert received with multiple unique pinned variables" do
status = :valid
other_status = :invalid
send(self(), {:status, :invalid, :invalid})
try do
"This should never be tested" = assert_received {:status, ^status, ^other_status}
rescue
error in [ExUnit.AssertionError] ->
"""
No message matching {:status, ^status, ^other_status} after 0ms.
The following variables were pinned:
status = :valid
other_status = :invalid
Process mailbox:
{:status, :invalid, :invalid}\
""" = error.message
end
end
test "assert received when empty mailbox" do
try do
"This should never be tested" = assert_received :hello
rescue
error in [ExUnit.AssertionError] ->
"No message matching :hello after 0ms.\nThe process mailbox is empty." = error.message
end
end
test "assert received when different message" do
send(self(), {:message, :not_expected, :at_all})
try do
"This should never be tested" = assert_received :hello
rescue
error in [ExUnit.AssertionError] ->
"""
No message matching :hello after 0ms.
Process mailbox:
{:message, :not_expected, :at_all}\
""" = error.message
end
end
test "assert received when different message having more than 10 on mailbox" do
for i <- 1..11, do: send(self(), {:message, i})
try do
"This should never be tested" = assert_received x when x == :hello
rescue
error in [ExUnit.AssertionError] ->
"""
No message matching x when x == :hello after 0ms.
Process mailbox:
{:message, 2}
{:message, 3}
{:message, 4}
{:message, 5}
{:message, 6}
{:message, 7}
{:message, 8}
{:message, 9}
{:message, 10}
{:message, 11}
Showing only last 10 of 11 messages.\
""" = error.message
end
end
test "assert received binds variables" do
send(self(), {:hello, :world})
assert_received {:hello, world}
:world = world
end
test "assert received does not leak external variables used in guards" do
send(self(), {:hello, :world})
guard_world = :world
assert_received {:hello, world} when world == guard_world
:world = world
end
test "refute received does not wait" do
false = refute_received :hello
end
test "refute receive waits" do
false = refute_receive :hello
end
test "refute received when equal" do
send(self(), :hello)
try do
"This should never be tested" = refute_received :hello
rescue
error in [ExUnit.AssertionError] ->
"Unexpectedly received message :hello (which matched :hello)" = error.message
end
end
test "assert in when member" do
true = assert 'foo' in ['foo', 'bar']
end
test "assert in when is not member" do
try do
"This should never be tested" = assert 'foo' in 'bar'
rescue
error in [ExUnit.AssertionError] ->
'foo' = error.left
'bar' = error.right
"assert('foo' in 'bar')" = Macro.to_string(error.expr)
end
end
test "refute in when is not member" do
false = refute 'baz' in ['foo', 'bar']
end
test "refute in when is member" do
try do
"This should never be tested" = refute 'foo' in ['foo', 'bar']
rescue
error in [ExUnit.AssertionError] ->
'foo' = error.left
['foo', 'bar'] = error.right
"refute('foo' in ['foo', 'bar'])" = Macro.to_string(error.expr)
end
end
test "assert match" do
{:ok, true} = assert {:ok, _} = ok(true)
end
test "assert match with bitstrings" do
"foobar" = assert "foo" <> bar = "foobar"
"bar" = bar
end
test "assert match when no match" do
try do
assert {:ok, _} = error(true)
rescue
error in [ExUnit.AssertionError] ->
"match (=) failed" = error.message
"assert({:ok, _} = error(true))" = Macro.to_string(error.expr)
"{:error, true}" = Macro.to_string(error.right)
end
end
test "assert match when falsy but not match" do
try do
assert {:ok, _x} = nil
rescue
error in [ExUnit.AssertionError] ->
"match (=) failed" = error.message
"assert({:ok, _x} = nil)" = Macro.to_string(error.expr)
"nil" = Macro.to_string(error.right)
end
end
test "assert match when falsy" do
try do
assert _x = nil
rescue
error in [ExUnit.AssertionError] ->
"Expected truthy, got nil" = error.message
"assert(_x = nil)" = Macro.to_string(error.expr)
end
end
test "refute match when no match" do
try do
"This should never be tested" = refute _ = ok(true)
rescue
error in [ExUnit.AssertionError] ->
"refute(_ = ok(true))" = Macro.to_string(error.expr)
"Expected false or nil, got {:ok, true}" = error.message
end
end
test "assert regex match" do
true = assert "foo" =~ ~r(o)
end
test "assert regex match when no match" do
try do
"This should never be tested" = assert "foo" =~ ~r(a)
rescue
error in [ExUnit.AssertionError] ->
"foo" = error.left
~r{a} = error.right
end
end
test "refute regex match" do
false = refute "foo" =~ ~r(a)
end
test "refute regex match when match" do
try do
"This should never be tested" = refute "foo" =~ ~r(o)
rescue
error in [ExUnit.AssertionError] ->
"foo" = error.left
~r"o" = error.right
end
end
test "assert raise with no error" do
"This should never be tested" = assert_raise ArgumentError, fn -> nil end
rescue
error in [ExUnit.AssertionError] ->
"Expected exception ArgumentError but nothing was raised" = error.message
end
test "assert raise with error" do
error = assert_raise ArgumentError, fn -> raise ArgumentError, "test error" end
"test error" = error.message
end
test "assert raise with some other error" do
"This should never be tested" =
assert_raise ArgumentError, fn -> Not.Defined.function(1, 2, 3) end
rescue
error in [ExUnit.AssertionError] ->
"Expected exception ArgumentError but got UndefinedFunctionError " <>
"(function Not.Defined.function/3 is undefined (module Not.Defined is not available))" =
error.message
end
test "assert raise with some other error includes stacktrace from original error" do
"This should never be tested" =
assert_raise ArgumentError, fn -> Not.Defined.function(1, 2, 3) end
rescue
ExUnit.AssertionError ->
[{Not.Defined, :function, [1, 2, 3], _} | _] = __STACKTRACE__
end
test "assert raise with Erlang error" do
assert_raise SyntaxError, fn ->
List.flatten(1)
end
rescue
error in [ExUnit.AssertionError] ->
"Expected exception SyntaxError but got FunctionClauseError (no function clause matching in :lists.flatten/1)" =
error.message
end
test "assert raise comparing messages (for equality)" do
assert_raise RuntimeError, "foo", fn ->
raise RuntimeError, "bar"
end
rescue
error in [ExUnit.AssertionError] ->
"""
Wrong message for RuntimeError
expected:
"foo"
actual:
"bar"\
""" = error.message
end
test "assert raise comparing messages (with a regex)" do
assert_raise RuntimeError, ~r/ba[zk]/, fn ->
raise RuntimeError, "bar"
end
rescue
error in [ExUnit.AssertionError] ->
"""
Wrong message for RuntimeError
expected:
~r/ba[zk]/
actual:
"bar"\
""" = error.message
end
test "assert raise with an exception with bad message/1 implementation" do
assert_raise BrokenError, fn ->
raise BrokenError
end
rescue
error in [ExUnit.AssertionError] ->
"""
Got exception ExUnit.AssertionsTest.BrokenError but it failed to produce a message with:
** (RuntimeError) error
""" <> _ = error.message
end
test "assert greater than operator" do
true = assert 2 > 1
end
test "assert greater than operator error" do
"This should never be tested" = assert 1 > 2
rescue
error in [ExUnit.AssertionError] ->
1 = error.left
2 = error.right
"assert(1 > 2)" = Macro.to_string(error.expr)
end
test "assert less or equal than operator" do
true = assert 1 <= 2
end
test "assert less or equal than operator error" do
"This should never be tested" = assert 2 <= 1
rescue
error in [ExUnit.AssertionError] ->
"assert(2 <= 1)" = Macro.to_string(error.expr)
2 = error.left
1 = error.right
end
test "assert operator with expressions" do
greater = 5
true = assert 1 + 2 < greater
end
test "assert operator with custom message" do
"This should never be tested" = assert 1 > 2, "assertion"
rescue
error in [ExUnit.AssertionError] ->
"assertion" = error.message
end
test "assert lack of equality" do
try do
"This should never be tested" = assert "one" != "one"
rescue
error in [ExUnit.AssertionError] ->
"Assertion with != failed, both sides are exactly equal" = error.message
"one" = error.left
end
try do
"This should never be tested" = assert 2 != 2.0
rescue
error in [ExUnit.AssertionError] ->
"Assertion with != failed" = error.message
2 = error.left
2.0 = error.right
end
end
test "refute equality" do
try do
"This should never be tested" = refute "one" == "one"
rescue
error in [ExUnit.AssertionError] ->
"Refute with == failed, both sides are exactly equal" = error.message
"one" = error.left
end
try do
"This should never be tested" = refute 2 == 2.0
rescue
error in [ExUnit.AssertionError] ->
"Refute with == failed" = error.message
2 = error.left
2.0 = error.right
end
end
test "assert in delta" do
true = assert_in_delta(1.1, 1.2, 0.2)
end
test "assert in delta raises when passing a negative delta" do
assert_raise ArgumentError, fn ->
assert_in_delta(1.1, 1.2, -0.2)
end
end
test "assert in delta works with equal values and a delta of zero" do
assert_in_delta(10, 10, 0)
end
test "assert in delta error" do
"This should never be tested" = assert_in_delta(10, 12, 1)
rescue
error in [ExUnit.AssertionError] ->
"Expected the difference between 10 and 12 (2) to be less than or equal to 1" =
error.message
end
test "assert in delta with message" do
"This should never be tested" = assert_in_delta(10, 12, 1, "test message")
rescue
error in [ExUnit.AssertionError] ->
"test message" = error.message
end
test "refute in delta" do
false = refute_in_delta(1.1, 1.5, 0.2)
end
test "refute in delta error" do
"This should never be tested" = refute_in_delta(10, 11, 2)
rescue
error in [ExUnit.AssertionError] ->
"Expected the difference between 10 and 11 (1) to be more than 2" = error.message
end
test "refute in delta with message" do
"This should never be tested" = refute_in_delta(10, 11, 2, "test message")
rescue
error in [ExUnit.AssertionError] ->
"test message (difference between 10 and 11 is less than 2)" = error.message
end
test "catch_throw with no throw" do
catch_throw(1)
rescue
error in [ExUnit.AssertionError] ->
"Expected to catch throw, got nothing" = error.message
end
test "catch_error with no error" do
catch_error(1)
rescue
error in [ExUnit.AssertionError] ->
"Expected to catch error, got nothing" = error.message
end
test "catch_exit with no exit" do
catch_exit(1)
rescue
error in [ExUnit.AssertionError] ->
"Expected to catch exit, got nothing" = error.message
end
test "catch_throw with throw" do
1 = catch_throw(throw(1))
end
test "catch_exit with exit" do
1 = catch_exit(exit(1))
end
test "catch_error with error" do
:function_clause = catch_error(List.flatten(1))
end
test "flunk" do
"This should never be tested" = flunk()
rescue
error in [ExUnit.AssertionError] ->
"Flunked!" = error.message
end
test "flunk with message" do
"This should never be tested" = flunk("This should raise an error")
rescue
error in [ExUnit.AssertionError] ->
"This should raise an error" = error.message
end
test "flunk with wrong argument type" do
"This should never be tested" = flunk(["flunk takes a binary, not a list"])
rescue
error ->
"no function clause matching in ExUnit.Assertions.flunk/1" =
FunctionClauseError.message(error)
end
test "AssertionError.message/1 is nicely formatted" do
assert :a = :b
rescue
error in [ExUnit.AssertionError] ->
"""
match (=) failed
code: assert :a = :b
right: :b
""" = Exception.message(error)
end
defp ok(val), do: {:ok, val}
defp error(val), do: {:error, val}
defp not_equal(left, right), do: left != right
end
| 26.821128 | 123 | 0.618566 |
799259bf07a2879687ae3c110ef2984b03efe0c1 | 6,560 | ex | Elixir | lib/policr_mini_bot/plugs/handle_user_joined_cleanup_plug.ex | WOCyo/policr-mini | 23e5e9f927d083cba5993f39e9f122e3a0ad1be2 | [
"MIT"
] | null | null | null | lib/policr_mini_bot/plugs/handle_user_joined_cleanup_plug.ex | WOCyo/policr-mini | 23e5e9f927d083cba5993f39e9f122e3a0ad1be2 | [
"MIT"
] | null | null | null | lib/policr_mini_bot/plugs/handle_user_joined_cleanup_plug.ex | WOCyo/policr-mini | 23e5e9f927d083cba5993f39e9f122e3a0ad1be2 | [
"MIT"
] | null | null | null | defmodule PolicrMiniBot.HandleUserJoinedCleanupPlug do
@moduledoc """
处理新用户加入。
"""
# TODO: 修改模块含义并迁移代码。因为设计改动,此 `:message_handler` 已无实际验证处理流程,仅作删除消息之用。
use PolicrMiniBot, plug: :message_handler
alias PolicrMini.{Logger, Chats}
alias PolicrMini.Chats.Scheme
alias PolicrMini.Schema.Verification
alias PolicrMini.VerificationBusiness
alias PolicrMiniBot.Worker
# 过期时间:15 分钟
@expired_seconds 60 * 15
@doc """
检查消息中包含的新加入用户是否有效。
## 以下情况皆不匹配
- 群组未接管。
除此之外包含新成员的消息都将匹配。
"""
@impl true
def match(_message, %{takeovered: false} = state), do: {:nomatch, state}
@impl true
def match(%{new_chat_members: nil} = _message, state), do: {:nomatch, state}
@impl true
def match(_message, state), do: {:match, state}
@doc """
删除进群服务消息。
"""
@impl true
def handle(message, state) do
%{chat: %{id: chat_id}} = message
# TOD0: 将 scheme 的获取放在一个独立的 plug 中,通过状态传递。
case Chats.fetch_scheme(chat_id) do
{:ok, scheme} ->
service_message_cleanup = scheme.service_message_cleanup || default!(:smc) || []
if Enum.member?(service_message_cleanup, :joined) do
# 删除服务消息。
Worker.async_delete_message(chat_id, message.message_id)
end
end
{:ok, %{state | done: true, deleted: true}}
end
# 处理单个新成员的加入。
def handle_one(chat_id, new_chat_member, date, scheme, state) do
joined_datetime =
case date |> DateTime.from_unix() do
{:ok, datetime} -> datetime
_ -> DateTime.utc_now()
end
entrance = scheme.verification_entrance || default!(:ventrance)
mode = scheme.verification_mode || default!(:vmode)
occasion = scheme.verification_occasion || default!(:voccasion)
seconds = scheme.seconds || default!(:vseconds)
if DateTime.diff(DateTime.utc_now(), joined_datetime) >= @expired_seconds do
# 处理过期验证
handle_expired(entrance, chat_id, new_chat_member, state)
else
# 异步限制新用户
async(fn -> restrict_chat_member(chat_id, new_chat_member.id) end)
handle_it(mode, entrance, occasion, seconds, chat_id, new_chat_member, state)
end
end
@doc """
处理过期验证。
当前仅限制用户,并不发送验证消息。
"""
@spec handle_expired(atom, integer, map, State.t()) :: {:error, State.t()} | {:ok, State.t()}
def handle_expired(entrance, chat_id, new_chat_member, state) do
verification_params = %{
chat_id: chat_id,
target_user_id: new_chat_member.id,
target_user_name: fullname(new_chat_member),
target_user_language_code: new_chat_member.language_code,
entrance: entrance,
seconds: 0,
status: :expired
}
case VerificationBusiness.fetch(verification_params) do
{:ok, _} ->
# 计数器自增(验证总数)
PolicrMini.Counter.increment(:verification_total)
# 异步限制新用户
async(fn -> restrict_chat_member(chat_id, new_chat_member.id) end)
{:ok, state}
e ->
Logger.unitized_error("Verification acquisition",
chat_id: chat_id,
user_id: new_chat_member.id,
returns: e
)
{:error, state}
end
end
@doc """
统一入口 + 私聊方案的细节实现。
"""
def handle_it(_, :unity, :private, seconds, chat_id, new_chat_member, state) do
verification_params = %{
chat_id: chat_id,
target_user_id: new_chat_member.id,
target_user_name: fullname(new_chat_member),
target_user_language_code: new_chat_member.language_code,
entrance: :unity,
seconds: seconds,
status: :waiting
}
with {:ok, verification} <- VerificationBusiness.fetch(verification_params),
{:ok, scheme} <- Chats.fetch_scheme(chat_id),
{text, markup} <- make_verify_content(verification, scheme, seconds),
{:ok, reminder_message} <-
Cleaner.send_verification_message(chat_id, text,
reply_markup: markup,
parse_mode: "MarkdownV2ToHTML"
),
{:ok, _} <-
VerificationBusiness.update(verification, %{message_id: reminder_message.message_id}) do
# 计数器自增(验证总数)
PolicrMini.Counter.increment(:verification_total)
# 异步延迟处理超时
Worker.async_terminate_validation(verification, scheme, seconds)
{:ok, %{state | done: true, deleted: true}}
else
e ->
Logger.unitized_error("Verification entrance creation", chat_id: chat_id, returns: e)
text =
t("errors.verification_created_failed", %{mentioned_user: mention(new_chat_member)})
send_message(chat_id, text)
{:error, state}
end
end
@doc """
生成验证消息。
注意:此函数需要在验证记录创建以后调用,否则会出现不正确的等待验证人数。
因为当前默认统一验证入口的关系,此函数生成的是入口消息而不是验证消息。
"""
@spec make_verify_content(Verification.t(), Scheme.t(), integer) ::
{String.t(), InlineKeyboardMarkup.t()}
def make_verify_content(verification, scheme, seconds)
when is_struct(verification, Verification) and is_struct(scheme, Scheme) do
%{chat_id: chat_id, target_user_id: target_user_id, target_user_name: target_user_name} =
verification
new_chat_member = %{id: target_user_id, fullname: target_user_name}
# 读取等待验证的人数并根据人数分别响应不同的文本内容
waiting_count = VerificationBusiness.get_unity_waiting_count(chat_id)
make_unity_content(chat_id, new_chat_member, waiting_count, scheme, seconds)
end
@doc """
生成统一验证入口消息。
参数 `user` 需要满足 `PolicrMiniBot.Helper.fullname/1` 函数子句的匹配。
"""
@spec make_unity_content(
integer,
PolicrMiniBot.Helper.mention_user(),
integer,
Scheme.t(),
integer
) ::
{String.t(), InlineKeyboardMarkup.t()}
def make_unity_content(chat_id, user, waiting_count, scheme, seconds)
when is_struct(scheme, Scheme) do
# 读取等待验证的人数并根据人数分别响应不同的文本内容
mention_scheme = scheme.mention_text || default!(:mention_scheme)
text =
if waiting_count == 1,
do:
t("verification.unity.single_waiting", %{
mentioned_user: build_mention(user, mention_scheme),
seconds: seconds
}),
else:
t("verification.unity.multiple_waiting", %{
mentioned_user: build_mention(user, mention_scheme),
remaining_count: waiting_count - 1,
seconds: seconds
})
markup = %InlineKeyboardMarkup{
inline_keyboard: [
[
%InlineKeyboardButton{
text: t("buttons.verification.click_here"),
url: "https://t.me/#{bot_username()}?start=verification_v1_#{chat_id}"
}
]
]
}
{text, markup}
end
end
| 28.898678 | 99 | 0.650915 |
79925a30813c8e73b1efec42e285edc6d9118ded | 313 | ex | Elixir | lib/elixir_playground/models/snippet.ex | slogsdon/elixir_playground | cabab7a377a814a8741a08fe7b1948b559343050 | [
"MIT"
] | 3 | 2015-01-28T06:08:59.000Z | 2015-11-05T02:40:18.000Z | lib/elixir_playground/models/snippet.ex | slogsdon/elixir_playground | cabab7a377a814a8741a08fe7b1948b559343050 | [
"MIT"
] | null | null | null | lib/elixir_playground/models/snippet.ex | slogsdon/elixir_playground | cabab7a377a814a8741a08fe7b1948b559343050 | [
"MIT"
] | null | null | null | defmodule ElixirPlayground.Models.Snippet do
use Ecto.Model
# Take a look at https://github.com/elixir-lang/ecto#models
# for information about defining fields for your model.
queryable "snippet" do
field :key, :string
field :snippet, :string
field :created_at, :datetime
end
end
| 26.083333 | 61 | 0.70607 |
7992bf262c134c9c9d2104def0402175bd5d5773 | 291 | ex | Elixir | elixir/elixir-sips/samples/ecto_test/lib/ecto_test.ex | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | 2 | 2015-12-09T02:16:51.000Z | 2021-07-26T22:53:43.000Z | elixir/elixir-sips/samples/ecto_test/lib/ecto_test.ex | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | null | null | null | elixir/elixir-sips/samples/ecto_test/lib/ecto_test.ex | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | 1 | 2016-05-08T18:40:31.000Z | 2016-05-08T18:40:31.000Z | defmodule EctoTest do
require Logger
use Application
def start(_type, _args) do
import Supervisor.Spec
children = [
supervisor(EctoTest.Supervisor, [])
]
Logger.debug "Application started."
Supervisor.start_link children, strategy: :one_for_one
end
end
| 17.117647 | 58 | 0.701031 |
7992c60645e6853e64d09146a03b9b71548bcac1 | 852 | ex | Elixir | lib/grouper/supervisor.ex | jvantuyl/grouper | f5b748347e0abaaacfa26c1f46e3e21292327c4e | [
"MIT"
] | null | null | null | lib/grouper/supervisor.ex | jvantuyl/grouper | f5b748347e0abaaacfa26c1f46e3e21292327c4e | [
"MIT"
] | null | null | null | lib/grouper/supervisor.ex | jvantuyl/grouper | f5b748347e0abaaacfa26c1f46e3e21292327c4e | [
"MIT"
] | null | null | null | defmodule Grouper.Supervisor do
@moduledoc """
supervisor process for `Grouper` application
"""
use Supervisor
# === API ===
@doc """
Starts application-level supervisor for Grouper.
Maintains global information that drives groups.
"""
@spec start_link(keyword(), keyword()) :: Supervisor.on_start()
def start_link(init_opts \\ [], opts \\ [name: __MODULE__]) do
Supervisor.start_link(__MODULE__, init_opts, opts)
end
# === Supervisor Callbacks ===
@spec init(keyword) :: {:ok, {:supervisor.sup_flags(), [:supervisor.child_spec()]}} | :ignore
@doc false
@impl true
def init(opts) do
children = [Grouper.Reaper]
opts = Keyword.put_new(opts, :strategy, :one_for_one)
:grouper_global_tab = :ets.new(:grouper_global_tab, [:named_table, :set, :public])
Supervisor.init(children, opts)
end
end
| 25.058824 | 95 | 0.678404 |
7992cb6b5a31bc5f26a22ebd9ff2dc75cb3cf227 | 5,554 | ex | Elixir | lib/scenic/component/button.ex | zacck/scenic | 5f2170b7fba63b0af597ddeb3107fb1ffb79c2fe | [
"Apache-2.0"
] | null | null | null | lib/scenic/component/button.ex | zacck/scenic | 5f2170b7fba63b0af597ddeb3107fb1ffb79c2fe | [
"Apache-2.0"
] | null | null | null | lib/scenic/component/button.ex | zacck/scenic | 5f2170b7fba63b0af597ddeb3107fb1ffb79c2fe | [
"Apache-2.0"
] | null | null | null | defmodule Scenic.Component.Button do
@moduledoc false
use Scenic.Component, has_children: false
alias Scenic.Graph
alias Scenic.Primitive
alias Scenic.ViewPort
alias Scenic.Primitive.Style.Theme
import Scenic.Primitives, only: [{:rrect, 3}, {:text, 3}, {:update_opts, 2}]
# import IEx
@default_width 80
@default_height 30
@default_radius 3
@default_font :roboto
@default_font_size 20
@default_alignment :center
# --------------------------------------------------------
def info(data) do
"""
#{IO.ANSI.red()}Button data must be a bitstring: initial_text
#{IO.ANSI.yellow()}Received: #{inspect(data)}
#{IO.ANSI.default_color()}
"""
end
# --------------------------------------------------------
def verify(text) when is_bitstring(text), do: {:ok, text}
def verify(_), do: :invalid_data
# --------------------------------------------------------
def init(text, opts) when is_bitstring(text) and is_list(opts) do
id = opts[:id]
styles = opts[:styles]
# theme is passed in as an inherited style
theme =
(styles[:theme] || Theme.preset(:primary))
|> Theme.normalize()
# get button specific styles
width = styles[:width] || @default_width
height = styles[:height] || @default_height
radius = styles[:radius] || @default_radius
font = styles[:button_font] || @default_font
font_size = styles[:button_font_size] || @default_font_size
alignment = styles[:alignment] || @default_alignment
# build the graph
graph =
Graph.build(font: font, font_size: font_size)
|> rrect({width, height, radius}, fill: theme.background, id: :btn)
|> do_aligned_text(alignment, text, theme.text, width, height)
# special case the dark and light themes to show an outline
graph = do_special_theme_outline(styles[:theme], graph, theme.border)
state = %{
graph: graph,
theme: theme,
pressed: false,
contained: false,
align: alignment,
id: id
}
push_graph(graph)
{:ok, state}
end
defp do_aligned_text(graph, :center, text, fill, width, height) do
text(graph, text,
fill: fill,
translate: {width / 2, height * 0.7},
text_align: :center,
id: :title
)
end
defp do_aligned_text(graph, :left, text, fill, _width, height) do
text(graph, text,
fill: fill,
translate: {8, height * 0.7},
text_align: :left,
id: :title
)
end
defp do_aligned_text(graph, :right, text, fill, width, height) do
text(graph, text,
fill: fill,
translate: {width - 8, height * 0.7},
text_align: :right,
id: :title
)
end
defp do_special_theme_outline(:dark, graph, border) do
Graph.modify(graph, :btn, &update_opts(&1, stroke: {1, border}))
end
defp do_special_theme_outline(:light, graph, border) do
Graph.modify(graph, :btn, &update_opts(&1, stroke: {1, border}))
end
defp do_special_theme_outline(_, graph, _border) do
graph
end
# --------------------------------------------------------
def handle_input(
{:cursor_enter, _uid},
_context,
%{
pressed: true
} = state
) do
state = Map.put(state, :contained, true)
update_color(state)
{:noreply, state}
end
# --------------------------------------------------------
def handle_input(
{:cursor_exit, _uid},
_context,
%{
pressed: true
} = state
) do
state = Map.put(state, :contained, false)
update_color(state)
{:noreply, state}
end
# --------------------------------------------------------
def handle_input({:cursor_button, {:left, :press, _, _}}, context, state) do
state =
state
|> Map.put(:pressed, true)
|> Map.put(:contained, true)
update_color(state)
ViewPort.capture_input(context, [:cursor_button, :cursor_pos])
{:noreply, state}
end
# --------------------------------------------------------
def handle_input(
{:cursor_button, {:left, :release, _, _}},
context,
%{pressed: pressed, contained: contained, id: id} = state
) do
state = Map.put(state, :pressed, false)
update_color(state)
ViewPort.release_input(context, [:cursor_button, :cursor_pos])
if pressed && contained do
send_event({:click, id})
end
{:noreply, state}
end
# --------------------------------------------------------
def handle_input(_event, _context, state) do
{:noreply, state}
end
# ============================================================================
# internal utilities
defp update_color(%{graph: graph, theme: theme, pressed: false, contained: false}) do
Graph.modify(graph, :btn, fn p ->
p
|> Primitive.put_style(:fill, theme.background)
end)
|> push_graph()
end
defp update_color(%{graph: graph, theme: theme, pressed: false, contained: true}) do
Graph.modify(graph, :btn, fn p ->
p
|> Primitive.put_style(:fill, theme.background)
end)
|> push_graph()
end
defp update_color(%{graph: graph, theme: theme, pressed: true, contained: false}) do
Graph.modify(graph, :btn, fn p ->
p
|> Primitive.put_style(:fill, theme.background)
end)
|> push_graph()
end
defp update_color(%{graph: graph, theme: theme, pressed: true, contained: true}) do
Graph.modify(graph, :btn, fn p ->
Primitive.put_style(p, :fill, theme.active)
end)
|> push_graph()
end
end
| 26.075117 | 87 | 0.561577 |
7992f6263aff900d536f50370e70a17d153b33f0 | 329 | ex | Elixir | web/json_api.ex | mmcc/exq_ui | e4d98b341888faee834d1e4d033a1f3916bfb1e1 | [
"Apache-2.0"
] | 1 | 2021-01-20T04:13:01.000Z | 2021-01-20T04:13:01.000Z | web/json_api.ex | mmcc/exq_ui | e4d98b341888faee834d1e4d033a1f3916bfb1e1 | [
"Apache-2.0"
] | null | null | null | web/json_api.ex | mmcc/exq_ui | e4d98b341888faee834d1e4d033a1f3916bfb1e1 | [
"Apache-2.0"
] | 3 | 2021-04-07T14:53:29.000Z | 2021-04-07T14:53:49.000Z | defmodule JsonApi do
def init(opts), do: opts
def call(conn, opts) do
jsonify(conn, opts, opts[:on] || "api")
end
def jsonify(%Plug.Conn{path_info: [on | _path]} = conn, _opts, on) do
conn |>
Plug.Conn.put_resp_header("content-type", "application/json")
end
def jsonify(conn, _opts, _on), do: conn
end
| 21.933333 | 71 | 0.647416 |
799333424fcfcaed5778e2d3e92af9f3e7b0fad0 | 1,274 | exs | Elixir | test/auto_api/capabilities/messaging_capability_test.exs | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 4 | 2018-01-19T16:11:10.000Z | 2019-12-13T16:35:10.000Z | test/auto_api/capabilities/messaging_capability_test.exs | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 5 | 2020-07-16T07:20:21.000Z | 2021-09-22T10:18:04.000Z | test/auto_api/capabilities/messaging_capability_test.exs | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 1 | 2021-02-17T18:36:13.000Z | 2021-02-17T18:36:13.000Z | # AutoAPI
# The MIT License
#
# Copyright (c) 2018- High-Mobility GmbH (https://high-mobility.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
defmodule AutoApi.MessagingCapabilityTest do
use ExUnit.Case, async: true
doctest AutoApi.MessagingCapability
end
| 47.185185 | 79 | 0.781005 |
799333d34d59d5e17145d9d222ff731e7b1a4a9e | 13,517 | ex | Elixir | lib/ash/resource/dsl.ex | kyle5794/ash | 82023da84400366d07001593673d1aaa2a418803 | [
"MIT"
] | null | null | null | lib/ash/resource/dsl.ex | kyle5794/ash | 82023da84400366d07001593673d1aaa2a418803 | [
"MIT"
] | null | null | null | lib/ash/resource/dsl.ex | kyle5794/ash | 82023da84400366d07001593673d1aaa2a418803 | [
"MIT"
] | null | null | null | defmodule Ash.Resource.Dsl do
@moduledoc """
The built in resource DSL. The core DSL components of a resource are:
* attributes - `attributes/1`
* relationships - `relationships/1`
* actions - `actions/1`
* validations - `validations/1`
* aggregates - `aggregates/1`
* calculations - `calculations/1`
"""
@attribute %Ash.Dsl.Entity{
name: :attribute,
describe: """
Declares an attribute on the resource
Type can be either a built in type (see `Ash.Type`) for more, or a module
implementing the `Ash.Type` behaviour.
""",
examples: [
"attribute :first_name, :string, primary_key?: true"
],
transform: {Ash.Resource.Attribute, :transform, []},
target: Ash.Resource.Attribute,
args: [:name, :type],
schema: Ash.Resource.Attribute.attribute_schema()
}
@create_timestamp %Ash.Dsl.Entity{
name: :create_timestamp,
describe: """
Declares a non-writable attribute with a create default of `&DateTime.utc_now/0`
""",
examples: [
"create_timestamp :inserted_at"
],
transform: {Ash.Resource.Attribute, :transform, []},
target: Ash.Resource.Attribute,
args: [:name],
schema: Ash.Resource.Attribute.create_timestamp_schema()
}
@update_timestamp %Ash.Dsl.Entity{
name: :update_timestamp,
describe: """
Declares a non-writable attribute with a create and update default of `&DateTime.utc_now/0`
""",
examples: [
"update_timestamp :inserted_at"
],
transform: {Ash.Resource.Attribute, :transform, []},
target: Ash.Resource.Attribute,
schema: Ash.Resource.Attribute.update_timestamp_schema(),
args: [:name]
}
@attributes %Ash.Dsl.Section{
name: :attributes,
describe: """
A section for declaring attributes on the resource.
Attributes are fields on an instance of a resource. The two required
pieces of knowledge are the field name, and the type.
""",
entities: [
@attribute,
@create_timestamp,
@update_timestamp
]
}
@has_one %Ash.Dsl.Entity{
name: :has_one,
describe: """
Declares a has_one relationship. In a relationsal database, the foreign key would be on the *other* table.
Generally speaking, a `has_one` also implies that the destination table is unique on that foreign key.
""",
examples: [
"""
# In a resource called `Word`
has_one :dictionary_entry, DictionaryEntry,
source_field: :text,
destination_field: :word_text
"""
],
target: Ash.Resource.Relationships.HasOne,
schema: Ash.Resource.Relationships.HasOne.opt_schema(),
args: [:name, :destination]
}
@has_many %Ash.Dsl.Entity{
name: :has_many,
describe: """
Declares a has_many relationship. There can be any number of related entities.
""",
examples: [
"""
# In a resource called `Word`
has_many :definitions, DictionaryDefinition,
source_field: :text,
destination_field: :word_text
"""
],
target: Ash.Resource.Relationships.HasMany,
schema: Ash.Resource.Relationships.HasMany.opt_schema(),
args: [:name, :destination]
}
@many_to_many %Ash.Dsl.Entity{
name: :many_to_many,
describe: """
Declares a many_to_many relationship. Many to many relationships require a join table.
A join table is typically a table who's primary key consists of one foreign key to each resource.
""",
examples: [
"""
# In a resource called `Word`
many_to_many :books, Book,
through: BookWord,
source_field: :text,
source_field_on_join_table: :word_text,
destination_field: :id,
destination_field_on_join_table: :book_id
"""
],
target: Ash.Resource.Relationships.ManyToMany,
schema: Ash.Resource.Relationships.ManyToMany.opt_schema(),
transform: {Ash.Resource.Relationships.ManyToMany, :transform, []},
args: [:name, :destination]
}
@belongs_to %Ash.Dsl.Entity{
name: :belongs_to,
describe: """
Declares a belongs_to relationship. In a relational database, the foreign key would be on the *source* table.
This creates a field on the resource with the corresponding name and type, unless `define_field?: false` is provided.
""",
examples: [
"""
# In a resource called `Word`
belongs_to :dictionary_entry, DictionaryEntry,
source_field: :text,
destination_field: :word_text
"""
],
target: Ash.Resource.Relationships.BelongsTo,
schema: Ash.Resource.Relationships.BelongsTo.opt_schema(),
args: [:name, :destination]
}
@relationships %Ash.Dsl.Section{
name: :relationships,
describe: """
A section for declaring relationships on the resource.
Relationships are a core component of resource oriented design. Many components of Ash
will use these relationships. A simple use case is side_loading (done via the `Ash.Query.load/2`).
""",
entities: [
@has_one,
@has_many,
@many_to_many,
@belongs_to
]
}
@change %Ash.Dsl.Entity{
name: :change,
describe: """
A change to be applied to the changeset after it is generated. They are run in order, from top to bottom.
To implement your own, see `Ash.Resource.Change`.
To use it, you can simply refer to the module and its options, like so:
`change {MyChange, foo: 1}`
But for readability, you may want to define a function elsewhere and import it,
so you can say something like:
`change my_change(1)`
For destroys, `changes` are not applied unless `soft?` is set to true.
""",
examples: [
"change relate_actor(:reporter)",
"change {MyCustomChange, :foo}"
],
target: Ash.Resource.Change,
transform: {Ash.Resource.Change, :transform, []},
schema: Ash.Resource.Change.schema(),
args: [:change]
}
@create %Ash.Dsl.Entity{
name: :create,
describe: """
Declares a `create` action. For calling this action, see the `Ash.Api` documentation.
""",
examples: [
"create :register, primary?: true"
],
target: Ash.Resource.Actions.Create,
schema: Ash.Resource.Actions.Create.opt_schema(),
entities: [
changes: [
@change
]
],
args: [:name]
}
@read %Ash.Dsl.Entity{
name: :read,
describe: """
Declares a `read` action. For calling this action, see the `Ash.Api` documentation.
## Pagination
#{NimbleOptions.docs(Ash.Resource.Actions.Read.pagination_schema())}
""",
examples: [
"read :read_all, primary?: true"
],
target: Ash.Resource.Actions.Read,
schema: Ash.Resource.Actions.Read.opt_schema(),
args: [:name]
}
@update %Ash.Dsl.Entity{
name: :update,
describe: """
Declares a `update` action. For calling this action, see the `Ash.Api` documentation.
""",
examples: [
"update :flag_for_review, primary?: true"
],
entities: [
changes: [
@change
]
],
target: Ash.Resource.Actions.Update,
schema: Ash.Resource.Actions.Update.opt_schema(),
args: [:name]
}
@destroy %Ash.Dsl.Entity{
name: :destroy,
describe: """
Declares a `destroy` action. For calling this action, see the `Ash.Api` documentation.
""",
examples: [
"destroy :soft_delete, primary?: true"
],
entities: [
changes: [
@change
]
],
target: Ash.Resource.Actions.Destroy,
schema: Ash.Resource.Actions.Destroy.opt_schema(),
args: [:name]
}
@actions %Ash.Dsl.Section{
name: :actions,
describe: """
A section for declaring resource actions.
All manipulation of data through the underlying data layer happens through actions.
There are four types of action: `create`, `read`, `update`, and `destroy`. You may
recognize these from the acronym `CRUD`. You can have multiple actions of the same
type, as long as they have different names. This is the primary mechanism for customizing
your resources to conform to your business logic. It is normal and expected to have
multiple actions of each type in a large application.
If you have multiple actions of the same type, one of them must be designated as the
primary action for that type, via: `primary?: true`. This tells the ash what to do
if an action of that type is requested, but no specific action name is given.
""",
imports: [
Ash.Resource.Change.Builtins
],
entities: [
@create,
@read,
@update,
@destroy
]
}
@identity %Ash.Dsl.Entity{
name: :identity,
describe: """
Represents a unique constraint on the resource.
Used for indicating that some set of attributes, calculations or aggregates uniquely identify a resource.
This will allow these fields to be passed to `c:Ash.Api.get/3`, e.g `get(Resource, [some_field: 10])`,
if all of the keys are filterable. Otherwise they are purely descriptive at the moment.
The primary key of the resource does not need to be listed as an identity.
""",
examples: [
"identity :name, [:name]",
"identity :full_name, [:first_name, :last_name]"
],
target: Ash.Resource.Identity,
schema: Ash.Resource.Identity.schema(),
args: [:name, :keys]
}
@identities %Ash.Dsl.Section{
name: :identities,
describe: """
Unique identifiers for the resource
""",
entities: [
@identity
]
}
@resource %Ash.Dsl.Section{
name: :resource,
describe: """
Resource-wide configuration
""",
sections: [
@identities
],
schema: [
description: [
type: :string,
doc: "A human readable description of the resource, to be used in generated documentation"
],
base_filter: [
type: :any,
doc: "A filter statement to be applied to any queries on the resource"
]
]
}
@validate %Ash.Dsl.Entity{
name: :validate,
describe: """
Declares a validation for creates and updates.
""",
examples: [
"validate {Mod, [foo: :bar]}",
"validate at_least_one_of_present([:first_name, :last_name])"
],
target: Ash.Resource.Validation,
schema: Ash.Resource.Validation.opt_schema(),
transform: {Ash.Resource.Validation, :transform, []},
args: [:validation]
}
@validations %Ash.Dsl.Section{
name: :validations,
describe: """
Declare validations prior to performing actions against the resource
""",
imports: [
Ash.Resource.Validation.Builtins
],
entities: [
@validate
]
}
@count %Ash.Dsl.Entity{
name: :count,
describe: """
Declares a named aggregate on the resource
""",
examples: [
"count :assigned_ticket_count, :reported_tickets, filter: [active: true]"
],
target: Ash.Resource.Aggregate,
args: [:name, :relationship_path],
schema: Ash.Resource.Aggregate.schema(),
auto_set_fields: [kind: :count]
}
@aggregates %Ash.Dsl.Section{
name: :aggregates,
describe: """
Declare named aggregates on the resource.
These are aggregates that can be loaded only by name using `Ash.Query.load/2`.
They are also available as top level fields on the resource.
""",
entities: [
@count
]
}
@argument %Ash.Dsl.Entity{
name: :argument,
describe: """
An argument to be passed into the calculation's arguments map
""",
examples: [
"argument :params, :map, default: %{}",
"argument :retries, :integer, allow_nil?: false"
],
target: Ash.Resource.Calculation.Argument,
args: [:name, :type],
schema: Ash.Resource.Calculation.Argument.schema(),
transform: {Ash.Resource.Calculation.Argument, :transform, []}
}
@calculation %Ash.Dsl.Entity{
name: :calculate,
describe: """
Declares a named calculation on the resource.
Takes a module that must adopt the `Ash.Calculation` behaviour. See that module
for more information.
""",
examples: [
"calculate :full_name, MyApp.MyResource.FullName",
"calculate :full_name, {MyApp.FullName, keys: [:first_name, :last_name]}",
"calculate :full_name, full_name([:first_name, :last_name])"
],
target: Ash.Resource.Calculation,
args: [:name, :calculation],
entities: [
arguments: [@argument]
],
schema: Ash.Resource.Calculation.schema()
}
@calculations %Ash.Dsl.Section{
name: :calculations,
describe: """
Declare named calculations on the resource.
These are calculations that can be loaded only by name using `Ash.Query.load/2`.
They are also available as top level fields on the resource.
""",
imports: [
Ash.Resource.Calculation.Builtins
],
entities: [
@calculation
]
}
@sections [
@attributes,
@relationships,
@actions,
@resource,
@validations,
@aggregates,
@calculations
]
@transformers [
Ash.Resource.Transformers.SetRelationshipSource,
Ash.Resource.Transformers.BelongsToAttribute,
Ash.Resource.Transformers.BelongsToSourceField,
Ash.Resource.Transformers.CreateJoinRelationship,
Ash.Resource.Transformers.CachePrimaryKey,
Ash.Resource.Transformers.SetPrimaryActions,
Ash.Resource.Transformers.ValidateActionTypesSupported,
Ash.Resource.Transformers.CountableActions
]
use Ash.Dsl.Extension,
sections: @sections,
transformers: @transformers
end
| 28.278243 | 121 | 0.648147 |
79936ed1f8e6ef52506079ef25ae7cfcf3000c2c | 480 | ex | Elixir | lib/hayago_web/router.ex | jeffkreeftmeijer/hayago | f80e3204efc70fb4c144a75952534ef143ab584d | [
"Apache-2.0"
] | 60 | 2019-06-14T02:38:33.000Z | 2022-02-27T22:08:36.000Z | lib/hayago_web/router.ex | jeffkreeftmeijer/hayago | f80e3204efc70fb4c144a75952534ef143ab584d | [
"Apache-2.0"
] | 2 | 2020-01-05T09:09:58.000Z | 2020-12-31T09:54:13.000Z | lib/hayago_web/router.ex | jeffkreeftmeijer/hayago | f80e3204efc70fb4c144a75952534ef143ab584d | [
"Apache-2.0"
] | 15 | 2019-06-14T02:38:39.000Z | 2021-10-30T21:53:08.000Z | defmodule HayagoWeb.Router do
use HayagoWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug Phoenix.LiveView.Flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", HayagoWeb do
pipe_through :browser
end
# Other scopes may use custom stacks.
# scope "/api", HayagoWeb do
# pipe_through :api
# end
end
| 18.461538 | 39 | 0.68125 |
799372b2a465e59817e015a13ea52de01120243d | 13,214 | ex | Elixir | test/support/factory.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | test/support/factory.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | test/support/factory.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | defmodule MastaniServer.Support.Factory do
@moduledoc """
This module defines the mock data/func to be used by
tests that require insert some mock data to db.
for example you can db_insert(:user) to insert user into db
"""
import Helper.Utils, only: [done: 1]
alias MastaniServer.Repo
alias MastaniServer.{Accounts, CMS, Delivery}
defp mock_meta(:post) do
body = Faker.Lorem.sentence(%Range{first: 80, last: 120})
%{
title: String.slice(body, 1, 49),
body: body,
digest: String.slice(body, 1, 150),
length: String.length(body),
author: mock(:author),
views: Enum.random(0..2000),
communities: [
mock(:community),
mock(:community)
]
}
end
defp mock_meta(:video) do
desc = Faker.Lorem.sentence(%Range{first: 15, last: 60})
source_enum = ["youtube", "bilibil", "vimeo", "other"]
%{
title: String.slice(desc, 1, 49),
poster: Faker.Avatar.image_url(),
thumbnil: Faker.Avatar.image_url(),
desc: String.slice(desc, 1, 49),
duration: "03:30",
duration_sec: Enum.random(300..12_000),
source: source_enum |> Enum.at(Enum.random(0..(length(source_enum) - 1))),
link: "http://www.youtube.com/video/1",
original_author: "mydearxym",
original_author_link: "http://www.youtube.com/user/1",
author: mock(:author),
views: Enum.random(0..2000),
publish_at: Timex.today() |> Timex.to_datetime(),
communities: [
mock(:community),
mock(:community)
]
}
end
defp mock_meta(:repo) do
desc = Faker.Lorem.sentence(%Range{first: 15, last: 60})
%{
title: String.slice(desc, 1, 49),
owner_name: "coderplanets",
owner_url: "http://www.github.com/coderplanets",
repo_url: "http://www.github.com/coderplanets//coderplanets_server",
desc: desc,
homepage_url: "http://www.github.com/coderplanets",
readme: desc,
issues_count: Enum.random(0..2000),
prs_count: Enum.random(0..2000),
fork_count: Enum.random(0..2000),
star_count: Enum.random(0..2000),
watch_count: Enum.random(0..2000),
license: "MIT",
release_tag: "v22",
primary_language: %{
name: "javascript",
color: "tomato"
},
contributors: [
mock_meta(:repo_contributor),
mock_meta(:repo_contributor)
],
author: mock(:author),
views: Enum.random(0..2000),
communities: [
mock(:community),
mock(:community)
]
}
end
defp mock_meta(:wiki) do
%{
community: mock(:community),
readme: Faker.Lorem.sentence(%Range{first: 15, last: 60}),
last_sync: Timex.today() |> Timex.to_datetime(),
contributors: [
mock_meta(:github_contributor),
mock_meta(:github_contributor),
mock_meta(:github_contributor)
]
}
end
defp mock_meta(:cheatsheet) do
mock_meta(:wiki)
end
defp mock_meta(:repo_contributor) do
%{
avatar: Faker.Avatar.image_url(),
html_url: Faker.Avatar.image_url(),
htmlUrl: Faker.Avatar.image_url(),
nickname: "mydearxym2"
}
end
defp mock_meta(:github_contributor) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
github_id: "#{unique_num}-#{Faker.Lorem.sentence(%Range{first: 5, last: 10})}",
avatar: Faker.Avatar.image_url(),
html_url: Faker.Avatar.image_url(),
nickname: "mydearxym2",
bio: Faker.Lorem.sentence(%Range{first: 15, last: 60}),
location: "location #{unique_num}",
company: Faker.Company.name()
}
end
defp mock_meta(:job) do
body = Faker.Lorem.sentence(%Range{first: 80, last: 120})
salary_enum = ["2k以下", "2k-5k", "5k-10k", "10k-15k", "15k-25k", "20k-50k", "50k以上"]
exp_enum = ["不限", "应届", "3年以下", "3-5年", "5-10年", "10年以上"]
education_enum = ["不限", "大专", "本科", "硕士", "博士"]
field_enum = ["互联网", "电子商务", "金融", "企业服务", "教育", "游戏", "O2O", "区块链"]
finance_enum = ["未融资", "天使轮", "A轮", "B轮", "C轮", "D轮以上", "已上市", "不需融资"]
scale_enum = ["少于15人", "15-50人", "50-150人", "150-500人", "500-2000人", "2000人以上"]
%{
title: String.slice(body, 1, 49),
company: Faker.Company.name(),
company_logo: Faker.Avatar.image_url(),
body: body,
desc: "活少, 美女多",
digest: String.slice(body, 1, 150),
length: String.length(body),
author: mock(:author),
views: Enum.random(0..2000),
salary: salary_enum |> Enum.at(Enum.random(0..(length(salary_enum) - 1))),
exp: exp_enum |> Enum.at(Enum.random(0..(length(exp_enum) - 1))),
education: education_enum |> Enum.at(Enum.random(0..(length(education_enum) - 1))),
field: field_enum |> Enum.at(Enum.random(0..(length(field_enum) - 1))),
finance: finance_enum |> Enum.at(Enum.random(0..(length(finance_enum) - 1))),
scale: scale_enum |> Enum.at(Enum.random(0..(length(scale_enum) - 1))),
communities: [
mock(:community)
]
}
end
defp mock_meta(:comment) do
body = Faker.Lorem.sentence(%Range{first: 30, last: 80})
%{body: body}
end
defp mock_meta(:mention) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
from_user: mock(:user),
to_user: mock(:user),
source_id: "1",
source_type: "post",
source_preview: "source_preview #{unique_num}."
}
end
defp mock_meta(:author) do
%{role: "normal", user: mock(:user)}
end
defp mock_meta(:communities_threads) do
%{community_id: 1, thread_id: 1}
end
defp mock_meta(:thread) do
unique_num = System.unique_integer([:positive, :monotonic])
%{title: "thread #{unique_num}", raw: "thread #{unique_num}", index: :rand.uniform(20)}
end
defp mock_meta(:community) do
unique_num = System.unique_integer([:positive, :monotonic])
random_num = Enum.random(0..2000)
title = "community_#{random_num}_#{unique_num}"
%{
title: title,
aka: title,
desc: "community desc",
raw: title,
logo: "https://coderplanets.oss-cn-beijing.aliyuncs.com/icons/pl/elixir.svg",
author: mock(:user)
}
end
defp mock_meta(:category) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
title: "category#{unique_num}",
raw: "category#{unique_num}",
author: mock(:author)
}
end
defp mock_meta(:tag) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
title: "#{Faker.Pizza.cheese()} #{unique_num}",
thread: "POST",
color: "YELLOW",
# community: Faker.Pizza.topping(),
community: mock(:community),
author: mock(:author)
# user_id: 1
}
end
defp mock_meta(:sys_notification) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
source_id: "#{unique_num}",
source_title: "#{Faker.Pizza.cheese()}",
source_type: "post",
source_preview: "#{Faker.Pizza.cheese()}"
}
end
defp mock_meta(:user) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
# username: "#{Faker.Name.first_name()} #{unique_num}",
login: "#{Faker.Name.first_name()}#{unique_num}" |> String.downcase(),
nickname: "#{Faker.Name.first_name()}#{unique_num}",
bio: Faker.Lorem.Shakespeare.romeo_and_juliet(),
avatar: Faker.Avatar.image_url()
}
end
defp mock_meta(:repo_contributor) do
%{
avatar: Faker.Avatar.image_url(),
html_url: Faker.Avatar.image_url(),
htmlUrl: Faker.Avatar.image_url(),
nickname: "mydearxym2"
}
end
defp mock_meta(:github_profile) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
id: "#{Faker.Name.first_name()} #{unique_num}",
login: "#{Faker.Name.first_name()}#{unique_num}",
github_id: "#{unique_num + 1000}",
node_id: "#{unique_num + 2000}",
access_token: "#{unique_num + 3000}",
bio: Faker.Lorem.Shakespeare.romeo_and_juliet(),
company: Faker.Company.name(),
location: "chengdu",
email: Faker.Internet.email(),
avatar_url: Faker.Avatar.image_url(),
html_url: Faker.Avatar.image_url(),
followers: unique_num * unique_num,
following: unique_num * unique_num * unique_num
}
end
defp mock_meta(:bill) do
%{
payment_usage: "donate",
payment_method: "alipay",
amount: 51.2
}
end
def mock_attrs(_, attrs \\ %{})
def mock_attrs(:user, attrs), do: mock_meta(:user) |> Map.merge(attrs)
def mock_attrs(:author, attrs), do: mock_meta(:author) |> Map.merge(attrs)
def mock_attrs(:post, attrs), do: mock_meta(:post) |> Map.merge(attrs)
def mock_attrs(:video, attrs), do: mock_meta(:video) |> Map.merge(attrs)
def mock_attrs(:repo, attrs), do: mock_meta(:repo) |> Map.merge(attrs)
def mock_attrs(:job, attrs), do: mock_meta(:job) |> Map.merge(attrs)
def mock_attrs(:community, attrs), do: mock_meta(:community) |> Map.merge(attrs)
def mock_attrs(:thread, attrs), do: mock_meta(:thread) |> Map.merge(attrs)
def mock_attrs(:mention, attrs), do: mock_meta(:mention) |> Map.merge(attrs)
def mock_attrs(:wiki, attrs), do: mock_meta(:wiki) |> Map.merge(attrs)
def mock_attrs(:cheatsheet, attrs), do: mock_meta(:cheatsheet) |> Map.merge(attrs)
def mock_attrs(:github_contributor, attrs),
do: mock_meta(:github_contributor) |> Map.merge(attrs)
def mock_attrs(:communities_threads, attrs),
do: mock_meta(:communities_threads) |> Map.merge(attrs)
def mock_attrs(:tag, attrs), do: mock_meta(:tag) |> Map.merge(attrs)
def mock_attrs(:sys_notification, attrs), do: mock_meta(:sys_notification) |> Map.merge(attrs)
def mock_attrs(:category, attrs), do: mock_meta(:category) |> Map.merge(attrs)
def mock_attrs(:github_profile, attrs), do: mock_meta(:github_profile) |> Map.merge(attrs)
def mock_attrs(:bill, attrs), do: mock_meta(:bill) |> Map.merge(attrs)
# NOTICE: avoid Recursive problem
# bad example:
# mismatch mismatch
# | |
# defp mock(:user), do: Accounts.User |> struct(mock_meta(:community))
# this line of code will cause SERIOUS Recursive problem
defp mock(:post), do: CMS.Post |> struct(mock_meta(:post))
defp mock(:video), do: CMS.Video |> struct(mock_meta(:video))
defp mock(:repo), do: CMS.Repo |> struct(mock_meta(:repo))
defp mock(:job), do: CMS.Job |> struct(mock_meta(:job))
defp mock(:wiki), do: CMS.CommunityWiki |> struct(mock_meta(:wiki))
defp mock(:cheatsheet), do: CMS.CommunityCheatsheet |> struct(mock_meta(:cheatsheet))
defp mock(:comment), do: CMS.Comment |> struct(mock_meta(:comment))
defp mock(:mention), do: Delivery.Mention |> struct(mock_meta(:mention))
defp mock(:author), do: CMS.Author |> struct(mock_meta(:author))
defp mock(:category), do: CMS.Category |> struct(mock_meta(:category))
defp mock(:tag), do: CMS.Tag |> struct(mock_meta(:tag))
defp mock(:sys_notification),
do: Delivery.SysNotification |> struct(mock_meta(:sys_notification))
defp mock(:user), do: Accounts.User |> struct(mock_meta(:user))
defp mock(:community), do: CMS.Community |> struct(mock_meta(:community))
defp mock(:thread), do: CMS.Thread |> struct(mock_meta(:thread))
defp mock(:communities_threads),
do: CMS.CommunityThread |> struct(mock_meta(:communities_threads))
defp mock(factory_name, attributes) do
factory_name |> mock() |> struct(attributes)
end
# """
# not use changeset because in test we may insert some attrs which not in schema
# like: views, insert/update ... to test filter-sort,when ...
# """
def db_insert(factory_name, attributes \\ []) do
Repo.insert(mock(factory_name, attributes))
end
def db_insert_multi(factory_name, count \\ 2) do
results =
Enum.reduce(1..count, [], fn _, acc ->
{:ok, value} = db_insert(factory_name)
acc ++ [value]
end)
results |> done
end
alias MastaniServer.Accounts.User
def mock_sys_notification(count \\ 3) do
# {:ok, sys_notifications} = db_insert_multi(:sys_notification, count)
db_insert_multi(:sys_notification, count)
end
def mock_mentions_for(%User{id: _to_user_id} = user, count \\ 3) do
{:ok, users} = db_insert_multi(:user, count)
Enum.map(users, fn u ->
unique_num = System.unique_integer([:positive, :monotonic])
info = %{
community: "elixir",
source_id: "1",
source_title: "Title #{unique_num}",
source_type: "post",
source_preview: "preview #{unique_num}"
}
{:ok, _} = Delivery.mention_others(u, [%{id: user.id}], info)
end)
end
def mock_notifications_for(%User{id: _to_user_id} = user, count \\ 3) do
{:ok, users} = db_insert_multi(:user, count)
Enum.map(users, fn u ->
unique_num = System.unique_integer([:positive, :monotonic])
info = %{
source_id: "1",
source_title: "Title #{unique_num}",
source_type: "post",
source_preview: "preview #{unique_num}",
action: "like"
}
{:ok, _} = Delivery.notify_someone(u, user, info)
end)
end
end
| 31.840964 | 96 | 0.626684 |
79937353dd5c52437ea4a9c86c82c4d6bb223436 | 68 | exs | Elixir | test/test_helper.exs | Miradorn/absinthe_tutorial | fbe63be1d4acc92c7a85d4a04fd259ac6b5ef072 | [
"MIT"
] | 69 | 2017-11-15T17:26:57.000Z | 2022-01-25T16:37:40.000Z | test/test_helper.exs | Miradorn/absinthe_tutorial | fbe63be1d4acc92c7a85d4a04fd259ac6b5ef072 | [
"MIT"
] | 24 | 2020-01-27T20:44:40.000Z | 2020-10-29T05:48:31.000Z | test/test_helper.exs | Miradorn/absinthe_tutorial | fbe63be1d4acc92c7a85d4a04fd259ac6b5ef072 | [
"MIT"
] | 35 | 2017-11-12T22:04:10.000Z | 2021-07-22T18:00:39.000Z | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Blog.Repo, :manual)
| 13.6 | 50 | 0.75 |
7993b34ab0786f963ab13871713d197b7a87272c | 2,484 | exs | Elixir | mix.exs | dirkjanm/teslamate | 18beefbe694c74c1088ff6f87ef7b4c4190b451e | [
"MIT"
] | null | null | null | mix.exs | dirkjanm/teslamate | 18beefbe694c74c1088ff6f87ef7b4c4190b451e | [
"MIT"
] | null | null | null | mix.exs | dirkjanm/teslamate | 18beefbe694c74c1088ff6f87ef7b4c4190b451e | [
"MIT"
] | null | null | null | defmodule TeslaMate.MixProject do
use Mix.Project
def project do
[
app: :teslamate,
version: "1.14.1",
elixir: "~> 1.9",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
releases: releases(),
deps: deps(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test,
ci: :test
]
]
end
def application do
[
mod: {TeslaMate.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp deps do
[
{:phoenix, "~> 1.4"},
{:phoenix_pubsub, "~> 1.1"},
{:phoenix_ecto, "~> 4.0"},
{:ecto_sql, "~> 3.0"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:gen_state_machine, "~> 2.0"},
{:ecto_enum, "~> 1.0"},
{:phoenix_live_view, "~> 0.1"},
{:floki, ">= 0.0.0", only: :test},
{:tortoise, "~> 0.9"},
{:excoveralls, "~> 0.10", only: :test},
{:mojito, "~> 0.5"},
{:mint, "~> 1.0", override: true},
{:srtm, "~> 0.2"},
{:fuse, "~> 2.4"},
{:mock, "~> 0.3", only: :test},
{:castore, "~> 0.1"},
{:cachex, "~> 3.2"}
]
end
defp aliases do
[
setup: ["deps.get", "ecto.setup", &setup_yarn/1],
"ecto.setup": ["ecto.create", "ecto.migrate"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test --no-start"],
ci: ["format --check-formatted", "test --raise"]
]
end
defp releases() do
[
teslamate: [
include_executables_for: [:unix],
applications: [runtime_tools: :permanent]
]
]
end
defp setup_yarn(_) do
cmd("yarn", ["install"], cd: "assets")
end
defp cmd(cmd, args, opts) do
opts = Keyword.merge([into: IO.stream(:stdio, :line), stderr_to_stdout: true], opts)
{_, result} = System.cmd(cmd, args, opts)
if result != 0 do
raise "Non-zero result (#{result}) from: #{cmd} #{Enum.map_join(args, " ", &inspect/1)}"
end
end
end
| 25.875 | 94 | 0.513285 |
7993e6e80e82868a54383abfd5d25eca1446e316 | 2,595 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_update_line_item_shipping_details_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/content/lib/google_api/content/v2/model/orders_update_line_item_shipping_details_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_update_line_item_shipping_details_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.OrdersUpdateLineItemShippingDetailsRequest do
@moduledoc """
## Attributes
* `deliverByDate` (*type:* `String.t`, *default:* `nil`) - Updated delivery by date, in ISO 8601 format. If not specified only ship by date is updated.
Provided date should be within 1 year timeframe and can not be a date in the past.
* `lineItemId` (*type:* `String.t`, *default:* `nil`) - The ID of the line item to set metadata. Either lineItemId or productId is required.
* `operationId` (*type:* `String.t`, *default:* `nil`) - The ID of the operation. Unique across all operations for a given order.
* `productId` (*type:* `String.t`, *default:* `nil`) - The ID of the product to set metadata. This is the REST ID used in the products service. Either lineItemId or productId is required.
* `shipByDate` (*type:* `String.t`, *default:* `nil`) - Updated ship by date, in ISO 8601 format. If not specified only deliver by date is updated.
Provided date should be within 1 year timeframe and can not be a date in the past.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:deliverByDate => String.t(),
:lineItemId => String.t(),
:operationId => String.t(),
:productId => String.t(),
:shipByDate => String.t()
}
field(:deliverByDate)
field(:lineItemId)
field(:operationId)
field(:productId)
field(:shipByDate)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersUpdateLineItemShippingDetailsRequest do
def decode(value, options) do
GoogleApi.Content.V2.Model.OrdersUpdateLineItemShippingDetailsRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersUpdateLineItemShippingDetailsRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.190476 | 191 | 0.719075 |
7993f229e06af9313cfcc3d2f1a996c82ff1929b | 572 | ex | Elixir | lib/tourney/tourney_game_supervisor.ex | zeroasterisk/wordza | e0cdf317855ce2959865e6551f32e85a7c6fc2e1 | [
"MIT"
] | null | null | null | lib/tourney/tourney_game_supervisor.ex | zeroasterisk/wordza | e0cdf317855ce2959865e6551f32e85a7c6fc2e1 | [
"MIT"
] | null | null | null | lib/tourney/tourney_game_supervisor.ex | zeroasterisk/wordza | e0cdf317855ce2959865e6551f32e85a7c6fc2e1 | [
"MIT"
] | null | null | null | defmodule Wordza.TourneyGameSupervisor do
@moduledoc """
This is the supervisor for the tourney processes
"""
use Supervisor
def start_link() do
Supervisor.start_link(__MODULE__, [], name: __MODULE__)
end
def init(_) do
children = [
worker(Wordza.TourneyGameWorker, [], restart: :temporary)
]
supervise(children, strategy: :simple_one_for_one)
end
@doc """
Registers a new worker, and creates the worker process
"""
def register(worker_name) do
{:ok, _pid} = Supervisor.start_child(__MODULE__, [worker_name])
end
end
| 22.88 | 67 | 0.695804 |
79941f9f0a7c3eb34f65469e621430b008c6feaa | 1,858 | ex | Elixir | lib/nerves_hub_cli/certificate.ex | pojiro/nerves_hub_cli | 7b0f92b253078302b6c66f7371a716d1e366d6d0 | [
"Apache-2.0"
] | 28 | 2018-08-15T02:17:26.000Z | 2021-11-25T11:14:24.000Z | lib/nerves_hub_cli/certificate.ex | pojiro/nerves_hub_cli | 7b0f92b253078302b6c66f7371a716d1e366d6d0 | [
"Apache-2.0"
] | 81 | 2018-08-13T17:14:14.000Z | 2021-09-11T13:08:29.000Z | lib/nerves_hub_cli/certificate.ex | pojiro/nerves_hub_cli | 7b0f92b253078302b6c66f7371a716d1e366d6d0 | [
"Apache-2.0"
] | 13 | 2018-09-14T12:47:26.000Z | 2022-02-03T07:28:41.000Z | defmodule NervesHubCLI.Certificate do
import X509.Certificate.Extension
alias X509.Certificate.{Template, Validity}
@user_validity_years 1
@device_validity_years 31
@serial_number_bytes 20
@hash :sha256
def device_template(validity_years \\ @device_validity_years) do
validity_years = validity_years || @device_validity_years
%Template{
serial: {:random, @serial_number_bytes},
validity: years(validity_years),
hash: @hash,
extensions: [
basic_constraints: basic_constraints(false),
key_usage: key_usage([:digitalSignature, :keyEncipherment]),
ext_key_usage: ext_key_usage([:clientAuth]),
subject_key_identifier: true,
authority_key_identifier: true
]
}
|> Template.new()
end
def user_template(validity_years \\ @user_validity_years) do
validity_years = validity_years || @user_validity_years
%Template{
serial: {:random, @serial_number_bytes},
validity: years(validity_years),
hash: @hash,
extensions: [
basic_constraints: basic_constraints(false),
key_usage: key_usage([:digitalSignature, :keyEncipherment]),
ext_key_usage: ext_key_usage([:clientAuth]),
subject_key_identifier: true,
authority_key_identifier: true
]
}
|> Template.new()
end
# Helpers
defp backdate(datetime, hours) do
datetime
|> DateTime.to_unix()
|> Kernel.-(hours * 60 * 60)
|> DateTime.from_unix!()
end
defp trim(datetime) do
datetime
|> Map.put(:minute, 0)
|> Map.put(:second, 0)
|> Map.put(:microsecond, {0, 0})
end
defp years(years) do
now =
DateTime.utc_now()
|> trim()
not_before = backdate(now, 1) |> trim()
not_after = Map.put(now, :year, now.year + years)
Validity.new(not_before, not_after)
end
end
| 25.452055 | 68 | 0.660926 |
79942d029e4ba7f27ca0f0db3b4dd08756c1388f | 379 | exs | Elixir | example/test/test/example_3_test.exs | luochen52/mapd-charting | 88e3f049dc10921c5eff63a3b1b539af2a2a71d0 | [
"Apache-2.0"
] | 123 | 2018-10-08T01:05:11.000Z | 2022-02-20T12:06:24.000Z | example/test/test/example_3_test.exs | luochen52/mapd-charting | 88e3f049dc10921c5eff63a3b1b539af2a2a71d0 | [
"Apache-2.0"
] | 63 | 2018-10-04T18:47:00.000Z | 2022-02-11T18:35:15.000Z | example/test/test/example_3_test.exs | luochen52/mapd-charting | 88e3f049dc10921c5eff63a3b1b539af2a2a71d0 | [
"Apache-2.0"
] | 28 | 2018-11-02T07:19:56.000Z | 2022-02-27T14:26:28.000Z | defmodule ExampleThree do
use ExUnit.Case, async: true
use Hound.Helpers
use Whippet
@url Application.get_env(:test, :url)
setup do
Hound.start_session
navigate_to("#{@url}/example3.html")
:ok
end
test "Example 3" do
assert Chart.Raster.is_valid("#polymap", %{legend: false, use_map: true})
assert Chart.Line.is_valid("#timechart")
end
end
| 19.947368 | 77 | 0.686016 |
79943063dcd249edb84d00b97aba0161a045c535 | 583 | ex | Elixir | api/lib/designhubapi_web/controllers/fallback_controller.ex | rchatrath7/designhubapi | c38dafd3ba3a2b86ec3a12e71cbf95a5d366134d | [
"MIT"
] | null | null | null | api/lib/designhubapi_web/controllers/fallback_controller.ex | rchatrath7/designhubapi | c38dafd3ba3a2b86ec3a12e71cbf95a5d366134d | [
"MIT"
] | null | null | null | api/lib/designhubapi_web/controllers/fallback_controller.ex | rchatrath7/designhubapi | c38dafd3ba3a2b86ec3a12e71cbf95a5d366134d | [
"MIT"
] | null | null | null | defmodule DesignhubapiWeb.FallbackController do
@moduledoc """
Translates controller action results into valid `Plug.Conn` responses.
See `Phoenix.Controller.action_fallback/1` for more details.
"""
use DesignhubapiWeb, :controller
def call(conn, {:error, %Ecto.Changeset{} = changeset}) do
conn
|> put_status(:unprocessable_entity)
|> render(DesignhubapiWeb.ChangesetView, "error.json", changeset: changeset)
end
def call(conn, {:error, :not_found}) do
conn
|> put_status(:not_found)
|> render(DesignhubapiWeb.ErrorView, :"404")
end
end
| 27.761905 | 80 | 0.715266 |
799437742627279559b335b6c7ef8ca585482740 | 3,436 | ex | Elixir | lib/lexthink/server.ex | taybin/lexthink | 0afd178dbf372bedd0933b1d2a0f2a84615b96f9 | [
"Apache-2.0"
] | 1 | 2015-12-07T13:07:39.000Z | 2015-12-07T13:07:39.000Z | lib/lexthink/server.ex | taybin/lexthink | 0afd178dbf372bedd0933b1d2a0f2a84615b96f9 | [
"Apache-2.0"
] | null | null | null | lib/lexthink/server.ex | taybin/lexthink | 0afd178dbf372bedd0933b1d2a0f2a84615b96f9 | [
"Apache-2.0"
] | null | null | null | # Based off of bank_server by:
# Copyright (c) 2012, Loïc Hoguin <[email protected]>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
defmodule Lexthink.Server do
use GenServer.Behaviour
defrecord State, pools: [] do
record_type pools: [pid]
end
@spec start_link() :: {:ok, pid}
def start_link do
:gen_server.start_link({:local, __MODULE__}, __MODULE__, [], [])
end
@spec stop() :: :stopped
def stop do
:gen_server.call(__MODULE__, :stop)
end
@spec add_pool(any) :: :ok
def add_pool(ref) do
:gen_server.cast(__MODULE__, {:add_pool, ref})
end
@spec remove_pool(any) :: :ok
def remove_pool(ref) do
:gen_server.cast(__MODULE__, {:remove_pool, ref})
end
@spec add_worker(any, pid) :: :ok
def add_worker(ref, pid) do
:gen_server.cast(__MODULE__, {:add_worker, ref, pid})
end
@spec get_worker(any) :: pid
def get_worker(ref) do
workers = get_all_workers(ref)
{_, _, micro} = :erlang.now()
random = rem(1 + micro, length(workers))
Enum.at(workers, random)
end
@spec get_all_workers(any) :: [pid]
def get_all_workers(ref) do
:ets.lookup_element(__MODULE__, {:pool, ref}, 2)
end
# Callbacks
@spec init([]) :: {:ok, State.t}
def init([]) do
{:ok, State.new}
end
@spec handle_call(any, {pid, any}, :state) :: {:stop, :normal, :stopped, :state} |
{:reply, :ignored, :state}
def handle_call(:stop, _from, state) do
{:stop, :normal, :stopped, state}
end
def handle_call(_request, _from, state) do
{:reply, :ignored, state}
end
@spec handle_cast(any, :state) :: {:noreply, :state}
def handle_cast({:add_pool, ref}, state) do
:true = :ets.insert_new(__MODULE__, {{:pool, ref}, []})
{:noreply, state.pools([ref | state.pools])}
end
def handle_cast({:remove_pool, ref}, state) do
:true = :ets.delete(__MODULE__, {:pool, ref})
{:noreply, state.pools(List.delete(state.pools, ref))}
end
def handle_cast({:add_worker, ref, pid}, state) do
workers = :ets.lookup_element(__MODULE__, {:pool, ref}, 2)
:true = :ets.insert(__MODULE__, {{:pool, ref}, [pid|workers]})
_ = :erlang.monitor(:process, pid)
{:noreply, state}
end
def handle_cast(_request, state) do
{:noreply, state}
end
@spec handle_info(any, :state) :: {:noreply, :state}
def handle_info({'DOWN', _, :process, pid, _}, state) do
Enum.each(state.pools, fn(ref) ->
workers = :ets.lookup_element(__MODULE__, {:pool, ref}, 2)
if Enum.member?(workers, pid) do
true = :ets.insert(__MODULE__, {{:pool, ref},
List.delete(workers, pid)})
end
end)
{:noreply, state}
end
def handle_info(_info, state) do
{:noreply, state}
end
end
| 29.62069 | 84 | 0.655704 |
7994560a0cedb8663ae7233539af00843e5ef435 | 22,393 | ex | Elixir | lib/elixir/lib/kernel/parallel_compiler.ex | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | 1 | 2020-01-14T18:44:56.000Z | 2020-01-14T18:44:56.000Z | lib/elixir/lib/kernel/parallel_compiler.ex | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/kernel/parallel_compiler.ex | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | 1 | 2018-01-09T20:10:59.000Z | 2018-01-09T20:10:59.000Z | defmodule Kernel.ParallelCompiler do
@moduledoc """
A module responsible for compiling and requiring files in parallel.
"""
@doc """
Starts a task for parallel compilation.
If you have a file that needs to compile other modules in parallel,
the spawned processes need to be aware of the compiler environment.
This function allows a developer to create a task that is aware of
those environments.
See `Task.async/1` for more information. The task spawned must be
always awaited on by calling `Task.await/1`
"""
@doc since: "1.6.0"
def async(fun) when is_function(fun) do
if parent = :erlang.get(:elixir_compiler_pid) do
file = :erlang.get(:elixir_compiler_file)
dest = :erlang.get(:elixir_compiler_dest)
{:error_handler, error_handler} = :erlang.process_info(self(), :error_handler)
Task.async(fn ->
send(parent, {:async, self()})
:erlang.put(:elixir_compiler_pid, parent)
:erlang.put(:elixir_compiler_file, file)
dest != :undefined and :erlang.put(:elixir_compiler_dest, dest)
:erlang.process_flag(:error_handler, error_handler)
fun.()
end)
else
raise ArgumentError,
"cannot spawn parallel compiler task because " <>
"the current file is not being compiled/required"
end
end
@doc """
Compiles the given files.
Those files are compiled in parallel and can automatically
detect dependencies between them. Once a dependency is found,
the current file stops being compiled until the dependency is
resolved.
It returns `{:ok, modules, warnings}` or `{:error, errors, warnings}`.
Both errors and warnings are a list of three-element tuples containing
the file, line and the formatted error/warning.
## Options
* `:each_file` - for each file compiled, invokes the callback passing the
file
* `:each_long_compilation` - for each file that takes more than a given
timeout (see the `:long_compilation_threshold` option) to compile, invoke
this callback passing the file as its argument
* `:each_module` - for each module compiled, invokes the callback passing
the file, module and the module bytecode
* `:each_cycle` - after the given files are compiled, invokes this function
that should return the following values:
* `{:compile, modules}` - to continue compilation with a list of further modules to compile
* `{:runtime, modules}` - to stop compilation and verify the list of modules because
dependent modules have changed
* `:long_compilation_threshold` - the timeout (in seconds) after the
`:each_long_compilation` callback is invoked; defaults to `15`
* `:profile` - if set to `:time` measure the compilation time of each compilation cycle
and group pass checker
* `:dest` - the destination directory for the BEAM files. When using `compile/2`,
this information is only used to properly annotate the BEAM files before
they are loaded into memory. If you want a file to actually be written to
`dest`, use `compile_to_path/3` instead.
* `:beam_timestamp` - the modification timestamp to give all BEAM files
"""
@doc since: "1.6.0"
def compile(files, options \\ []) when is_list(options) do
spawn_workers(files, :compile, options)
end
@doc since: "1.6.0"
def compile_to_path(files, path, options \\ []) when is_binary(path) and is_list(options) do
spawn_workers(files, {:compile, path}, options)
end
@doc """
Requires the given files in parallel.
Opposite to compile, dependencies are not attempted to be
automatically solved between files.
It returns `{:ok, modules, warnings}` or `{:error, errors, warnings}`.
Both errors and warnings are a list of three-element tuples containing
the file, line and the formatted error/warning.
## Options
* `:each_file` - for each file compiled, invokes the callback passing the
file
* `:each_module` - for each module compiled, invokes the callback passing
the file, module and the module bytecode
"""
@doc since: "1.6.0"
def require(files, options \\ []) when is_list(options) do
spawn_workers(files, :require, options)
end
@doc false
@deprecated "Use Kernel.ParallelCompiler.compile/2 instead"
def files(files, options \\ []) when is_list(options) do
case spawn_workers(files, :compile, options) do
{:ok, modules, _} -> modules
{:error, _, _} -> exit({:shutdown, 1})
end
end
@doc false
@deprecated "Use Kernel.ParallelCompiler.compile_to_path/2 instead"
def files_to_path(files, path, options \\ []) when is_binary(path) and is_list(options) do
case spawn_workers(files, {:compile, path}, options) do
{:ok, modules, _} -> modules
{:error, _, _} -> exit({:shutdown, 1})
end
end
  # Entry point shared by compile/2, compile_to_path/3 and require/2: resets
  # the warning state, builds the worker-loop state map, runs the spawn/wait
  # loop, and folds the code server's compilation status into the result.
  defp spawn_workers(files, output, options) do
    # Ensure the parallel error handler module is loaded before any worker
    # installs it as its process error_handler.
    {:module, _} = :code.ensure_loaded(Kernel.ErrorHandler)
    compiler_pid = self()
    :elixir_code_server.cast({:reset_warnings, compiler_pid})
    # Always allow at least two concurrent workers so files that wait on each
    # other still have a chance to make progress.
    schedulers = max(:erlang.system_info(:schedulers_online), 2)
    result =
      spawn_workers(files, 0, [], [], %{}, [], %{
        dest: Keyword.get(options, :dest),
        each_cycle: Keyword.get(options, :each_cycle, fn -> {:runtime, []} end),
        each_file: Keyword.get(options, :each_file, fn _, _ -> :ok end) |> each_file(),
        each_long_compilation: Keyword.get(options, :each_long_compilation, fn _file -> :ok end),
        each_module: Keyword.get(options, :each_module, fn _file, _module, _binary -> :ok end),
        beam_timestamp: Keyword.get(options, :beam_timestamp),
        long_compilation_threshold: Keyword.get(options, :long_compilation_threshold, 15),
        profile: Keyword.get(options, :profile),
        cycle_start: System.monotonic_time(),
        module_counter: 0,
        output: output,
        schedulers: schedulers
      })
    # In case --warnings-as-errors is enabled and there was a warning,
    # compilation status will be set to error.
    compilation_status = :elixir_code_server.call({:compilation_status, compiler_pid})
    case {result, compilation_status} do
      {{:ok, _, warnings}, :error} ->
        message = "Compilation failed due to warnings while using the --warnings-as-errors option"
        IO.puts(:stderr, message)
        {:error, warnings, []}
      {{:error, errors, warnings}, :error} ->
        # Fold warnings into the error list so they are not lost.
        {:error, errors ++ warnings, []}
      _ ->
        result
    end
  end
defp each_file(fun) when is_function(fun, 1), do: fn file, _ -> fun.(file) end
defp each_file(fun) when is_function(fun, 2), do: fun
  # Runs in the worker process: reports a successfully processed file to the
  # parent and blocks until the parent acknowledges by echoing `ref`, so the
  # worker cannot exit before the parent has recorded the result. The monitor
  # guards against the parent dying while we wait.
  defp each_file(file, lexical, parent) do
    ref = Process.monitor(parent)
    send(parent, {:file_ok, self(), ref, file, lexical})
    receive do
      ^ref -> :ok
      {:DOWN, ^ref, _, _, _} -> :ok
    end
  end
  # We already have n=schedulers currently running, don't spawn new ones.
  # `spawned - length(waiting)` is the number of workers actively compiling
  # (spawned minus those currently blocked on a missing module).
  defp spawn_workers(
         queue,
         spawned,
         waiting,
         files,
         result,
         warnings,
         %{schedulers: schedulers} = state
       )
       when spawned - length(waiting) >= schedulers do
    wait_for_messages(queue, spawned, waiting, files, result, warnings, state)
  end
  # Release waiting processes. The queue may contain `{ref, response}` tuples
  # enqueued when a pending module became available (or was declared missing);
  # forward the response to the worker registered under `ref`.
  defp spawn_workers([{ref, found} | t], spawned, waiting, files, result, warnings, state) do
    waiting =
      case List.keytake(waiting, ref, 2) do
        {{_kind, pid, ^ref, _on, _defining, _deadlock}, waiting} ->
          send(pid, {ref, found})
          waiting
        nil ->
          # In case the waiting process died (for example, it was an async process),
          # it will no longer be on the list. So we need to take it into account here.
          waiting
      end
    spawn_workers(t, spawned, waiting, files, result, warnings, state)
  end
  # Take the next file off the queue and spawn a monitored worker for it.
  defp spawn_workers([file | queue], spawned, waiting, files, result, warnings, state) do
    %{output: output, long_compilation_threshold: threshold, dest: dest} = state
    parent = self()
    file = Path.expand(file)
    {pid, ref} =
      :erlang.spawn_monitor(fn ->
        # These process-dictionary entries let the compiler runtime find the
        # coordinating process and the file currently being processed.
        :erlang.put(:elixir_compiler_pid, parent)
        :erlang.put(:elixir_compiler_file, file)
        try do
          case output do
            {:compile, path} -> compile_file(file, path, parent)
            :compile -> compile_file(file, dest, parent)
            :require -> require_file(file, parent)
          end
        catch
          # Funnel any crash back to the coordinator as a :file_error message.
          kind, reason ->
            send(parent, {:file_error, self(), file, {kind, reason, __STACKTRACE__}})
        end
        exit(:shutdown)
      end)
    # Fire :timed_out if the file takes longer than the configured threshold,
    # which triggers the :each_long_compilation callback.
    timer_ref = Process.send_after(self(), {:timed_out, pid}, threshold * 1000)
    files = [{pid, ref, file, timer_ref} | files]
    spawn_workers(queue, spawned + 1, waiting, files, result, warnings, state)
  end
  # No more queue, nothing waiting, this cycle is done. Ask the :each_cycle
  # callback whether to run another compile cycle or proceed to verification.
  defp spawn_workers([], 0, [], [], result, warnings, state) do
    state = cycle_timing(result, state)
    case each_cycle_return(state.each_cycle.()) do
      {:runtime, dependent_modules} ->
        write_and_verify_modules(result, warnings, dependent_modules, state)
      {:compile, []} ->
        write_and_verify_modules(result, warnings, [], state)
      {:compile, more} ->
        # More files to compile: start a fresh cycle with the same state.
        spawn_workers(more, 0, [], [], result, warnings, state)
    end
  end
  # files x, waiting for x: POSSIBLE ERROR! Release processes so we get the failures

  # Single entry, just release it: the only remaining worker is waiting on
  # itself, so answer :not_found to let it surface the real error.
  defp spawn_workers(
         [],
         1,
         [{_, pid, ref, _, _, _}] = waiting,
         [{pid, _, _, _}] = files,
         result,
         warnings,
         state
       ) do
    spawn_workers([{ref, :not_found}], 1, waiting, files, result, warnings, state)
  end
  # Multiple entries, try to release modules.
  defp spawn_workers([], spawned, waiting, files, result, warnings, state)
       when length(waiting) == spawned do
    # There is potentially a deadlock. We will release modules with
    # the following order:
    #
    #   1. Code.ensure_compiled/1 checks (deadlock = soft)
    #   2. Struct checks (deadlock = hard)
    #   3. Modules without a known definition
    #   4. Code invocation (deadlock = raise)
    #
    # In theory there is no difference between hard and raise, the
    # difference is where the raise is happening, inside the compiler
    # or in the caller.
    cond do
      deadlocked = deadlocked(waiting, :soft) || deadlocked(waiting, :hard) ->
        spawn_workers(deadlocked, spawned, waiting, files, result, warnings, state)
      without_definition = without_definition(waiting, files) ->
        spawn_workers(without_definition, spawned, waiting, files, result, warnings, state)
      true ->
        # Nothing can be released: report the deadlock and abort compilation.
        errors = handle_deadlock(waiting, files)
        {:error, errors, warnings}
    end
  end
  # No more queue, but spawned and length(waiting) do not match: some workers
  # are still actively compiling, so just wait for their messages.
  defp spawn_workers([], spawned, waiting, files, result, warnings, state) do
    wait_for_messages([], spawned, waiting, files, result, warnings, state)
  end
  # Runs in the worker process: installs the parallel error handler (which
  # suspends this worker when it references a module that is not yet defined),
  # records the BEAM destination, and compiles the file.
  defp compile_file(file, path, parent) do
    :erlang.process_flag(:error_handler, Kernel.ErrorHandler)
    :erlang.put(:elixir_compiler_dest, path)
    :elixir_compiler.file(file, &each_file(&1, &2, parent))
  end
  # Runs in the worker process: requires the file unless the code server says
  # it was already required, in which case the parent is told to cancel.
  defp require_file(file, parent) do
    case :elixir_code_server.call({:acquire, file}) do
      :required ->
        send(parent, {:file_cancel, self()})
      :proceed ->
        :elixir_compiler.file(file, &each_file(&1, &2, parent))
        :elixir_code_server.cast({:required, file})
    end
  end
defp cycle_timing(result, %{profile: :time} = state) do
%{cycle_start: cycle_start, module_counter: module_counter} = state
num_modules = count_modules(result)
diff_modules = num_modules - module_counter
now = System.monotonic_time()
time = System.convert_time_unit(now - cycle_start, :native, :millisecond)
IO.puts(
:stderr,
"[profile] Finished compilation cycle of #{diff_modules} modules in #{time}ms"
)
%{state | cycle_start: now, module_counter: num_modules}
end
defp cycle_timing(_result, %{profile: nil} = state) do
state
end
defp count_modules(result) do
Enum.count(result, &match?({{:module, _}, _}, &1))
end
# TODO: Deprecate on v1.14
defp each_cycle_return(modules) when is_list(modules), do: {:compile, modules}
defp each_cycle_return(other), do: other
defp write_and_verify_modules(result, warnings, dependent_modules, state) do
modules = write_module_binaries(result, state)
checker_warnings = maybe_check_modules(result, dependent_modules, state)
warnings = Enum.reverse(warnings, checker_warnings)
{:ok, modules, warnings}
end
defp write_module_binaries(result, %{output: {:compile, path}, beam_timestamp: timestamp}) do
Enum.flat_map(result, fn
{{:module, module}, {binary, _map}} ->
full_path = Path.join(path, Atom.to_string(module) <> ".beam")
File.write!(full_path, binary)
if timestamp, do: File.touch!(full_path, timestamp)
[module]
_ ->
[]
end)
end
defp write_module_binaries(result, _state) do
for {{:module, module}, _} <- result, do: module
end
  # Runs the ParallelChecker group pass over the freshly compiled modules plus
  # the runtime modules that depend on them, returning the checker warnings.
  # Skipped entirely while bootstrapping Elixir itself.
  defp maybe_check_modules(result, runtime_modules, state) do
    %{schedulers: schedulers, profile: profile} = state
    if :elixir_config.get(:bootstrap) do
      []
    else
      compiled_modules = checker_compiled_modules(result)
      runtime_modules = checker_runtime_modules(runtime_modules)
      profile_checker(profile, compiled_modules, runtime_modules, fn ->
        Module.ParallelChecker.verify(compiled_modules, runtime_modules, schedulers)
      end)
    end
  end
defp checker_compiled_modules(result) do
for {{:module, _module}, {binary, module_map}} <- result do
{module_map, binary}
end
end
defp checker_runtime_modules(modules) do
for module <- modules,
path = :code.which(module),
is_list(path) do
{module, File.read!(path)}
end
end
defp profile_checker(_profile = :time, compiled_modules, runtime_modules, fun) do
{time, result} = :timer.tc(fun)
time = div(time, 1000)
num_modules = length(compiled_modules) + length(runtime_modules)
IO.puts(:stderr, "[profile] Finished group pass check of #{num_modules} modules in #{time}ms")
result
end
defp profile_checker(_profile = nil, _compiled_modules, _runtime_modules, fun) do
fun.()
end
  # The goal of this function is to find leaves in the dependency graph,
  # i.e. to find code that depends on code that we know is not being defined.
  # Returns `{ref, :not_found}` release messages for those leaves, or nil
  # when every pending module is still being defined by some worker.
  defp without_definition(waiting, files) do
    nillify_empty(
      # Every spawned file has a matching waiting entry here (the caller
      # guarantees length(waiting) == spawned), so the keyfind match holds.
      for {pid, _, _, _} <- files,
          {_, ^pid, ref, on, _, _} = List.keyfind(waiting, pid, 1),
          not Enum.any?(waiting, fn {_, _, _, _, defining, _} -> on in defining end),
          do: {ref, :not_found}
    )
  end
defp deadlocked(waiting, type) do
nillify_empty(for {_, _, ref, _, _, ^type} <- waiting, do: {ref, :deadlock})
end
defp nillify_empty([]), do: nil
defp nillify_empty([_ | _] = list), do: list
  # Wait for messages from child processes. This is the coordinator's main
  # receive loop: each clause handles one message of the worker protocol.
  defp wait_for_messages(queue, spawned, waiting, files, result, warnings, state) do
    %{output: output} = state
    receive do
      # A worker started an extra async process that must be accounted for.
      {:async, process} ->
        Process.monitor(process)
        wait_for_messages(queue, spawned + 1, waiting, files, result, warnings, state)
      # A non-module artifact (e.g. a struct) became available: release
      # everyone waiting on it and remember it in the result map.
      {:available, kind, module} ->
        available =
          for {^kind, _, ref, ^module, _defining, _deadlock} <- waiting,
              do: {ref, :found}
        result = Map.put(result, {kind, module}, true)
        spawn_workers(available ++ queue, spawned, waiting, files, result, warnings, state)
      # A module finished compiling: run the callback, ack the worker,
      # release waiters, and store the bytecode in the result map.
      {:module_available, child, ref, file, module, binary, module_map} ->
        state.each_module.(file, module, binary)
        # Release the module loader which is waiting for an ack
        send(child, {ref, :ack})
        available =
          for {:module, _, ref, ^module, _defining, _deadlock} <- waiting,
              do: {ref, :found}
        cancel_waiting_timer(files, child)
        result = Map.put(result, {:module, module}, {binary, module_map})
        spawn_workers(available ++ queue, spawned, waiting, files, result, warnings, state)
      # If we are simply requiring files, we do not add to waiting.
      {:waiting, _kind, child, ref, _on, _defining, _deadlock} when output == :require ->
        send(child, {ref, :not_found})
        spawn_workers(queue, spawned, waiting, files, result, warnings, state)
      {:waiting, kind, child, ref, on, defining, deadlock?} ->
        # If we already got what we were waiting for, do not put it on waiting.
        # Alternatively, we're waiting on ourselves,
        # send :found so that we can crash with a better error.
        waiting =
          if Map.has_key?(result, {kind, on}) or on in defining do
            send(child, {ref, :found})
            waiting
          else
            [{kind, child, ref, on, defining, deadlock?} | waiting]
          end
        spawn_workers(queue, spawned, waiting, files, result, warnings, state)
      # The long-compilation timer fired for a worker that is still running.
      {:timed_out, child} ->
        case List.keyfind(files, child, 0) do
          {^child, _, file, _} -> state.each_long_compilation.(file)
          _ -> :ok
        end
        spawn_workers(queue, spawned, waiting, files, result, warnings, state)
      # Accumulate a compiler warning (file may be nil for generic warnings).
      {:warning, file, line, message} ->
        file = file && Path.absname(file)
        message = :unicode.characters_to_binary(message)
        warning = {file, line, message}
        wait_for_messages(queue, spawned, waiting, files, result, [warning | warnings], state)
      # A worker finished its file successfully: ack it so it can exit,
      # then drop its bookkeeping entries.
      {:file_ok, child_pid, ref, file, lexical} ->
        state.each_file.(file, lexical)
        send(child_pid, ref)
        cancel_waiting_timer(files, child_pid)
        discard_down(child_pid)
        new_files = List.keydelete(files, child_pid, 0)
        # Sometimes we may have spurious entries in the waiting list
        # because someone invoked try/rescue UndefinedFunctionError
        new_waiting = List.keydelete(waiting, child_pid, 1)
        spawn_workers(queue, spawned - 1, new_waiting, new_files, result, warnings, state)
      # The file was already required elsewhere: drop the worker silently.
      {:file_cancel, child_pid} ->
        cancel_waiting_timer(files, child_pid)
        discard_down(child_pid)
        new_files = List.keydelete(files, child_pid, 0)
        spawn_workers(queue, spawned - 1, waiting, new_files, result, warnings, state)
      # A worker crashed while processing its file: kill the remaining
      # workers and return the formatted error.
      {:file_error, child_pid, file, {kind, reason, stack}} ->
        print_error(file, kind, reason, stack)
        cancel_waiting_timer(files, child_pid)
        discard_down(child_pid)
        files |> List.keydelete(child_pid, 0) |> terminate()
        {:error, [to_error(file, kind, reason, stack)], warnings}
      # A monitored process exited; only abnormal exits of tracked file
      # workers become errors (see handle_down/3).
      {:DOWN, ref, :process, pid, reason} ->
        waiting = List.keydelete(waiting, pid, 1)
        case handle_down(files, ref, reason) do
          :ok -> wait_for_messages(queue, spawned - 1, waiting, files, result, warnings, state)
          {:error, errors} -> {:error, errors, warnings}
        end
    end
  end
  # Consume the :DOWN message for a worker we killed or acked, keeping stale
  # monitor messages out of the coordinator mailbox. Blocks until it arrives.
  defp discard_down(pid) do
    receive do
      {:DOWN, _, :process, ^pid, _} -> :ok
    end
  end
  # A worker exiting normally is not an error.
  defp handle_down(_files, _ref, :normal) do
    :ok
  end

  # For an abnormal exit of a tracked file worker: report the error, kill the
  # remaining workers, and return the error tuple. Unknown monitor refs
  # (e.g. async processes) are ignored.
  defp handle_down(files, ref, reason) do
    case List.keyfind(files, ref, 1) do
      {child_pid, ^ref, file, _timer_ref} ->
        print_error(file, :exit, reason, [])
        files
        |> List.keydelete(child_pid, 0)
        |> terminate()
        {:error, [to_error(file, :exit, reason, [])]}
      _ ->
        :ok
    end
  end
  # All remaining workers are waiting on each other. Kill each one (capturing
  # its stacktrace first), print a per-file deadlock error plus a summary
  # table, and return the error entries for the caller.
  defp handle_deadlock(waiting, files) do
    deadlock =
      for {pid, _, file, _} <- files do
        {:current_stacktrace, stacktrace} = Process.info(pid, :current_stacktrace)
        Process.exit(pid, :kill)
        {kind, ^pid, _, on, _, _} = List.keyfind(waiting, pid, 1)
        description = "deadlocked waiting on #{kind} #{inspect(on)}"
        error = CompileError.exception(description: description, file: nil, line: nil)
        print_error(file, :error, error, stacktrace)
        {Path.relative_to_cwd(file), on, description}
      end
    IO.puts("""
    Compilation failed because of a deadlock between files.
    The following files depended on the following modules:
    """)
    # Width of the longest file name, used to align the summary table.
    max =
      deadlock
      |> Enum.map(&(&1 |> elem(0) |> String.length()))
      |> Enum.max()
    for {file, mod, _} <- deadlock do
      IO.puts([" ", String.pad_leading(file, max), " => " | inspect(mod)])
    end
    IO.puts(
      "\nEnsure there are no compile-time dependencies between those files " <>
        "and that the modules they reference exist and are correctly named\n"
    )
    for {file, _, description} <- deadlock, do: {Path.absname(file), nil, description}
  end
defp terminate(files) do
for {pid, _, _, _} <- files, do: Process.exit(pid, :kill)
for {pid, _, _, _} <- files, do: discard_down(pid)
:ok
end
defp print_error(file, kind, reason, stack) do
IO.write([
"\n== Compilation error in file #{Path.relative_to_cwd(file)} ==\n",
Kernel.CLI.format_error(kind, reason, stack)
])
end
defp cancel_waiting_timer(files, child_pid) do
case List.keyfind(files, child_pid, 0) do
{^child_pid, _ref, _file, timer_ref} ->
Process.cancel_timer(timer_ref)
# Let's flush the message in case it arrived before we canceled the timeout.
receive do
{:timed_out, ^child_pid} -> :ok
after
0 -> :ok
end
nil ->
:ok
end
end
defp to_error(file, kind, reason, stack) do
line = get_line(file, reason, stack)
file = Path.absname(file)
message = :unicode.characters_to_binary(Kernel.CLI.format_error(kind, reason, stack))
{file, line, message}
end
defp get_line(_file, %{line: line}, _stack) when is_integer(line) and line > 0 do
line
end
defp get_line(file, :undef, [{_, _, _, []}, {_, _, _, info} | _]) do
if Keyword.get(info, :file) == to_charlist(Path.relative_to_cwd(file)) do
Keyword.get(info, :line)
end
end
defp get_line(file, _reason, [{_, _, _, info} | _]) do
if Keyword.get(info, :file) == to_charlist(Path.relative_to_cwd(file)) do
Keyword.get(info, :line)
end
end
defp get_line(_, _, _) do
nil
end
end
| 34.031915 | 98 | 0.645559 |
7994d2e9a00426328d1e93f4aadc4f3817dbfc27 | 220 | ex | Elixir | fixtures/elixir_output/get_basic_auth_no_user.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 4,955 | 2015-01-02T09:04:20.000Z | 2021-10-06T03:54:43.000Z | fixtures/elixir_output/get_basic_auth_no_user.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 242 | 2015-03-27T05:59:11.000Z | 2021-10-03T08:36:05.000Z | fixtures/elixir_output/get_basic_auth_no_user.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 504 | 2015-01-02T16:04:36.000Z | 2021-10-01T03:43:55.000Z | request = %HTTPoison.Request{
method: :get,
url: "https://api.test.com/",
options: [hackney: [basic_auth: {"", ~s|some_password|}]],
headers: [],
params: [],
body: ""
}
response = HTTPoison.request(request)
| 20 | 60 | 0.618182 |
7994db13ae19bc190802a95e9a71ea1e6fb4623f | 401 | exs | Elixir | priv/repo/migrations/20180522161113_gdpr_compliance.exs | nunopolonia/psc-api | 2e358503851cc04cdaa89201a3f56586f8746736 | [
"MIT"
] | 1 | 2017-09-10T23:51:40.000Z | 2017-09-10T23:51:40.000Z | priv/repo/migrations/20180522161113_gdpr_compliance.exs | nunopolonia/psc-api | 2e358503851cc04cdaa89201a3f56586f8746736 | [
"MIT"
] | 24 | 2018-03-14T18:17:00.000Z | 2021-03-01T07:47:53.000Z | priv/repo/migrations/20180522161113_gdpr_compliance.exs | portosummerofcode/psc-api | 2e358503851cc04cdaa89201a3f56586f8746736 | [
"MIT"
] | null | null | null | defmodule Api.Repo.Migrations.GdprCompliance do
use Ecto.Migration
def change do
alter table("users") do
add :data_usage_consent, :boolean, default: false
add :deleted_at, :naive_datetime
remove :birthday
remove :bio
remove :twitter_handle
remove :linkedin_url
remove :company
remove :college
remove :employment_status
end
end
end
| 21.105263 | 55 | 0.678304 |
7994ddfc5781058829fe05052c13ee1a9dbef5d6 | 5,426 | ex | Elixir | clients/service_control/lib/google_api/service_control/v1/model/audit_log.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/service_control/lib/google_api/service_control/v1/model/audit_log.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/service_control/lib/google_api/service_control/v1/model/audit_log.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ServiceControl.V1.Model.AuditLog do
  @moduledoc """
  Common audit log format for Google Cloud Platform API operations.

  ## Attributes

  - authenticationInfo (AuthenticationInfo): Authentication information. Defaults to: `null`.
  - authorizationInfo ([AuthorizationInfo]): Authorization information. If there are multiple resources or permissions involved, then there is one AuthorizationInfo element for each {resource, permission} tuple. Defaults to: `null`.
  - metadata (%{optional(String.t) => String.t}): Other service-specific data about the request, response, and other information associated with the current audited event. Defaults to: `null`.
  - methodName (String.t): The name of the service method or operation. For API calls, this should be the name of the API method. For example, \"google.datastore.v1.Datastore.RunQuery\" \"google.logging.v1.LoggingService.DeleteLog\" Defaults to: `null`.
  - numResponseItems (String.t): The number of items returned from a List or Query API method, if applicable. Defaults to: `null`.
  - request (%{optional(String.t) => String.t}): The operation request. This may not include all request parameters, such as those that are too large, privacy-sensitive, or duplicated elsewhere in the log record. It should never include user-generated data, such as file contents. When the JSON object represented here has a proto equivalent, the proto name will be indicated in the `@type` property. Defaults to: `null`.
  - requestMetadata (RequestMetadata): Metadata about the operation. Defaults to: `null`.
  - resourceLocation (ResourceLocation): The resource location information. Defaults to: `null`.
  - resourceName (String.t): The resource or collection that is the target of the operation. The name is a scheme-less URI, not including the API service name. For example: \"shelves/SHELF_ID/books\" \"shelves/SHELF_ID/books/BOOK_ID\" Defaults to: `null`.
  - response (%{optional(String.t) => String.t}): The operation response. This may not include all response elements, such as those that are too large, privacy-sensitive, or duplicated elsewhere in the log record. It should never include user-generated data, such as file contents. When the JSON object represented here has a proto equivalent, the proto name will be indicated in the `@type` property. Defaults to: `null`.
  - serviceData (%{optional(String.t) => String.t}): Deprecated, use `metadata` field instead. Other service-specific data about the request, response, and other activities. Defaults to: `null`.
  - serviceName (String.t): The name of the API service performing the operation. For example, `\"datastore.googleapis.com\"`. Defaults to: `null`.
  - status (Status): The status of the overall operation. Defaults to: `null`.
  """

  # Generated code (see the header note) — ModelBase provides the struct and
  # the `field/2,3` macro used below for JSON (de)serialization metadata.
  use GoogleApi.Gax.ModelBase

  # Struct type mirroring the JSON shape of a Cloud Audit Log entry.
  @type t :: %__MODULE__{
          :authenticationInfo => GoogleApi.ServiceControl.V1.Model.AuthenticationInfo.t(),
          :authorizationInfo => list(GoogleApi.ServiceControl.V1.Model.AuthorizationInfo.t()),
          :metadata => map(),
          :methodName => any(),
          :numResponseItems => any(),
          :request => map(),
          :requestMetadata => GoogleApi.ServiceControl.V1.Model.RequestMetadata.t(),
          :resourceLocation => GoogleApi.ServiceControl.V1.Model.ResourceLocation.t(),
          :resourceName => any(),
          :response => map(),
          :serviceData => map(),
          :serviceName => any(),
          :status => GoogleApi.ServiceControl.V1.Model.Status.t()
        }

  # Each `field` registers how the attribute is decoded: `as:` nests another
  # model, `type: :list` wraps it in a list, `type: :map` keeps a raw map.
  field(:authenticationInfo, as: GoogleApi.ServiceControl.V1.Model.AuthenticationInfo)
  field(:authorizationInfo, as: GoogleApi.ServiceControl.V1.Model.AuthorizationInfo, type: :list)
  field(:metadata, type: :map)
  field(:methodName)
  field(:numResponseItems)
  field(:request, type: :map)
  field(:requestMetadata, as: GoogleApi.ServiceControl.V1.Model.RequestMetadata)
  field(:resourceLocation, as: GoogleApi.ServiceControl.V1.Model.ResourceLocation)
  field(:resourceName)
  field(:response, type: :map)
  field(:serviceData, type: :map)
  field(:serviceName)
  field(:status, as: GoogleApi.ServiceControl.V1.Model.Status)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceControl.V1.Model.AuditLog do
  # Delegate JSON decoding to the generated model implementation.
  def decode(value, options),
    do: GoogleApi.ServiceControl.V1.Model.AuditLog.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ServiceControl.V1.Model.AuditLog do
  # Delegate JSON encoding to the shared ModelBase implementation.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 64.595238 | 440 | 0.738481 |
7994f16497cc2c6ffe97c8002854840a6f44e829 | 7,295 | ex | Elixir | lib/mix/lib/mix/tasks/deps.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/mix/lib/mix/tasks/deps.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/mix/lib/mix/tasks/deps.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | defmodule Mix.Tasks.Deps do
use Mix.Task
import Mix.Dep, only: [load_on_environment: 1, format_dep: 1, format_status: 1, check_lock: 1]
@shortdoc "Lists dependencies and their status"
@moduledoc ~S"""
Lists all dependencies and their status.
Dependencies must be specified in the `mix.exs` file in one of
the following formats:
{app, requirement}
{app, opts}
{app, requirement, opts}
Where:
* app is an atom
* requirement is a `Version` requirement or a regular expression
* opts is a keyword list of options
For example:
{:plug, ">= 0.4.0"}
{:gettext, git: "https://github.com/elixir-lang/gettext.git", tag: "0.1"}
{:local_dependency, path: "path/to/local_dependency"}
By default, dependencies are fetched using the [Hex package manager](https://hex.pm/):
{:plug, ">= 0.4.0"}
By specifying such dependencies, Mix will automatically install
Hex (if it wasn't previously installed) and download a package
suitable to your project. Note Hex expects the dependency
requirement to always be given and it will warn otherwise.
Mix also supports Git and path dependencies:
{:foobar, git: "https://github.com/elixir-lang/foobar.git", tag: "0.1"}
{:foobar, path: "path/to/foobar"}
And also in umbrella dependencies:
{:my_app, in_umbrella: true}
Path and in umbrella dependencies are automatically recompiled by
the parent project whenever they change. While fetchable dependencies,
like the ones using `:git`, are recompiled only when fetched/updated.
The dependencies' versions are expected to be formatted according to
Semantic Versioning and the requirements must be specified as defined
in the `Version` module.
## Options
Below we provide a more detailed look into the available options.
### Dependency definition options
* `:app` - when set to `false`, does not read the app file for this
dependency. By default, the app file is read
* `:env` - the environment (as an atom) to run the dependency on; defaults to `:prod`
* `:compile` - a command (string) to compile the dependency; defaults to a `mix`,
`rebar` or `make` command
* `:optional` - marks the dependency as optional. In such cases, the
current project will always include the optional dependency but any
other project that depends on the current project won't be forced to
use the optional dependency. However, if the other project includes
the optional dependency on its own, the requirements and options
specified here will also be applied. Optional dependencies will _not_
be started by the application.
* `:only` - the dependency is made available only in the given environments,
useful when declaring dev- or test-only dependencies; by default the
dependency will be available in all environments. The value of this option
can either be a single environment (like `:dev`) or a list of environments
(like `[:dev, :test]`)
* `:targets` - the dependency is made available only for the given targets.
By default the dependency will be available in all environments. The value
of this option can either be a single target (like `:host`) or a list of
environments (like `[:host, :rpi3]`)
* `:override` - if set to `true` the dependency will override any other
definitions of itself by other dependencies
* `:manager` - Mix can also compile Rebar, Rebar3 and makefile projects
and can fetch sub dependencies of Rebar and Rebar3 projects. Mix will
try to infer the type of project but it can be overridden with this
option by setting it to `:mix`, `:rebar3`, `:rebar` or `:make`. In case
there are conflicting definitions, the first manager in the list above
will be picked up. For example, if a dependency is found with `:rebar3`
and `:rebar` managers in different part of the trees, `:rebar3` will
be automatically picked. You can find the manager by running `mix deps`
and override it by setting the `:override` option in a top-level project.
* `:runtime` - whether the dependency is part of runtime applications.
If the `:applications` key is not provided in `def application` in your
`mix.exs` file, Mix will automatically include all dependencies as a runtime
application, except if `runtime: false` is given. Defaults to true.
* `:system_env` - an enumerable of key-value tuples of binaries to be set
as environment variables when loading or compiling the dependency
### Git options (`:git`)
* `:git` - the Git repository URI
* `:github` - a shortcut for specifying Git repos from GitHub, uses `:git`
* `:ref` - the reference to checkout (may be a branch, a commit SHA or a tag)
* `:branch` - the Git branch to checkout
* `:tag` - the Git tag to checkout
* `:submodules` - when `true`, initialize submodules for the repo
* `:sparse` - checkout a single directory inside the Git repository and use it
as your Mix dependency. Search "sparse Git checkouts" for more information.
* `:subdir` - (since v1.13.0) search for the project in the given directory
relative to the git checkout. This is similar to `:sparse` option but instead
of a doing a sparse checkout it does a full checkout.
If your Git repository requires authentication, such as basic username:password
HTTP authentication via URLs, it can be achieved via Git configuration, keeping
the access rules outside of source control.
git config --global url."https://YOUR_USER:[email protected]/".insteadOf "https://example.com/"
For more information, see the `git config` documentation:
https://git-scm.com/docs/git-config#git-config-urlltbasegtinsteadOf
### Path options (`:path`)
* `:path` - the path for the dependency
* `:in_umbrella` - when `true`, sets a path dependency pointing to
"../#{app}", sharing the same environment as the current application
### Hex options (`:hex`)
See the [Hex usage documentation](https://hex.pm/docs/usage) for Hex options.
## Deps task
`mix deps` task lists all dependencies in the following format:
APP VERSION (SCM) (MANAGER)
[locked at REF]
STATUS
For dependencies satisfied by Hex, `REF` is the package checksum.
For dependencies satisfied by git, `REF` is the commit object name,
and may include branch or tag information.
It supports the following options:
* `--all` - lists all dependencies, regardless of specified environment
"""
@impl true
def run(args) do
Mix.Project.get!()
{opts, _, _} = OptionParser.parse(args, switches: [all: :boolean])
loaded_opts = if opts[:all], do: [], else: [env: Mix.env(), target: Mix.target()]
shell = Mix.shell()
load_on_environment(loaded_opts)
|> Enum.sort_by(& &1.app)
|> Enum.each(fn dep ->
%Mix.Dep{scm: scm, manager: manager} = dep
dep = check_lock(dep)
extra = if manager, do: " (#{manager})", else: ""
shell.info("* #{format_dep(dep)}#{extra}")
if formatted = scm.format_lock(dep.opts) do
shell.info(" locked at #{formatted}")
end
shell.info(" #{format_status(dep)}")
end)
end
end
| 39.010695 | 105 | 0.690884 |
7994f3f270c47c6bb46c2f8dbf5aac87f898c878 | 7,967 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1_document_page.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1_document_page.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1_document_page.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPage do
  @moduledoc """
  A page in a Document.

  ## Attributes

  *   `blocks` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageBlock.t)`, *default:* `nil`) - A list of visually detected text blocks on the page. A block has a set of lines (collected into paragraphs) that have a common line-spacing and orientation.
  *   `detectedLanguages` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageDetectedLanguage.t)`, *default:* `nil`) - A list of detected languages together with confidence.
  *   `dimension` (*type:* `GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageDimension.t`, *default:* `nil`) - Physical dimension of the page.
  *   `formFields` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageFormField.t)`, *default:* `nil`) - A list of visually detected form fields on the page.
  *   `image` (*type:* `GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageImage.t`, *default:* `nil`) - Rendered image for this page. This image is preprocessed to remove any skew, rotation, and distortions such that the annotation bounding boxes can be upright and axis-aligned.
  *   `layout` (*type:* `GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageLayout.t`, *default:* `nil`) - Layout for the page.
  *   `lines` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageLine.t)`, *default:* `nil`) - A list of visually detected text lines on the page. A collection of tokens that a human would perceive as a line.
  *   `pageNumber` (*type:* `integer()`, *default:* `nil`) - 1-based index for current Page in a parent Document. Useful when a page is taken out of a Document for individual processing.
  *   `paragraphs` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageParagraph.t)`, *default:* `nil`) - A list of visually detected text paragraphs on the page. A collection of lines that a human would perceive as a paragraph.
  *   `provenance` (*type:* `GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentProvenance.t`, *default:* `nil`) - The history of this page.
  *   `symbols` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageSymbol.t)`, *default:* `nil`) - A list of visually detected symbols on the page.
  *   `tables` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageTable.t)`, *default:* `nil`) - A list of visually detected tables on the page.
  *   `tokens` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageToken.t)`, *default:* `nil`) - A list of visually detected tokens on the page.
  *   `transforms` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageMatrix.t)`, *default:* `nil`) - Transformation matrices that were applied to the original document image to produce Page.image.
  *   `visualElements` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageVisualElement.t)`, *default:* `nil`) - A list of detected non-text visual elements e.g. checkbox, signature etc. on the page.
  """

  use GoogleApi.Gax.ModelBase

  # Every field is optional (`| nil`); the typespec mirrors the field/3
  # declarations below, one entry per JSON attribute.
  @type t :: %__MODULE__{
          :blocks =>
            list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageBlock.t()) | nil,
          :detectedLanguages =>
            list(
              GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageDetectedLanguage.t()
            )
            | nil,
          :dimension =>
            GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageDimension.t() | nil,
          :formFields =>
            list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageFormField.t())
            | nil,
          :image =>
            GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageImage.t() | nil,
          :layout =>
            GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageLayout.t() | nil,
          :lines =>
            list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageLine.t()) | nil,
          :pageNumber => integer() | nil,
          :paragraphs =>
            list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageParagraph.t())
            | nil,
          :provenance =>
            GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentProvenance.t() | nil,
          :symbols =>
            list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageSymbol.t())
            | nil,
          :tables =>
            list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageTable.t()) | nil,
          :tokens =>
            list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageToken.t()) | nil,
          :transforms =>
            list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageMatrix.t())
            | nil,
          :visualElements =>
            list(
              GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageVisualElement.t()
            )
            | nil
        }

  # JSON <-> struct mappings.  `field/3` comes from the
  # `use GoogleApi.Gax.ModelBase` line above; `type: :list` marks repeated
  # attributes, `as:` names the nested model used to decode each value.
  field(:blocks,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageBlock,
    type: :list
  )

  field(:detectedLanguages,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageDetectedLanguage,
    type: :list
  )

  field(:dimension, as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageDimension)

  field(:formFields,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageFormField,
    type: :list
  )

  field(:image, as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageImage)
  field(:layout, as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageLayout)

  field(:lines,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageLine,
    type: :list
  )

  field(:pageNumber)

  field(:paragraphs,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageParagraph,
    type: :list
  )

  field(:provenance, as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentProvenance)

  field(:symbols,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageSymbol,
    type: :list
  )

  field(:tables,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageTable,
    type: :list
  )

  field(:tokens,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageToken,
    type: :list
  )

  field(:transforms,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageMatrix,
    type: :list
  )

  field(:visualElements,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPageVisualElement,
    type: :list
  )
end
defimpl Poison.Decoder, for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPage do
  alias GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPage, as: Model

  # Delegate to the generated model module's decode/2.
  def decode(value, options), do: Model.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1DocumentPage do
  alias GoogleApi.Gax.ModelBase

  # Encoding is shared by all generated models via ModelBase.
  def encode(value, options), do: ModelBase.encode(value, options)
end
| 51.070513 | 294 | 0.730137 |
7994f477f5d0c6c4a4ad88e8813a04008a4f6e1a | 634 | ex | Elixir | lib/carbon.ex | elixir-carbon/carbon | 9537596c9a409e4573cb7cb75cfa0a2e31d9b153 | [
"Apache-2.0"
] | 13 | 2016-07-04T00:44:18.000Z | 2016-07-11T22:02:11.000Z | lib/carbon.ex | elixir-carbon/carbon | 9537596c9a409e4573cb7cb75cfa0a2e31d9b153 | [
"Apache-2.0"
] | 1 | 2016-07-12T23:09:45.000Z | 2016-07-13T01:19:11.000Z | lib/carbon.ex | elixirdrops/carbon | 9537596c9a409e4573cb7cb75cfa0a2e31d9b153 | [
"Apache-2.0"
] | 1 | 2016-12-10T07:27:04.000Z | 2016-12-10T07:27:04.000Z | defmodule Carbon do
def password_hash(password) do
Comeonin.Bcrypt.hashpwsalt(password)
end
def password_verify(password, hash) do
Comeonin.Bcrypt.checkpw(password, hash)
end
def password_verify(_) do
Comeonin.Bcrypt.dummy_checkpw
end
def password_reset_token do
# TODO: improve token generation
hash = :crypto.hash(:md5, "#{System.system_time}")
Base.encode16(hash, case: :lower)
end
def get_user(user_id) do
repo.get(model(), user_id)
end
def repo do
Application.get_env(:carbon, :repo)
end
def model do
Application.get_env(:carbon, :model, Carbon.User)
end
end
| 19.8125 | 54 | 0.705047 |
7994f9ac3e2b3d0fdb9d41457eff512674ccdab3 | 535 | ex | Elixir | lib/razorpay/error.ex | aarvay/razorpay-elixir | ef9c9360dc1fc433b3731a61aec97a1af90f694b | [
"ISC"
] | 1 | 2018-09-26T11:17:02.000Z | 2018-09-26T11:17:02.000Z | lib/razorpay/error.ex | aarvay/razorpay-elixir | ef9c9360dc1fc433b3731a61aec97a1af90f694b | [
"ISC"
] | null | null | null | lib/razorpay/error.ex | aarvay/razorpay-elixir | ef9c9360dc1fc433b3731a61aec97a1af90f694b | [
"ISC"
] | 2 | 2019-10-07T05:53:16.000Z | 2019-11-16T12:01:47.000Z | defmodule Razorpay.Error do
  @derive [Poison.Encoder]
  # Mirrors Razorpay's error payload: a machine-readable code, a
  # human-readable description, and (optionally) the offending field.
  defexception [:code, :description, :field]

  @type t :: %__MODULE__{code: binary, description: binary, field: binary}
  # Convenience shape for `{:error, %Razorpay.Error{}}` returns.
  @type type :: {:error, t}
def message(%{code: code, description: desc, field: field}) do
code =
code
|> String.downcase
|> Macro.camelize
message = "#{code}: #{desc}"
if field, do: message <> "in the field: \"#{field}\"", else: message
end
def exception(message) do
%__MODULE__{description: message}
end
end
| 24.318182 | 74 | 0.629907 |
7994fcb54a938579693ba77c12474a270f8ece73 | 492 | ex | Elixir | apps/jobex_web/lib/jobex_web/live/component/counter_comp.ex | andyl/jobex | a51d6ecc4e8e8d62ba7cdf3796305a5da369e4e2 | [
"MIT"
] | 10 | 2019-10-24T01:23:07.000Z | 2020-02-23T00:27:32.000Z | apps/jobex_web/lib/jobex_web/live/component/counter_comp.ex | andyl/crow | a51d6ecc4e8e8d62ba7cdf3796305a5da369e4e2 | [
"MIT"
] | 6 | 2021-03-09T18:41:09.000Z | 2021-05-27T13:02:42.000Z | apps/jobex_web/lib/jobex_web/live/component/counter_comp.ex | andyl/crow | a51d6ecc4e8e8d62ba7cdf3796305a5da369e4e2 | [
"MIT"
] | 1 | 2019-10-24T01:23:08.000Z | 2019-10-24T01:23:08.000Z | defmodule JobexWeb.Live.Component.CounterComp do
use Phoenix.LiveComponent
  # Renders the counter plus -/+ buttons.  The phx-click events here are
  # handled by the handle_event/3 clauses in this component.
  def render(assigns) do
    ~L"""
    <div>
      <h4>CounterComponent: <%= @count %></h4>
      <button phx-click="com_dec">-</button>
      <button phx-click="com_inc">+</button>
    </div>
    """
  end
def handle_event("com_inc", _, socket) do
{:noreply, update(socket, :count, &(&1 + 1))}
end
def handle_event("com_dec", _, socket) do
{:noreply, update(socket, :count, &(&1 - 1))}
end
end
| 21.391304 | 49 | 0.601626 |
799523f6fedd7a3fca70b477522958dd8a8bde51 | 185 | exs | Elixir | priv/repo/migrations/20201003190130_change_description_to_text_events.exs | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 83 | 2018-05-31T14:49:10.000Z | 2022-03-27T16:49:49.000Z | priv/repo/migrations/20201003190130_change_description_to_text_events.exs | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 267 | 2018-05-22T23:19:02.000Z | 2022-03-31T04:31:06.000Z | priv/repo/migrations/20201003190130_change_description_to_text_events.exs | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 18 | 2018-11-20T05:15:54.000Z | 2022-03-28T08:20:13.000Z | defmodule Console.Repo.Migrations.ChangeDescriptionToTextEvents do
use Ecto.Migration
  # Widens events.description to :text so longer descriptions fit.
  # NOTE(review): `modify` without a `from:` option cannot be rolled back
  # automatically — presumably the previous type was :string; if rollback
  # matters, confirm and add `from: :string`.
  def change do
    alter table("events") do
      modify :description, :text
    end
  end
end
| 18.5 | 66 | 0.735135 |
79956dab6ec728af2f551751f746ead9f47993e0 | 3,262 | ex | Elixir | clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_google_ads_link.ex | myskoach/elixir-google-api | 4f8cbc2fc38f70ffc120fd7ec48e27e46807b563 | [
"Apache-2.0"
] | null | null | null | clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_google_ads_link.ex | myskoach/elixir-google-api | 4f8cbc2fc38f70ffc120fd7ec48e27e46807b563 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_google_ads_link.ex | myskoach/elixir-google-api | 4f8cbc2fc38f70ffc120fd7ec48e27e46807b563 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaGoogleAdsLink do
  @moduledoc """
  A link between an GA4 property and a Google Ads account.

  ## Attributes

  *   `adsPersonalizationEnabled` (*type:* `boolean()`, *default:* `nil`) - Enable personalized advertising features with this integration. Automatically publish my Google Analytics audience lists and Google Analytics remarketing events/parameters to the linked Google Ads account. If this field is not set on create/update it will be defaulted to true.
  *   `canManageClients` (*type:* `boolean()`, *default:* `nil`) - Output only. If true, this link is for a Google Ads manager account.
  *   `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Time when this link was originally created.
  *   `customerId` (*type:* `String.t`, *default:* `nil`) - Immutable. Google Ads customer ID.
  *   `emailAddress` (*type:* `String.t`, *default:* `nil`) - Output only. Email address of the user that created the link. An empty string will be returned if the email address can't be retrieved.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Output only. Format: properties/{propertyId}/googleAdsLinks/{googleAdsLinkId} Note: googleAdsLinkId is not the Google Ads customer ID.
  *   `parent` (*type:* `String.t`, *default:* `nil`) - Immutable. Format: properties/{propertyId}
  *   `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Time when this link was last updated.
  """

  use GoogleApi.Gax.ModelBase

  # Every field is optional (`| nil`); the typespec mirrors the field/1,2
  # declarations below.
  @type t :: %__MODULE__{
          :adsPersonalizationEnabled => boolean(),
          :canManageClients => boolean(),
          :createTime => DateTime.t(),
          :customerId => String.t(),
          :emailAddress => String.t(),
          :name => String.t(),
          :parent => String.t(),
          :updateTime => DateTime.t()
        }

  # JSON <-> struct mappings; `as: DateTime` decodes timestamp strings
  # into DateTime structs.
  field(:adsPersonalizationEnabled)
  field(:canManageClients)
  field(:createTime, as: DateTime)
  field(:customerId)
  field(:emailAddress)
  field(:name)
  field(:parent)
  field(:updateTime, as: DateTime)
end
defimpl Poison.Decoder,
  for: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaGoogleAdsLink do
  alias GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaGoogleAdsLink, as: Model

  # Delegate to the generated model module's decode/2.
  def decode(value, options), do: Model.decode(value, options)
end
defimpl Poison.Encoder,
  for: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaGoogleAdsLink do
  alias GoogleApi.Gax.ModelBase

  # Encoding is shared by all generated models via ModelBase.
  def encode(value, options), do: ModelBase.encode(value, options)
end
| 44.684932 | 353 | 0.713979 |
7995a961cd09979d15ba81bcceaaca12b818204a | 1,349 | ex | Elixir | lib/galapagos_nao/selection.ex | jeffreyksmithjr/galapgos_nao | 6123d770b4019c8225d64b9a4a584b4ee0019063 | [
"MIT"
] | 21 | 2018-03-19T02:52:38.000Z | 2020-07-21T06:18:54.000Z | lib/galapagos_nao/selection.ex | jeffreyksmithjr/galapgos_nao | 6123d770b4019c8225d64b9a4a584b4ee0019063 | [
"MIT"
] | 23 | 2018-02-21T23:34:41.000Z | 2018-04-07T15:13:56.000Z | lib/galapagos_nao/selection.ex | jeffreyksmithjr/galapgos_nao | 6123d770b4019c8225d64b9a4a584b4ee0019063 | [
"MIT"
] | 1 | 2020-04-28T16:38:03.000Z | 2020-04-28T16:38:03.000Z | defmodule GN.Selection do
use Agent
def select(pid \\ __MODULE__, nets) do
cutoffs = cutoffs(nets)
for net <- nets do
complexity = length(net.layers)
level = Enum.min([Enum.find_index(cutoffs, &(&1 >= complexity)) + 1, complexity_levels()])
net_acc = net.test_acc
elite_acc = Map.get(get(level), :test_acc)
if is_nil(elite_acc) or net_acc > elite_acc do
put(pid, level, net)
end
end
get_all(pid)
end
def cutoffs(nets) do
max_complexity =
Enum.map(nets, &length(Map.get(&1, :layers)))
|> Enum.max()
interval = max_complexity / complexity_levels()
for level <- 1..complexity_levels() do
interval * level
end
end
def complexity_levels do
GN.Parameters.get(__MODULE__, :complexity_levels)
end
def start_link(opts \\ []) do
opts = Keyword.put_new(opts, :name, __MODULE__)
Agent.start_link(fn -> %{} end, opts)
end
def put_unevaluated(pid \\ __MODULE__, net) do
new_id = (get_all(pid) |> Map.keys() |> Enum.min(fn -> 0 end)) - 1
put(pid, new_id, net)
end
def put(pid \\ __MODULE__, key, net) do
Agent.update(pid, &Map.put(&1, key, net))
end
def get(pid \\ __MODULE__, key) do
Agent.get(pid, &Map.get(&1, key, %{}))
end
def get_all(pid \\ __MODULE__) do
Agent.get(pid, & &1)
end
end
| 22.864407 | 96 | 0.618977 |
7995e675dbaa5f2ff0768471e948eb904734a07f | 12,890 | exs | Elixir | test/crit_web/controllers/setup/animal_controller/update_test.exs | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 6 | 2019-07-16T19:31:23.000Z | 2021-06-05T19:01:05.000Z | test/crit_web/controllers/setup/animal_controller/update_test.exs | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | null | null | null | test/crit_web/controllers/setup/animal_controller/update_test.exs | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 3 | 2020-02-24T23:38:27.000Z | 2020-08-01T23:50:17.000Z | defmodule CritWeb.Setup.AnimalController.UpdateTest do
use CritWeb.ConnCase
use PhoenixIntegration
alias CritWeb.Setup.AnimalController, as: UnderTest
use CritWeb.ConnMacros, controller: UnderTest
alias CritBiz.ViewModels.Setup, as: VM
import Crit.RepoState
alias Crit.Exemplars, as: Ex
use FlowAssertions
setup :logged_in_as_setup_manager
# ----------------------------------------------------------------------------
  # GET :update_form renders the edit page for an animal, including one
  # blank "add a service gap" subform plus one subform per existing gap.
  describe "the update form" do
    setup do
      repo =
        Ex.Bossie.create
        |> service_gap_for("Bossie", name: "sg", starting: @earliest_date)

      [repo: repo]
    end

    test "unit test", %{conn: conn, repo: repo} do
      get_via_action(conn, :update_form, to_string(repo.bossie.id))
      |> assert_purpose(form_for_editing_animal())
      |> assert_user_sees(["Bossie", @earliest_iso_date])
    end

    test "details about form structure", %{conn: conn, repo: repo} do
      inputs =
        get_via_action(conn, :update_form, repo.bossie.id)
        |> form_inputs(:animal)

      inputs |> assert_animal_form_for(repo.bossie)
      # Subform 0 is the empty "add a gap" form; subform 1 is the existing gap.
      service_gap(inputs, 0) |> assert_empty_service_gap_form
      service_gap(inputs, 1) |> assert_service_gap_form_for(repo.sg)
    end
  end
# ----------------------------------------------------------------------------
  # POST of the update form: happy path, blank-subform handling, and the
  # three error reports (validation, unique name, optimistic lock).
  describe "update a single animal" do
    setup do
      repo =
        Ex.Bossie.create
        |> Ex.Bossie.put_service_gap(reason: "will change")
        |> Ex.Bossie.put_service_gap(reason: "won't change")
        |> Ex.Bossie.put_service_gap(reason: "will delete")

      [repo: repo]
    end

    test "success", %{conn: conn, repo: repo} do
      # Index 0 is the blank "new gap" subform; 1 and 3 address existing gaps.
      changes = %{name: "new name!",
                  service_gaps: %{
                    0 => %{"reason" => "newly added",
                           "in_service_datestring" => "2300-01-02",
                           "out_of_service_datestring" => "2300-01-03"
                          },
                    1 => %{"reason" => "replaces: will change"},
                    3 => %{"delete" => "true"}
                  }}

      correct_update(conn, repo.bossie, changing: changes)
      # Note that service gaps are not displayed as a part of a snippet
      |> assert_user_sees("new name")

      # Check that the changes propagate
      animal = VM.Animal.fetch(:one_for_edit, repo.bossie.id, @institution)
      assert_field(animal, name: "new name!")

      [changed, _unchanged, added] = ListX.sorted_by_id(animal, :service_gaps)
      assert_field(changed, reason: "replaces: will change")
      assert_fields(added,
        reason: "newly added",
        in_service_datestring: "2300-01-02",
        out_of_service_datestring: "2300-01-03")
    end

    test "a *blank* service gap form is ignored", %{conn: conn, repo: repo} do
      original = VM.Animal.fetch(:one_for_edit, repo.bossie.id, @institution)

      correct_update(conn, repo.bossie, changing: %{})

      # Only the optimistic-lock version may change on a no-op submit.
      VM.Animal.fetch(:one_for_edit, repo.bossie.id, @institution)
      |> assert_same_map(original, except: [lock_version: original.lock_version + 1])
    end

    test "validation failures produce appropriate messages in the HTML",
      %{conn: conn, repo: repo} do
      changes = %{in_service_datestring: @iso_date_2,
                  out_of_service_datestring: @iso_date_1,
                  service_gaps: %{1 => %{reason: ""}}}

      incorrect_update(conn, repo.bossie, changing: changes)
      |> assert_user_sees(@date_misorder_message)
      |> assert_user_sees(@blank_message_in_html)
    end

    test "`name` constraint error is reported", %{conn: conn, repo: repo} do
      animal(repo, "Jake")
      changes = %{name: "Jake"}

      incorrect_update(conn, repo.bossie, changing: changes)
      |> assert_user_sees(@already_taken)
    end

    test "optimistic lock constraint error is reported", %{conn: conn, repo: repo} do
      first_in_changes = %{name: "This is the new name"}
      second_in_changes = %{name: "Jake"}

      conn = get_via_action(conn, :update_form, repo.bossie.id)

      # First try.
      conn
      |> follow_form(%{animal: first_in_changes})
      |> assert_purpose(snippet_to_display_animal()) # success

      # Second submit reuses the now-stale lock value from the same form.
      conn
      |> follow_form(%{animal: second_in_changes})
      |> assert_purpose(form_for_editing_animal()) # fail
      |> assert_user_sees(@animal_optimistic_lock)
      # And the animal is updated with new values.
      |> assert_user_sees(first_in_changes.name)
    end
  end
# ----------------------------------------------------------------------------
  # After a failed submit, the re-rendered form must keep the user's bad
  # input in the right subform: subform 0 (the blank "add a gap" form) and
  # subform 1 (the pre-existing gap) are checked in every combination of
  # animal-level and gap-level errors.
  describe "handling of the 'add a new service gap' field when there are form errors" do
    # This is tested here because it's easier to check that the right form
    # is displayed than that the more-complex changeset structure is filled out
    # correctly.
    @sg_start @date_1
    @iso_sg_start @iso_date_1
    @sg_end @date_2
    @iso_sg_end @iso_date_2

    setup do
      repo =
        empty_repo(@equine_id)
        |> animal("Jake", available: Ex.Datespan.named(:widest_infinite))
        |> service_gap_for("Jake", name: "sg", starting: @sg_start, ending: @sg_end)

      [repo: repo]
    end

    test "only an error in the animal part", %{conn: conn, repo: repo} do
      # out-of-service == in-service triggers the date-misorder validation.
      same_as_in_service = Ex.Datespan.in_service_datestring(:widest_infinite)
      changes = %{out_of_service_datestring: same_as_in_service}

      inputs =
        incorrect_update(conn, repo.jake, changing: changes)
        |> assert_user_sees(@date_misorder_message)
        |> form_inputs(:animal)
        |> assert_field(out_of_service_datestring: same_as_in_service)

      service_gap(inputs, 0) |> assert_empty_service_gap_form
      service_gap(inputs, 1) |> assert_unchanged_service_gap_form(repo.sg)
    end

    test "an error only in the existing service gaps", %{conn: conn, repo: repo} do
      changes = %{service_gaps: %{1 => %{in_service_datestring: @iso_sg_end}}}

      inputs =
        incorrect_update(conn, repo.jake, changing: changes)
        |> assert_user_sees(@date_misorder_message)
        |> form_inputs(:animal)

      service_gap(inputs, 0) |> assert_empty_service_gap_form
      service_gap(inputs, 1)
      |> assert_unchanged_service_gap_form(repo.sg, except: changes.service_gaps[1])
    end

    test "an error only in the new service gap", %{conn: conn, repo: repo} do
      # A date with no reason leaves the new-gap subform blank-but-invalid.
      changes = %{service_gaps: %{0 => %{out_of_service_datestring: @iso_sg_start}}}

      inputs =
        incorrect_update(conn, repo.jake, changing: changes)
        |> assert_user_sees(@blank_message_in_html)
        |> form_inputs(:animal)

      service_gap(inputs, 0)
      |> assert_empty_service_gap_form(except: changes.service_gaps[0])
      service_gap(inputs, 1) |> assert_unchanged_service_gap_form(repo.sg)
    end

    test "errors in the new and old service gaps", %{conn: conn, repo: repo} do
      changes =
        %{service_gaps: %{0 => %{reason: "form now in error"},
                          1 => %{out_of_service_datestring: @iso_sg_start}}}

      inputs =
        incorrect_update(conn, repo.jake, changing: changes)
        |> assert_user_sees(@date_misorder_message)
        |> assert_user_sees(@blank_message_in_html)
        |> form_inputs(:animal)

      service_gap(inputs, 0)
      |> assert_empty_service_gap_form(except: changes.service_gaps[0])
      service_gap(inputs, 1)
      |> assert_unchanged_service_gap_form(repo.sg, except: changes.service_gaps[1])
    end

    test "an error in the new service gap and animal", %{conn: conn, repo: repo} do
      changes =
        %{out_of_service_datestring: @earliest_iso_date,
          service_gaps: %{0 => %{out_of_service_datestring: @iso_sg_start}}}

      inputs =
        incorrect_update(conn, repo.jake, changing: changes)
        |> assert_user_sees(@date_misorder_message)
        |> assert_user_sees(@blank_message_in_html)
        |> form_inputs(:animal)
        |> assert_field(out_of_service_datestring: @earliest_iso_date)

      service_gap(inputs, 0)
      |> assert_empty_service_gap_form(except: changes.service_gaps[0])
      service_gap(inputs, 1) |> assert_unchanged_service_gap_form(repo.sg)
    end

    test "an error in the old service gap and animal", %{conn: conn, repo: repo} do
      changes =
        %{out_of_service_datestring: @earliest_iso_date,
          service_gaps: %{1 => %{out_of_service_datestring: @iso_sg_start}}}

      inputs =
        incorrect_update(conn, repo.jake, changing: changes)
        |> assert_user_sees(@date_misorder_message)
        |> form_inputs(:animal)
        |> assert_field(out_of_service_datestring: @earliest_iso_date)

      service_gap(inputs, 0) |> assert_empty_service_gap_form
      service_gap(inputs, 1)
      |> assert_unchanged_service_gap_form(repo.sg, except: changes.service_gaps[1])
    end
  end
  # Same error-redisplay checks as above, but for an animal with no
  # pre-existing gaps — only the blank subform (index 0) is present.
  # (Reuses @iso_sg_start defined in the previous describe block; module
  # attributes remain in scope for the rest of the module.)
  describe "the common case where there is no existing service gap" do
    setup do
      repo = Ex.Bossie.create
      [repo: repo]
    end

    test "an error in the new service gap and animal", %{conn: conn, repo: repo} do
      changes =
        %{out_of_service_datestring: @earliest_iso_date,
          service_gaps: %{0 => %{out_of_service_datestring: @iso_sg_start}}}

      inputs =
        incorrect_update(conn, repo.bossie, changing: changes)
        |> assert_user_sees(@date_misorder_message)
        |> assert_user_sees(@blank_message_in_html)
        |> form_inputs(:animal)
        |> assert_field(out_of_service_datestring: @earliest_iso_date)

      service_gap(inputs, 0)
      |> assert_empty_service_gap_form(except: changes.service_gaps[0])
    end

    test "an error in just the new service gap", %{conn: conn, repo: repo} do
      changes =
        %{service_gaps: %{0 => %{out_of_service_datestring: @iso_sg_start}}}

      inputs =
        incorrect_update(conn, repo.bossie, changing: changes)
        |> assert_user_sees(@blank_message_in_html)
        |> form_inputs(:animal)

      service_gap(inputs, 0)
      |> assert_empty_service_gap_form(except: changes.service_gaps[0])
    end

    test "an error in just the animal", %{conn: conn, repo: repo} do
      changes = %{out_of_service_datestring: @earliest_iso_date}

      inputs =
        incorrect_update(conn, repo.bossie, changing: changes)
        |> assert_user_sees(@date_misorder_message)
        |> form_inputs(:animal)
        |> assert_field(out_of_service_datestring: @earliest_iso_date)

      service_gap(inputs, 0) |> assert_empty_service_gap_form
    end
  end
# ----------------------------------------------------------------------------
defp follow_update_form(conn, %{id: id}, [changing: changes]) do
get_via_action(conn, :update_form, id)
|> follow_form(%{animal: changes})
end
defp correct_update(conn, animal, opts) do
follow_update_form(conn, animal, opts)
|> assert_purpose(snippet_to_display_animal())
end
defp incorrect_update(conn, animal, opts) do
follow_update_form(conn, animal, opts)
|> assert_purpose(form_for_editing_animal())
end
# ----------------------------------------------------------------------------
defp service_gap(inputs, index), do: subform(inputs, :service_gaps, index)
defp assert_animal_form_for(inputs, ecto_version) do
keys = [:name, :lock_version, :in_service_datestring, :out_of_service_datestring]
expected = VM.Animal.lift(ecto_version, @institution)
assert_form_matches(inputs, view_model: expected, in: keys)
end
  # These two names run the same check; the caller picks whichever reads
  # better in context ("the form wasn't touched" vs. "the form shows this
  # gap").
  defp assert_unchanged_service_gap_form(inputs, ecto_version),
    do: assert_service_gap_form_for(inputs, ecto_version)

  defp assert_service_gap_form_for(inputs, ecto_version),
    do: assert_unchanged_service_gap_form(inputs, ecto_version, except: [])
defp assert_unchanged_service_gap_form(inputs, ecto_version, [except: changes]) do
keys = [:reason, :in_service_datestring, :out_of_service_datestring, :id, :delete]
expected =
VM.ServiceGap.lift(ecto_version, @institution)
|> struct!(changes)
assert_form_matches(inputs, view_model: expected, in: keys)
end
defp assert_empty_service_gap_form(inputs),
do: assert_empty_service_gap_form(inputs, except: [])
defp assert_empty_service_gap_form(inputs, [except: changes]) when is_map(changes),
do: assert_empty_service_gap_form(inputs, except: Keyword.new(changes))
defp assert_empty_service_gap_form(inputs, [except: changes]) do
refute Map.has_key?(inputs, :id)
refute Map.has_key?(inputs, :delete)
expected = struct!(%VM.ServiceGap{}, changes)
keys = [:reason, :in_service_datestring, :out_of_service_datestring]
assert_form_matches(inputs, view_model: expected, in: keys)
end
end
| 36.309859 | 88 | 0.646315 |
79960d40b50bc6a3ae7ffcebc1004e44930dbbc1 | 1,762 | ex | Elixir | clients/prediction/lib/google_api/prediction/v16/model/analyze_data_description_categorical.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/prediction/lib/google_api/prediction/v16/model/analyze_data_description_categorical.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/prediction/lib/google_api/prediction/v16/model/analyze_data_description_categorical.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Prediction.V16.Model.AnalyzeDataDescriptionCategorical do
  @moduledoc """
  Description of the categorical values of this feature.

  ## Attributes

  - count (String): Number of categorical values for this feature in the data. Defaults to: `null`.
  - values (List[AnalyzeDataDescriptionCategoricalValues]): List of all the categories for this feature in the data set. Defaults to: `null`.
  """

  # Older generator style: a plain struct with no typespec or field/3
  # macros; (de)serialization is handled by the Poison impls below.
  defstruct [
    :"count",
    :"values"
  ]
end
defimpl Poison.Decoder, for: GoogleApi.Prediction.V16.Model.AnalyzeDataDescriptionCategorical do
  import GoogleApi.Prediction.V16.Deserializer

  # Resolve each entry of the :values list into its nested model struct.
  def decode(value, options) do
    deserialize(
      value,
      :"values",
      :list,
      GoogleApi.Prediction.V16.Model.AnalyzeDataDescriptionCategoricalValues,
      options
    )
  end
end
defimpl Poison.Encoder, for: GoogleApi.Prediction.V16.Model.AnalyzeDataDescriptionCategorical do
  alias GoogleApi.Prediction.V16.Deserializer

  # Delegate serialization to the shared helper.
  def encode(value, options), do: Deserializer.serialize_non_nil(value, options)
end
| 35.24 | 141 | 0.767877 |
79962b6ef64f860157c5a643521aee167186a180 | 920 | ex | Elixir | lib/credo/service/source_file_scopes.ex | sevenseacat/credo | 48837401040d9c2340b5fb9c7d786d31f89f6426 | [
"MIT"
] | 1 | 2020-01-31T10:23:37.000Z | 2020-01-31T10:23:37.000Z | lib/credo/service/source_file_scopes.ex | sevenseacat/credo | 48837401040d9c2340b5fb9c7d786d31f89f6426 | [
"MIT"
] | null | null | null | lib/credo/service/source_file_scopes.ex | sevenseacat/credo | 48837401040d9c2340b5fb9c7d786d31f89f6426 | [
"MIT"
] | null | null | null | defmodule Credo.Service.SourceFileScopes do
use GenServer
@table_name __MODULE__
  # Starts the singleton cache, registered under the module name.
  # `opts` is forwarded as the GenServer start argument, which init/1
  # ignores.  The {:ok, _pid} match makes startup failure crash the caller.
  def start_link(opts \\ []) do
    {:ok, _pid} = GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end
def get(filename) do
GenServer.call(__MODULE__, {:get, filename})
end
def put(filename, source) do
GenServer.call(__MODULE__, {:put, filename, source})
end
# callbacks
def init(_) do
ets = :ets.new(@table_name, [:named_table, read_concurrency: true])
{:ok, ets}
end
def handle_call({:get, filename}, _from, current_state) do
case :ets.lookup(@table_name, filename) do
[{^filename, value}] ->
{:reply, {:ok, value}, current_state}
[] ->
{:reply, :notfound, current_state}
end
end
def handle_call({:put, filename, source}, _from, current_state) do
:ets.insert(@table_name, {filename, source})
{:reply, source, current_state}
end
end
| 23 | 74 | 0.655435 |
79962c510f826e19f83bb7d82d76a5a6cf013e38 | 159 | exs | Elixir | base/fc_state_storage/test/fc_state_storage_test.exs | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 46 | 2018-10-13T23:18:13.000Z | 2021-08-07T07:46:51.000Z | base/fc_state_storage/test/fc_state_storage_test.exs | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 25 | 2018-10-14T00:56:07.000Z | 2019-12-23T19:41:02.000Z | base/fc_state_storage/test/fc_state_storage_test.exs | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 5 | 2018-12-16T04:39:51.000Z | 2020-10-01T12:17:03.000Z | defmodule FCStateStorageTest do
use ExUnit.Case
doctest FCStateStorage
test "greets the world" do
assert FCStateStorage.hello() == :world
end
end
| 17.666667 | 43 | 0.748428 |
799638fff2f16b4345d54ca352570d06d913338f | 477 | ex | Elixir | lib/elixir_ecommerce_web/controllers/public/products_controller.ex | abmBispo/elixir-ecommerce | 0507f7621d68ba8f0f65409a1a503683b7c0d37b | [
"MIT"
] | 4 | 2020-05-29T03:33:02.000Z | 2021-08-21T23:01:48.000Z | lib/elixir_ecommerce_web/controllers/public/products_controller.ex | abmBispo/elixir-ecommerce | 0507f7621d68ba8f0f65409a1a503683b7c0d37b | [
"MIT"
] | 2 | 2020-07-29T01:50:46.000Z | 2021-08-31T20:10:47.000Z | lib/elixir_ecommerce_web/controllers/public/products_controller.ex | abmBispo/elixir-ecommerce | 0507f7621d68ba8f0f65409a1a503683b7c0d37b | [
"MIT"
] | 1 | 2022-03-21T18:13:21.000Z | 2022-03-21T18:13:21.000Z | defmodule ElixirEcommerceWeb.ProductsController do
use ElixirEcommerceWeb, :controller
alias ElixirEcommerce.{
UserManager.User,
Product,
Department
}
plug ElixirEcommerceWeb.Authorize, resource: User
def show(conn, params) do
user = Guardian.Plug.current_resource(conn)
departments = Department.all()
product = Product.retrieve(params["id"])
render(conn, :show, departments: departments, product: product, current_user: user)
end
end
| 26.5 | 87 | 0.742138 |
79964f892276fda83244fee31d631e9c00078aa4 | 2,328 | ex | Elixir | higher_order/lib/list_comp.ex | pascal-p/elixir-etudes | 097937783f33ce19af0b61e1c331482226d1fd96 | [
"BSD-2-Clause"
] | null | null | null | higher_order/lib/list_comp.ex | pascal-p/elixir-etudes | 097937783f33ce19af0b61e1c331482226d1fd96 | [
"BSD-2-Clause"
] | null | null | null | higher_order/lib/list_comp.ex | pascal-p/elixir-etudes | 097937783f33ce19af0b61e1c331482226d1fd96 | [
"BSD-2-Clause"
] | null | null | null | defmodule ListComp do
  @moduledoc """
  Etude comparing explicit recursion with list comprehensions for filtering
  a list of `{name, sex, age}` tuples.

  Note: the recursive variants prepend matches to an accumulator, so they
  return results in reverse input order; the comprehension variants keep
  input order (the doctest outputs reflect this difference).
  """

  # private function to return a list of people
  @spec get_people() :: list(tuple())
  defp get_people() do
    [{"Federico", "M", 22}, {"Kim", "F", 45}, {"Hansa", "F", 30},
     {"Tran", "M", 47}, {"Cathy", "F", 32}, {"Elias", "M", 50}]
  end
  @doc """
  using pattern matching on a list of tuples to extract male over 40 (logical and)
  ## Examples
  iex> ListComp.extract_male_over_40
  [{"Elias", "M", 50}, {"Tran", "M", 47}]
  """
  def extract_male_over_40(list \\ get_people()) do
    extract_male_over_40(list, [])
  end
  @doc """
  using pattern matching on a list of tuples to extract male or over 40.
  ## Examples
  iex> ListComp.extract_male_or_over_40
  [{"Elias", "M", 50}, {"Tran", "M", 47}, {"Kim", "F", 45}, {"Federico", "M", 22}]
  """
  def extract_male_or_over_40(list \\ get_people) do
    extract_male_or_over_40(list, [])
  end
  @doc """
  using list comprehension and pattern matching on a list of tuples to extract male over
  40 (logical and)
  ## Examples
  iex> ListComp.extract_male_over_40_lc
  [{"Tran", "M", 47}, {"Elias", "M", 50}]
  """
  def extract_male_over_40_lc(list \\ get_people()) do
    for {name, sex, age} <- list, sex == "M", age > 40 do
      {name, sex, age}
    end
  end
  @doc """
  using list comprehension and pattern matching on a list of tuples to extract male or over 40.
  ## Examples
  iex> ListComp.extract_male_or_over_40_lc
  [{"Federico", "M", 22}, {"Kim", "F", 45}, {"Tran", "M", 47}, {"Elias", "M", 50}]
  """
  def extract_male_or_over_40_lc(list \\ get_people) do
    for {name, sex, age} <- list, sex == "M" or age > 40 do
      {name, sex, age}
    end
  end
  # private helper functions

  # Accumulator-based recursion: matches ("M" AND age > 40) are prepended,
  # so the result comes back in reverse input order.
  defp extract_male_over_40([], rl), do: rl
  defp extract_male_over_40([ {name, sex, age} | cdr ], rl) when sex == "M" and age > 40 do
    extract_male_over_40(cdr, [ {name, sex, age} | rl ])
  end
  # Non-matching head: skip it and recurse on the tail.
  defp extract_male_over_40([ _ | cdr ], rl) do
    extract_male_over_40(cdr, rl)
  end

  # Same accumulator pattern, but with an OR condition in the guard.
  defp extract_male_or_over_40([], rl), do: rl
  defp extract_male_or_over_40([ {name, sex, age} | cdr ], rl) when sex == "M" or age > 40 do
    extract_male_or_over_40(cdr, [ {name, sex, age} | rl ])
  end
  defp extract_male_or_over_40([ _ | cdr ], rl) do
    extract_male_or_over_40(cdr, rl)
  end
end
7996598e8a7b1fbbc9795680c5836702dbdd272d | 24,856 | exs | Elixir | lib/elixir/test/elixir/base_test.exs | davidsulc/elixir | dd4fd6ab742acd75862e34e26dbdb86e0cf6453f | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/base_test.exs | davidsulc/elixir | dd4fd6ab742acd75862e34e26dbdb86e0cf6453f | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/base_test.exs | davidsulc/elixir | dd4fd6ab742acd75862e34e26dbdb86e0cf6453f | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule BaseTest do
  @moduledoc """
  Tests for the `Base` module: base-16, base-32 (standard and extended-hex
  alphabets) and base-64 (standard and URL-safe alphabets) encoding and
  decoding, including the `:case`, `:padding` and `:ignore` options and the
  raising (`!`) variants. Test vectors follow RFC 4648.
  """
  use ExUnit.Case, async: true
  doctest Base
  import Base

  # --- base 16 ---
  test "encode16/1" do
    assert "" == encode16("")
    assert "66" == encode16("f")
    assert "666F" == encode16("fo")
    assert "666F6F" == encode16("foo")
    assert "666F6F62" == encode16("foob")
    assert "666F6F6261" == encode16("fooba")
    assert "666F6F626172" == encode16("foobar")
    assert "A1B2C3D4E5F67891" == encode16(<<161, 178, 195, 212, 229, 246, 120, 145>>)
    assert "a1b2c3d4e5f67891" == encode16(<<161, 178, 195, 212, 229, 246, 120, 145>>, case: :lower)
  end
  test "decode16/1" do
    assert {:ok, ""} == decode16("")
    assert {:ok, "f"} == decode16("66")
    assert {:ok, "fo"} == decode16("666F")
    assert {:ok, "foo"} == decode16("666F6F")
    assert {:ok, "foob"} == decode16("666F6F62")
    assert {:ok, "fooba"} == decode16("666F6F6261")
    assert {:ok, "foobar"} == decode16("666F6F626172")
    assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("A1B2C3D4E5F67891")
    assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("a1b2c3d4e5f67891", case: :lower)
    assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("a1B2c3D4e5F67891", case: :mixed)
  end
  test "decode16!/1" do
    assert "" == decode16!("")
    assert "f" == decode16!("66")
    assert "fo" == decode16!("666F")
    assert "foo" == decode16!("666F6F")
    assert "foob" == decode16!("666F6F62")
    assert "fooba" == decode16!("666F6F6261")
    assert "foobar" == decode16!("666F6F626172")
    assert <<161, 178, 195, 212, 229, 246, 120, 145>> == decode16!("A1B2C3D4E5F67891")
    assert <<161, 178, 195, 212, 229, 246, 120, 145>> == decode16!("a1b2c3d4e5f67891", case: :lower)
    assert <<161, 178, 195, 212, 229, 246, 120, 145>> == decode16!("a1B2c3D4e5F67891", case: :mixed)
  end
  # Wrong-case digits are also "non-alphabet" for the selected alphabet.
  test "decode16/1 errors on non-alphabet digit" do
    assert :error == decode16("66KF")
    assert :error == decode16("66ff")
    assert :error == decode16("66FF", case: :lower)
  end
  test "decode16!/1 errors on non-alphabet digit" do
    assert_raise ArgumentError, "non-alphabet digit found: \"K\" (byte 75)", fn ->
      decode16!("66KF")
    end
    assert_raise ArgumentError, "non-alphabet digit found: \"f\" (byte 102)", fn ->
      decode16!("66ff")
    end
    assert_raise ArgumentError, "non-alphabet digit found: \"F\" (byte 70)", fn ->
      decode16!("66FF", case: :lower)
    end
  end
  # Base-16 input must come in whole bytes (two hex digits each).
  test "decode16/1 errors on odd-length string" do
    assert :error == decode16("666")
  end
  test "decode16!/1 errors odd-length string" do
    assert_raise ArgumentError, "odd-length string", fn ->
      decode16!("666")
    end
  end

  # --- base 64 (standard alphabet) ---
  test "encode64/1 can deal with empty strings" do
    assert "" == encode64("")
  end
  test "encode64/1 with two pads" do
    assert "QWxhZGRpbjpvcGVuIHNlc2FtZQ==" == encode64("Aladdin:open sesame")
  end
  test "encode64/1 with one pad" do
    assert "SGVsbG8gV29ybGQ=" == encode64("Hello World")
  end
  test "encode64/1 with no pad" do
    assert "QWxhZGRpbjpvcGVuIHNlc2Ft" == encode64("Aladdin:open sesam")
    assert "MDEyMzQ1Njc4OSFAIzBeJiooKTs6PD4sLiBbXXt9" == encode64(<<"0123456789!@#0^&*();:<>,. []{}">>)
  end
  test "encode64/1 with one pad and ignoring padding" do
    assert "SGVsbG8gV29ybGQ" == encode64("Hello World", padding: false)
  end
  test "encode64/1 with two pads and ignoring padding" do
    assert "QWxhZGRpbjpvcGVuIHNlc2FtZQ" == encode64("Aladdin:open sesame", padding: false)
  end
  test "encode64/1 with no pads and ignoring padding" do
    assert "QWxhZGRpbjpvcGVuIHNlc2Ft" == encode64("Aladdin:open sesam", padding: false)
  end
  test "decode64/1 can deal with empty strings" do
    assert {:ok, ""} == decode64("")
  end
  test "decode64!/1 can deal with empty strings" do
    assert "" == decode64!("")
  end
  test "decode64/1 with two pads" do
    assert {:ok, "Aladdin:open sesame"} == decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
  end
  test "decode64!/1 with two pads" do
    assert "Aladdin:open sesame" == decode64!("QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
  end
  test "decode64/1 with one pad" do
    assert {:ok, "Hello World"} == decode64("SGVsbG8gV29ybGQ=")
  end
  test "decode64!/1 with one pad" do
    assert "Hello World" == decode64!("SGVsbG8gV29ybGQ=")
  end
  test "decode64/1 with no pad" do
    assert {:ok, "Aladdin:open sesam"} == decode64("QWxhZGRpbjpvcGVuIHNlc2Ft")
  end
  test "decode64!/1 with no pad" do
    assert "Aladdin:open sesam" == decode64!("QWxhZGRpbjpvcGVuIHNlc2Ft")
  end
  test "decode64/1 errors on non-alphabet digit" do
    assert :error == decode64("Zm9)")
  end
  test "decode64!/1 errors on non-alphabet digit" do
    assert_raise ArgumentError, "non-alphabet digit found: \")\" (byte 41)", fn ->
      decode64!("Zm9)")
    end
  end
  # `ignore: :whitespace` lets embedded \n, \s and \t be skipped on decode.
  test "decode64/1 errors on whitespace unless there's ignore: :whitespace" do
    assert :error == decode64("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t")
    assert {:ok, "Aladdin:open sesam"} == decode64("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t", ignore: :whitespace)
  end
  test "decode64!/1 errors on whitespace unless there's ignore: :whitespace" do
    assert_raise ArgumentError, "non-alphabet digit found: \"\\n\" (byte 10)", fn ->
      decode64!("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t")
    end
    assert "Aladdin:open sesam" == decode64!("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t", ignore: :whitespace)
  end
  test "decode64/1 errors on incorrect padding" do
    assert :error == decode64("SGVsbG8gV29ybGQ")
  end
  test "decode64!/1 errors on incorrect padding" do
    assert_raise ArgumentError, "incorrect padding", fn ->
      decode64!("SGVsbG8gV29ybGQ")
    end
  end
  test "decode64/2 with two pads and ignoring padding" do
    assert {:ok, "Aladdin:open sesame"} == decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ", padding: false)
  end
  test "decode64!/2 with two pads and ignoring padding" do
    assert "Aladdin:open sesame" == decode64!("QWxhZGRpbjpvcGVuIHNlc2FtZQ", padding: false)
  end
  test "decode64/2 with one pad and ignoring padding" do
    assert {:ok, "Hello World"} == decode64("SGVsbG8gV29ybGQ", padding: false)
  end
  test "decode64!/2 with one pad and ignoring padding" do
    assert "Hello World" == decode64!("SGVsbG8gV29ybGQ", padding: false)
  end
  test "decode64/2 with no pad and ignoring padding" do
    assert {:ok, "Aladdin:open sesam"} == decode64("QWxhZGRpbjpvcGVuIHNlc2Ft", padding: false)
  end
  test "decode64!/2 with no pad and ignoring padding" do
    assert "Aladdin:open sesam" == decode64!("QWxhZGRpbjpvcGVuIHNlc2Ft", padding: false)
  end
  test "decode64/2 with incorrect padding and ignoring padding" do
    assert {:ok, "Hello World"} == decode64("SGVsbG8gV29ybGQ", padding: false)
  end
  test "decode64!/2 with incorrect padding and ignoring padding" do
    assert "Hello World" == decode64!("SGVsbG8gV29ybGQ", padding: false)
  end

  # --- base 64 (URL-safe alphabet: `-` and `_` replace `+` and `/`) ---
  test "url_encode64/1 can deal with empty strings" do
    assert "" == url_encode64("")
  end
  test "url_encode64/1 with two pads" do
    assert "QWxhZGRpbjpvcGVuIHNlc2FtZQ==" == url_encode64("Aladdin:open sesame")
  end
  test "url_encode64/1 with one pad" do
    assert "SGVsbG8gV29ybGQ=" == url_encode64("Hello World")
  end
  test "url_encode64/1 with no pad" do
    assert "QWxhZGRpbjpvcGVuIHNlc2Ft" == url_encode64("Aladdin:open sesam")
    assert "MDEyMzQ1Njc4OSFAIzBeJiooKTs6PD4sLiBbXXt9" == url_encode64(<<"0123456789!@#0^&*();:<>,. []{}">>)
  end
  test "url_encode64/2 with two pads and ignoring padding" do
    assert "QWxhZGRpbjpvcGVuIHNlc2FtZQ" == url_encode64("Aladdin:open sesame", padding: false)
  end
  test "url_encode64/2 with one pad and ignoring padding" do
    assert "SGVsbG8gV29ybGQ" == url_encode64("Hello World", padding: false)
  end
  test "url_encode64/2 with no pad and ignoring padding" do
    assert "QWxhZGRpbjpvcGVuIHNlc2Ft" == url_encode64("Aladdin:open sesam", padding: false)
  end
  test "url_encode64/1 doesn't produce URL-unsafe characters" do
    refute "/3/+/A==" == url_encode64(<<255, 127, 254, 252>>)
    assert "_3_-_A==" == url_encode64(<<255, 127, 254, 252>>)
  end
  test "url_decode64/1 can deal with empty strings" do
    assert {:ok, ""} == url_decode64("")
  end
  test "url_decode64!/1 can deal with empty strings" do
    assert "" == url_decode64!("")
  end
  test "url_decode64/1 with two pads" do
    assert {:ok, "Aladdin:open sesame"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
  end
  test "url_decode64!/1 with two pads" do
    assert "Aladdin:open sesame" == url_decode64!("QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
  end
  test "url_decode64/1 with one pad" do
    assert {:ok, "Hello World"} == url_decode64("SGVsbG8gV29ybGQ=")
  end
  test "url_decode64!/1 with one pad" do
    assert "Hello World" == url_decode64!("SGVsbG8gV29ybGQ=")
  end
  test "url_decode64/1 with no pad" do
    assert {:ok, "Aladdin:open sesam"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2Ft")
  end
  test "url_decode64!/1 with no pad" do
    assert "Aladdin:open sesam" == url_decode64!("QWxhZGRpbjpvcGVuIHNlc2Ft")
  end
  test "url_decode64/1,2 error on whitespace unless there's ignore: :whitespace" do
    assert :error == url_decode64("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t")
    assert {:ok, "Aladdin:open sesam"} == url_decode64("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t", ignore: :whitespace)
  end
  test "url_decode64!/1,2 error on whitespace unless there's ignore: :whitespace" do
    assert_raise ArgumentError, "non-alphabet digit found: \"\\n\" (byte 10)", fn ->
      url_decode64!("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t")
    end
    assert "Aladdin:open sesam" == url_decode64!("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t", ignore: :whitespace)
  end
  test "url_decode64/1 errors on non-alphabet digit" do
    assert :error == url_decode64("Zm9)")
  end
  test "url_decode64!/1 errors on non-alphabet digit" do
    assert_raise ArgumentError, "non-alphabet digit found: \")\" (byte 41)", fn ->
      url_decode64!("Zm9)")
    end
  end
  test "url_decode64/1 errors on incorrect padding" do
    assert :error == url_decode64("SGVsbG8gV29ybGQ")
  end
  test "url_decode64!/1 errors on incorrect padding" do
    assert_raise ArgumentError, "incorrect padding", fn ->
      url_decode64!("SGVsbG8gV29ybGQ")
    end
  end
  test "url_decode64/2 with two pads and ignoring padding" do
    assert {:ok, "Aladdin:open sesame"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ", padding: false)
  end
  test "url_decode64!/2 with two pads and ignoring padding" do
    assert "Aladdin:open sesame" == url_decode64!("QWxhZGRpbjpvcGVuIHNlc2FtZQ", padding: false)
  end
  test "url_decode64/2 with one pad and ignoring padding" do
    assert {:ok, "Hello World"} == url_decode64("SGVsbG8gV29ybGQ", padding: false)
  end
  test "url_decode64!/2 with one pad and ignoring padding" do
    assert "Hello World" == url_decode64!("SGVsbG8gV29ybGQ", padding: false)
  end
  test "url_decode64/2 with no pad and ignoring padding" do
    assert {:ok, "Aladdin:open sesam"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2Ft", padding: false)
  end
  test "url_decode64!/2 with no pad and ignoring padding" do
    assert "Aladdin:open sesam" == url_decode64!("QWxhZGRpbjpvcGVuIHNlc2Ft", padding: false)
  end
  test "url_decode64/2 ignores incorrect padding when :padding is false" do
    assert {:ok, "Hello World"} == url_decode64("SGVsbG8gV29ybGQ", padding: false)
  end
  test "url_decode64!/2 ignores incorrect padding when :padding is false" do
    assert "Hello World" == url_decode64!("SGVsbG8gV29ybGQ", padding: false)
  end

  # --- base 32 (standard alphabet) ---
  test "encode32/1 can deal with empty strings" do
    assert "" == encode32("")
  end
  test "encode32/1 with one pad" do
    assert "MZXW6YQ=" == encode32("foob")
  end
  test "encode32/1 with three pads" do
    assert "MZXW6===" == encode32("foo")
  end
  test "encode32/1 with four pads" do
    assert "MZXQ====" == encode32("fo")
  end
  test "encode32/1 with six pads" do
    assert "MZXW6YTBOI======" == encode32("foobar")
    assert "MY======" == encode32("f")
  end
  test "encode32/1 with no pads" do
    assert "MZXW6YTB" == encode32("fooba")
  end
  test "encode32/2 with one pad and ignoring padding" do
    assert "MZXW6YQ" == encode32("foob", padding: false)
  end
  test "encode32/2 with three pads and ignoring padding" do
    assert "MZXW6" == encode32("foo", padding: false)
  end
  test "encode32/2 with four pads and ignoring padding" do
    assert "MZXQ" == encode32("fo", padding: false)
  end
  test "encode32/2 with six pads and ignoring padding" do
    assert "MZXW6YTBOI" == encode32("foobar", padding: false)
  end
  test "encode32/2 with no pads and ignoring padding" do
    assert "MZXW6YTB" == encode32("fooba", padding: false)
  end
  test "encode32/2 with lowercase" do
    assert "mzxw6ytb" == encode32("fooba", case: :lower)
  end
  test "decode32/1 can deal with empty strings" do
    assert {:ok, ""} == decode32("")
  end
  test "decode32!/2 can deal with empty strings" do
    assert "" == decode32!("")
  end
  test "decode32/1 with one pad" do
    assert {:ok, "foob"} == decode32("MZXW6YQ=")
  end
  test "decode32!/1 with one pad" do
    assert "foob" == decode32!("MZXW6YQ=")
  end
  test "decode32/1 with three pads" do
    assert {:ok, "foo"} == decode32("MZXW6===")
  end
  test "decode32!/1 with three pads" do
    assert "foo" == decode32!("MZXW6===")
  end
  test "decode32/1 with four pads" do
    assert {:ok, "fo"} == decode32("MZXQ====")
  end
  test "decode32!/1 with four pads" do
    assert "fo" == decode32!("MZXQ====")
  end
  test "decode32/2 with lowercase" do
    assert {:ok, "fo"} == decode32("mzxq====", case: :lower)
  end
  test "decode32!/2 with lowercase" do
    assert "fo" == decode32!("mzxq====", case: :lower)
  end
  test "decode32/2 with mixed case" do
    assert {:ok, "fo"} == decode32("mZXq====", case: :mixed)
  end
  test "decode32!/2 with mixed case" do
    assert "fo" == decode32!("mZXq====", case: :mixed)
  end
  test "decode32/1 with six pads" do
    assert {:ok, "foobar"} == decode32("MZXW6YTBOI======")
    assert {:ok, "f"} == decode32("MY======")
  end
  test "decode32!/1 with six pads" do
    assert "foobar" == decode32!("MZXW6YTBOI======")
    assert "f" == decode32!("MY======")
  end
  test "decode32/1 with no pads" do
    assert {:ok, "fooba"} == decode32("MZXW6YTB")
  end
  test "decode32!/1 with no pads" do
    assert "fooba" == decode32!("MZXW6YTB")
  end
  test "decode32/1,2 error on non-alphabet digit" do
    assert :error == decode32("MZX)6YTB")
    assert :error == decode32("66ff")
    assert :error == decode32("66FF", case: :lower)
  end
  test "decode32!/1,2 error on non-alphabet digit" do
    assert_raise ArgumentError, "non-alphabet digit found: \")\" (byte 41)", fn ->
      decode32!("MZX)6YTB")
    end
    assert_raise ArgumentError, "non-alphabet digit found: \"m\" (byte 109)", fn ->
      decode32!("mzxw6ytboi======")
    end
    assert_raise ArgumentError, "non-alphabet digit found: \"M\" (byte 77)", fn ->
      decode32!("MZXW6YTBOI======", case: :lower)
    end
  end
  test "decode32/1 errors on incorrect padding" do
    assert :error == decode32("MZXW6YQ")
  end
  test "decode32!/1 errors on incorrect padding" do
    assert_raise ArgumentError, "incorrect padding", fn ->
      decode32!("MZXW6YQ")
    end
  end
  test "decode32/2 with one pad and :padding to false" do
    assert {:ok, "foob"} == decode32("MZXW6YQ", padding: false)
  end
  test "decode32!/2 with one pad and :padding to false" do
    assert "foob" == decode32!("MZXW6YQ", padding: false)
  end
  test "decode32/2 with three pads and ignoring padding" do
    assert {:ok, "foo"} == decode32("MZXW6", padding: false)
  end
  test "decode32!/2 with three pads and ignoring padding" do
    assert "foo" == decode32!("MZXW6", padding: false)
  end
  test "decode32/2 with four pads and ignoring padding" do
    assert {:ok, "fo"} == decode32("MZXQ", padding: false)
  end
  test "decode32!/2 with four pads and ignoring padding" do
    assert "fo" == decode32!("MZXQ", padding: false)
  end
  test "decode32/2 with :lower case and ignoring padding" do
    assert {:ok, "fo"} == decode32("mzxq", case: :lower, padding: false)
  end
  test "decode32!/2 with :lower case and ignoring padding" do
    assert "fo" == decode32!("mzxq", case: :lower, padding: false)
  end
  test "decode32/2 with :mixed case and ignoring padding" do
    assert {:ok, "fo"} == decode32("mZXq", case: :mixed, padding: false)
  end
  test "decode32!/2 with :mixed case and ignoring padding" do
    assert "fo" == decode32!("mZXq", case: :mixed, padding: false)
  end
  test "decode32/2 with six pads and ignoring padding" do
    assert {:ok, "foobar"} == decode32("MZXW6YTBOI", padding: false)
  end
  test "decode32!/2 with six pads and ignoring padding" do
    assert "foobar" == decode32!("MZXW6YTBOI", padding: false)
  end
  test "decode32/2 with no pads and ignoring padding" do
    assert {:ok, "fooba"} == decode32("MZXW6YTB", padding: false)
  end
  test "decode32!/2 with no pads and ignoring padding" do
    assert "fooba" == decode32!("MZXW6YTB", padding: false)
  end
  test "decode32/2 ignores incorrect padding when :padding is false" do
    assert {:ok, "foob"} == decode32("MZXW6YQ", padding: false)
  end
  test "decode32!/2 ignores incorrect padding when :padding is false" do
    "foob" = decode32!("MZXW6YQ", padding: false)
  end

  # --- base 32 (extended hex alphabet, RFC 4648 "base32hex") ---
  test "hex_encode32/1 can deal with empty strings" do
    assert "" == hex_encode32("")
  end
  test "hex_encode32/1 with one pad" do
    assert "CPNMUOG=" == hex_encode32("foob")
  end
  test "hex_encode32/1 with three pads" do
    assert "CPNMU===" == hex_encode32("foo")
  end
  test "hex_encode32/1 with four pads" do
    assert "CPNG====" == hex_encode32("fo")
  end
  test "hex_encode32/1 with six pads" do
    assert "CPNMUOJ1E8======" == hex_encode32("foobar")
    assert "CO======" == hex_encode32("f")
  end
  test "hex_encode32/1 with no pads" do
    assert "CPNMUOJ1" == hex_encode32("fooba")
  end
  test "hex_encode32/2 with one pad and ignoring padding" do
    assert "CPNMUOG" == hex_encode32("foob", padding: false)
  end
  test "hex_encode32/2 with three pads and ignoring padding" do
    assert "CPNMU" == hex_encode32("foo", padding: false)
  end
  test "hex_encode32/2 with four pads and ignoring padding" do
    assert "CPNG" == hex_encode32("fo", padding: false)
  end
  test "hex_encode32/2 with six pads and ignoring padding" do
    assert "CPNMUOJ1E8" == hex_encode32("foobar", padding: false)
  end
  test "hex_encode32/2 with no pads and ignoring padding" do
    assert "CPNMUOJ1" == hex_encode32("fooba", padding: false)
  end
  test "hex_encode32/2 with lowercase" do
    assert "cpnmuoj1" == hex_encode32("fooba", case: :lower)
  end
  test "hex_decode32/1 can deal with empty strings" do
    assert {:ok, ""} == hex_decode32("")
  end
  test "hex_decode32!/1 can deal with empty strings" do
    assert "" == hex_decode32!("")
  end
  test "hex_decode32/1 with one pad" do
    assert {:ok, "foob"} == hex_decode32("CPNMUOG=")
  end
  test "hex_decode32!/1 with one pad" do
    assert "foob" == hex_decode32!("CPNMUOG=")
  end
  test "hex_decode32/1 with three pads" do
    assert {:ok, "foo"} == hex_decode32("CPNMU===")
  end
  test "hex_decode32!/1 with three pads" do
    assert "foo" == hex_decode32!("CPNMU===")
  end
  test "hex_decode32/1 with four pads" do
    assert {:ok, "fo"} == hex_decode32("CPNG====")
  end
  test "hex_decode32!/1 with four pads" do
    assert "fo" == hex_decode32!("CPNG====")
  end
  test "hex_decode32/1 with six pads" do
    assert {:ok, "foobar"} == hex_decode32("CPNMUOJ1E8======")
    assert {:ok, "f"} == hex_decode32("CO======")
  end
  test "hex_decode32!/1 with six pads" do
    assert "foobar" == hex_decode32!("CPNMUOJ1E8======")
    assert "f" == hex_decode32!("CO======")
  end
  test "hex_decode32/1 with no pads" do
    assert {:ok, "fooba"} == hex_decode32("CPNMUOJ1")
  end
  test "hex_decode32!/1 with no pads" do
    assert "fooba" == hex_decode32!("CPNMUOJ1")
  end
  test "hex_decode32/1,2 error on non-alphabet digit" do
    assert :error == hex_decode32("CPN)UOJ1")
    assert :error == hex_decode32("66f")
    assert :error == hex_decode32("66F", case: :lower)
  end
  test "hex_decode32!/1,2 error non-alphabet digit" do
    assert_raise ArgumentError, "non-alphabet digit found: \")\" (byte 41)", fn ->
      hex_decode32!("CPN)UOJ1")
    end
    assert_raise ArgumentError, "non-alphabet digit found: \"c\" (byte 99)", fn ->
      hex_decode32!("cpnmuoj1e8======")
    end
    assert_raise ArgumentError, "non-alphabet digit found: \"C\" (byte 67)", fn ->
      hex_decode32!("CPNMUOJ1E8======", case: :lower)
    end
  end
  test "hex_decode32/1 errors on incorrect padding" do
    assert :error == hex_decode32("CPNMUOG")
  end
  test "hex_decode32!/1 errors on incorrect padding" do
    assert_raise ArgumentError, "incorrect padding", fn ->
      hex_decode32!("CPNMUOG")
    end
  end
  test "hex_decode32/2 with lowercase" do
    assert {:ok, "fo"} == hex_decode32("cpng====", case: :lower)
  end
  test "hex_decode32!/2 with lowercase" do
    assert "fo" == hex_decode32!("cpng====", case: :lower)
  end
  test "hex_decode32/2 with mixed case" do
    assert {:ok, "fo"} == hex_decode32("cPNg====", case: :mixed)
  end
  test "hex_decode32!/2 with mixed case" do
    assert "fo" == hex_decode32!("cPNg====", case: :mixed)
  end
  # NUL is not in the base-16 alphabet; ensures the error message renders it.
  test "decode16!/1 errors on non-UTF-8 char" do
    assert_raise ArgumentError, "non-alphabet digit found: \"\\0\" (byte 0)", fn ->
      decode16!("012" <> <<0>>)
    end
  end
  test "hex_decode32/2 with one pad and ignoring padding" do
    assert {:ok, "foob"} == hex_decode32("CPNMUOG", padding: false)
  end
  test "hex_decode32!/2 with one pad and ignoring padding" do
    assert "foob" == hex_decode32!("CPNMUOG", padding: false)
  end
  test "hex_decode32/2 with three pads and ignoring padding" do
    assert {:ok, "foo"} == hex_decode32("CPNMU", padding: false)
  end
  test "hex_decode32!/2 with three pads and ignoring padding" do
    assert "foo" == hex_decode32!("CPNMU", padding: false)
  end
  test "hex_decode32/2 with four pads and ignoring padding" do
    assert {:ok, "fo"} == hex_decode32("CPNG", padding: false)
  end
  test "hex_decode32!/2 with four pads and ignoring padding" do
    assert "fo" == hex_decode32!("CPNG", padding: false)
  end
  test "hex_decode32/2 with six pads and ignoring padding" do
    assert {:ok, "foobar"} == hex_decode32("CPNMUOJ1E8", padding: false)
  end
  test "hex_decode32!/2 with six pads and ignoring padding" do
    assert "foobar" == hex_decode32!("CPNMUOJ1E8", padding: false)
  end
  test "hex_decode32/2 with no pads and ignoring padding" do
    assert {:ok, "fooba"} == hex_decode32("CPNMUOJ1", padding: false)
  end
  test "hex_decode32!/2 with no pads and ignoring padding" do
    assert "fooba" == hex_decode32!("CPNMUOJ1", padding: false)
  end
  test "hex_decode32/2 ignores incorrect padding when :padding is false" do
    assert {:ok, "foob"} == hex_decode32("CPNMUOG", padding: false)
  end
  test "hex_decode32!/2 ignores incorrect padding when :padding is false" do
    "foob" = hex_decode32!("CPNMUOG", padding: false)
  end
  test "hex_decode32/2 with :lower case and ignoring padding" do
    assert {:ok, "fo"} == hex_decode32("cpng", case: :lower, padding: false)
  end
  test "hex_decode32!/2 with :lower case and ignoring padding" do
    assert "fo" == hex_decode32!("cpng", case: :lower, padding: false)
  end
  test "hex_decode32/2 with :mixed case and ignoring padding" do
    assert {:ok, "fo"} == hex_decode32("cPNg====", case: :mixed, padding: false)
  end
  test "hex_decode32!/2 with :mixed case and ignoring padding" do
    assert "fo" == hex_decode32!("cPNg", case: :mixed, padding: false)
  end

  # Round-trip property: encode/decode pairs are inverses across every
  # alphabet, case option, padding option and input length 0..256.
  test "encode then decode is identity" do
    for {encode, decode} <- [{&encode16/2, &decode16!/2},
                             {&encode32/2, &decode32!/2},
                             {&hex_encode32/2, &hex_decode32!/2},
                             {&encode64/2, &decode64!/2},
                             {&url_encode64/2, &url_decode64!/2}],
        encode_case <- [:upper, :lower],
        decode_case <- [:upper, :lower, :mixed],
        (encode_case == decode_case) or (decode_case == :mixed),
        pad? <- [true, false],
        len <- 0..256 do
      data =
        0
        |> :lists.seq(len - 1)
        |> Enum.shuffle()
        |> IO.iodata_to_binary()
      expected =
        data
        |> encode.([case: encode_case, pad: pad?])
        |> decode.([case: decode_case, pad: pad?])
      assert data == expected, "identity did not match for #{inspect data} when #{inspect encode} (#{encode_case})"
    end
  end
end
79968263a07c0210a5a94167b38e9dd9725c7256 | 6,132 | exs | Elixir | test/commodity_api/iam/user/phone_number/phone_number_policy_test.exs | akdilsiz/commodity-cloud | 08c366c9fc95fbb3565131672db4cc52f8b870c9 | [
"Apache-2.0"
] | 7 | 2019-04-11T21:12:49.000Z | 2021-04-14T12:56:42.000Z | test/commodity_api/iam/user/phone_number/phone_number_policy_test.exs | akdilsiz/commodity-cloud | 08c366c9fc95fbb3565131672db4cc52f8b870c9 | [
"Apache-2.0"
] | null | null | null | test/commodity_api/iam/user/phone_number/phone_number_policy_test.exs | akdilsiz/commodity-cloud | 08c366c9fc95fbb3565131672db4cc52f8b870c9 | [
"Apache-2.0"
] | 2 | 2019-06-06T18:05:33.000Z | 2019-07-16T08:49:45.000Z | ##
# Copyright 2018 Abdulkadir DILSIZ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
defmodule Commodity.Api.Iam.User.PhoneNumberPolicyTest do
	@moduledoc """
	Authorization (policy) tests for the user phone number endpoints when the
	requesting user holds only the "Self" permission set: requests against the
	user's own resources succeed, requests against another user's resources
	are rejected with HTTP 403.
	"""
	use Commodity.ConnCase

	alias Commodity.Factory

	# Every request in this module is issued as the :user_two fixture login.
	@moduletag login: :user_two

	# index: listing one's own phone numbers is allowed under "Self".
	test "list all user phone_numbers with given identifier and self permission",
		%{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		conn = get conn, iam_user_phone_number_path(conn, :index, user.id)
		assert conn.status == 200
	end
	# index: a different user's listing is forbidden.
	test "should be 403 error list all user phone_numbers with other " <>
		"user identifier and self permission",
		%{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		# Rebind `user` to a freshly inserted, unrelated user.
		user = Factory.insert(:user)
		conn = get conn, iam_user_phone_number_path(conn, :index, user.id)
		assert conn.status == 403
	end
	# show: own phone number is visible.
	test "show a user phone_number with given identifiers and self permission",
		%{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		phone_number = Factory.insert(:user_phone_number, user: user)
		conn =
			get conn, iam_user_phone_number_path(conn, :show, user.id,
				phone_number.id)
		assert conn.status == 200
	end
	# show: another user's phone number is forbidden.
	test "should be 403 error show a user phone_number with given identifiers " <>
		"(other user identifier) and self permission",
		%{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		user = Factory.insert(:user)
		phone_number = Factory.insert(:user_phone_number, user: user)
		conn =
			get conn, iam_user_phone_number_path(conn, :show, user.id,
				phone_number.id)
		assert conn.status == 403
	end
	# create: adding a phone number to oneself is allowed.
	test "create a user phone_number with given identifier, valid params " <>
		"and self permission",
		%{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		conn =
			post conn, iam_user_phone_number_path(conn, :create, user.id),
				phone_number: %{value: "905111111111"}
		assert conn.status == 201
	end
	# create: adding a phone number to another user is forbidden.
	test "should be 403 create a user phone_number with other user identifier, " <>
		"valid params and self permission", %{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		user = Factory.insert(:user)
		conn =
			post conn, iam_user_phone_number_path(conn, :create, user.id),
				phone_number: %{value: "905111111111"}
		assert conn.status == 403
	end
	# update: replacing one's own phone number is allowed. The Redis set is
	# primed because the endpoint reads cached phone numbers from there.
	test "replace a user phone_number with given identifier, valid params " <>
		"and self permission",
		%{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		phone_number = Factory.insert(:user_phone_number, user: user)
		{:ok, "1"} =
			Rediscl.Query.sadd("#{@redis_keys[:user].phone_number.all}:#{user.id}",
				[Jason.encode!(phone_number)])
		conn =
			put conn, iam_user_phone_number_path(conn, :update, user.id,
				phone_number.id),
				phone_number: %{value: "905111111111"}
		assert conn.status == 200
	end
	# update: replacing another user's phone number is forbidden.
	test "should be 403 error replace a user phone_number with other " <>
		"user identifier, valid params and self permission",
		%{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		user = Factory.insert(:user)
		phone_number = Factory.insert(:user_phone_number, user: user)
		conn =
			put conn, iam_user_phone_number_path(conn, :update, user.id, phone_number.id),
				phone_number: %{value: "905111111111"}
		assert conn.status == 403
	end
	# delete: removing one's own phone number is allowed. Both the Redis set
	# and the single-entry cache key are primed before the request.
	test "delete a user phone_number with given identifier and self permission",
		%{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		phone_number = Factory.insert(:user_phone_number, user: user)
		{:ok, "1"} =
			Rediscl.Query.sadd("#{@redis_keys[:user].phone_number.all}:#{user.id}",
				[Jason.encode!(phone_number)])
		{:ok, "OK"} =
			Rediscl.Query.set("#{@redis_keys[:user].phone_number.one}:" <>
				"#{user.id}:#{phone_number.id}", Jason.encode!(phone_number))
		conn =
			delete conn, iam_user_phone_number_path(conn, :delete, user.id,
				phone_number.id)
		assert conn.status == 204
	end
	# delete: removing another user's phone number is forbidden.
	test "should be 403 error delete a user phone_number with other user " <>
		"identifier and self permission",
		%{conn: conn, user: user} do
		permission_set = Repo.get_by!(PermissionSet, name: "Self")
		Factory.insert(:permission_set_grant, user: user, target_user: user,
			permission_set: permission_set)
		user = Factory.insert(:user)
		phone_number = Factory.insert(:user_phone_number, user: user)
		conn =
			delete conn, iam_user_phone_number_path(conn, :delete, user.id,
				phone_number.id)
		assert conn.status == 403
	end
end
7996af50570ef75c27826898cd2822ada555f4d0 | 4,365 | ex | Elixir | lib/memo_web/controllers/user_auth.ex | ashkan18/memo | da62914abff2f4f4c75ad6b996e3f6c3d5e9ad64 | [
"MIT"
] | null | null | null | lib/memo_web/controllers/user_auth.ex | ashkan18/memo | da62914abff2f4f4c75ad6b996e3f6c3d5e9ad64 | [
"MIT"
] | null | null | null | lib/memo_web/controllers/user_auth.ex | ashkan18/memo | da62914abff2f4f4c75ad6b996e3f6c3d5e9ad64 | [
"MIT"
] | null | null | null | defmodule MemoWeb.UserAuth do
  @moduledoc """
  Plug helpers for session-based user authentication: logging users in and
  out, loading the current user from the session (or the signed
  "remember me" cookie), and plugs that gate routes on authentication state.
  """
  import Plug.Conn
  import Phoenix.Controller
  alias Memo.Accounts
  alias MemoWeb.Router.Helpers, as: Routes
  # Make the remember me cookie valid for 60 days.
  # If you want bump or reduce this value, also change
  # the token expiry itself in UserToken.
  @max_age 60 * 60 * 24 * 60
  @remember_me_cookie "_memo_web_user_remember_me"
  @remember_me_options [sign: true, max_age: @max_age, same_site: "Lax"]
  @doc """
  Logs the user in.
  It renews the session ID and clears the whole session
  to avoid fixation attacks. See the renew_session
  function to customize this behaviour.
  It also sets a `:live_socket_id` key in the session,
  so LiveView sessions are identified and automatically
  disconnected on log out. The line can be safely removed
  if you are not using LiveView.
  """
  def log_in_user(conn, user, params \\ %{}) do
    token = Accounts.generate_user_session_token(user)
    user_return_to = get_session(conn, :user_return_to)
    conn
    |> renew_session()
    |> put_session(:user_token, token)
    |> put_session(:live_socket_id, "users_sessions:#{Base.url_encode64(token)}")
    |> maybe_write_remember_me_cookie(token, params)
    |> redirect(to: user_return_to || signed_in_path(conn))
  end
  # Only persist the signed "remember me" cookie when the login form
  # explicitly submitted remember_me: "true"; otherwise leave cookies alone.
  defp maybe_write_remember_me_cookie(conn, token, %{"remember_me" => "true"}) do
    put_resp_cookie(conn, @remember_me_cookie, token, @remember_me_options)
  end
  defp maybe_write_remember_me_cookie(conn, _token, _params) do
    conn
  end
  # This function renews the session ID and erases the whole
  # session to avoid fixation attacks. If there is any data
  # in the session you may want to preserve after log in/log out,
  # you must explicitly fetch the session data before clearing
  # and then immediately set it after clearing, for example:
  #
  #     defp renew_session(conn) do
  #       preferred_locale = get_session(conn, :preferred_locale)
  #
  #       conn
  #       |> configure_session(renew: true)
  #       |> clear_session()
  #       |> put_session(:preferred_locale, preferred_locale)
  #     end
  #
  defp renew_session(conn) do
    conn
    |> configure_session(renew: true)
    |> clear_session()
  end
  @doc """
  Logs the user out.
  It clears all session data for safety. See renew_session.
  """
  def log_out_user(conn) do
    user_token = get_session(conn, :user_token)
    # Invalidate the server-side session token if one exists.
    user_token && Accounts.delete_session_token(user_token)
    if live_socket_id = get_session(conn, :live_socket_id) do
      MemoWeb.Endpoint.broadcast(live_socket_id, "disconnect", %{})
    end
    conn
    |> renew_session()
    |> delete_resp_cookie(@remember_me_cookie)
    |> redirect(to: "/")
  end
  @doc """
  Authenticates the user by looking into the session
  and remember me token.
  """
  def fetch_current_user(conn, _opts) do
    {user_token, conn} = ensure_user_token(conn)
    user = user_token && Accounts.get_user_by_session_token(user_token)
    assign(conn, :current_user, user)
  end
  # Returns the token from the session, falling back to the signed
  # remember-me cookie (and re-populating the session from it) when absent.
  defp ensure_user_token(conn) do
    if user_token = get_session(conn, :user_token) do
      {user_token, conn}
    else
      conn = fetch_cookies(conn, signed: [@remember_me_cookie])
      if user_token = conn.cookies[@remember_me_cookie] do
        {user_token, put_session(conn, :user_token, user_token)}
      else
        {nil, conn}
      end
    end
  end
  @doc """
  Used for routes that require the user to not be authenticated.
  """
  def redirect_if_user_is_authenticated(conn, _opts) do
    if conn.assigns[:current_user] do
      conn
      |> redirect(to: signed_in_path(conn))
      |> halt()
    else
      conn
    end
  end
  @doc """
  Used for routes that require the user to be authenticated.
  If you want to enforce the user email is confirmed before
  they use the application at all, here would be a good place.
  """
  def require_authenticated_user(conn, _opts) do
    if conn.assigns[:current_user] do
      conn
    else
      conn
      |> put_flash(:error, "You must log in to access this page.")
      |> maybe_store_return_to()
      |> redirect(to: Routes.user_session_path(conn, :new))
      |> halt()
    end
  end
  # Only GET requests are safe to replay after login, so only they get a
  # stored return-to path.
  defp maybe_store_return_to(%{method: "GET"} = conn) do
    put_session(conn, :user_return_to, current_path(conn))
  end
  defp maybe_store_return_to(conn), do: conn
  defp signed_in_path(_conn), do: "/"
end
| 29.1 | 81 | 0.6937 |
799706d81ea1daf3ecedb199a454a15f7ccc98b0 | 4,823 | exs | Elixir | lib/makeup_demo/examples/elixir/sandbox.exs | elixir-makeup/makeup_demo | dcb524d8b6b44885ac51486daffa62db3f7b413f | [
"Apache-2.0"
] | null | null | null | lib/makeup_demo/examples/elixir/sandbox.exs | elixir-makeup/makeup_demo | dcb524d8b6b44885ac51486daffa62db3f7b413f | [
"Apache-2.0"
] | null | null | null | lib/makeup_demo/examples/elixir/sandbox.exs | elixir-makeup/makeup_demo | dcb524d8b6b44885ac51486daffa62db3f7b413f | [
"Apache-2.0"
] | null | null | null | # Numbers
0b0101011
1234 ; 0x1A ; 0xbeef ; 0763 ; 0o123
3.14 ; 5.0e21 ; 0.5e-12
100_000_000
# these are not valid numbers
0b012 ; 0xboar ; 0o888
0B01 ; 0XAF ; 0O123
# Characters
?a ; ?1 ; ?\n ; ?\s ; ?\c ; ? ; ?,
?\x{12} ; ?\x{abcd}
?\x34 ; ?\xF
# these show that only the first digit is part of the character
?\123 ; ?\12 ; ?\7
# Atoms
:this ; :that
:'complex atom'
:"with' \"\" 'quotes"
:" multi
line ' \s \123 \xff
atom"
:... ; :<<>> ; :%{} ; :% ; :{}
:++; :--; :*; :~~~; :::
:% ; :. ; :<-
# Strings
"Hello world"
"Interspersed \x{ff} codes \7 \8 \65 \016 and \t\s\\s\z\+ \\ escapes"
"Quotes ' inside \" \123 the \"\" \xF \\xF string \\\" end"
"Multiline
string"
# Char lists
'this is a list'
'escapes \' \t \\\''
'Multiline
char
list
'
# Binaries
<<1, 2, 3>>
<<"hello"::binary, c :: utf8, x::[4, unit(2)]>> = "helloâ„¢1"
# Sigils
~r/this + i\s "a" regex/
~R'this + i\s "a" regex too'
~w(hello #{ ["has" <> "123", '\c\d', "\123 interpol" | []] } world)s
~W(hello #{no "123" \c\d \123 interpol} world)s
~s{Escapes terminators \{ and \}, but no {balancing} # outside of sigil here }
~S"No escapes \s\t\n and no #{interpolation}"
:"atoms work #{"to" <> "o"}"
# Operators
x = 1 + 2.0 * 3
y = true and false; z = false or true
... = 144
... == !x && y || z
"hello" |> String.upcase |> String.downcase()
{^z, a} = {true, x}
# Free operators (added in 1.0.0)
p ~>> f = bind(p, f)
p1 ~> p2 = pair_right(p1, p2)
p1 <~ p2 = pair_left(p1, p2)
p1 <~> p2 = pair_both(p1, p2)
p |~> f = map(p, f)
p1 <|> p2 = either(p1, p2)
# Lists, tuples, maps, keywords
[1, :a, 'hello'] ++ [2, 3]
[:head | [?t, ?a, ?i, ?l]]
{:one, 2.0, "three"}
[...: "this", <<>>: "is", %{}: "a keyword", %: "list", {}: "too"]
["this is an atom too": 1, "so is this": 2]
[option: "value", key: :word]
[++: "operator", ~~~: :&&&]
map = %{shortcut: "syntax"}
%{map | "update" => "me"}
%{ 12 => 13, :weird => ['thing'] }
# Comprehensions
for x <- 1..10, x < 5, do: {x, x}
pixels = "12345678"
for << <<r::4, g::4, b::4, a::size(4)>> <- pixels >> do
[r, {g, %{"b" => a}}]
end
# String interpolation
"String #{inspect "interpolation"} is quite #{1+4+7} difficult"
# Identifiers
abc_123 = 1
_018OP = 2
A__0 == 3
# Modules
defmodule Long.Module.Name do
@moduledoc "Simple module docstring"
@doc """
Multiline docstring
"with quotes"
and #{ inspect %{"interpolation" => "in" <> "action"} }
now with #{ {:a, 'tuple'} }
and #{ inspect {
:tuple,
%{ with: "nested #{ inspect %{ :interpolation => %{} } }" }
} }
"""
defstruct [:a, :name, :height]
@doc ~S'''
No #{interpolation} of any kind.
\000 \x{ff}
\n #{\x{ff}}
'''
def func(a, b \\ []), do: :ok
# Function
def f(x), do: x
# Operator definition (don't highlight the `x`!)
def x + y, do: nil
def x * y, do: nil
def x |> y, do: nil
def x && y, do: nil
def x || y, do: nil
# Word operators
def x and y, do: nil
def x or y, do: nil
def x in y, do: nil
# unquote, quote and unquote_splicing:
def quote(f)(x), do: nil
def unquote(f)(x), do: nil
def unquote_splicing(f)(x), do: nil
# function name that starts with `quote`:
def quote_me(x), do: nil
@doc false
def __before_compile__(_) do
:ok
end
end
# Structs
defmodule Second.Module do
s = %Long.Module.Name{name: "Silly"}
%Long.Module.Name{s | height: {192, :cm}}
".. #{%Long.Module.Name{s | height: {192, :cm}}} .."
end
# Types, pseudo-vars, attributes
defmodule M do
@custom_attr :some_constant
@before_compile Long.Module.Name
@typedoc "This is a type"
@type typ :: integer
@typedoc """
Another type
"""
@opaque typtyp :: 1..10
@spec func(typ, typtyp) :: :ok | :fail
def func(a, b) do
a || b || :ok || :fail
Path.expand("..", __DIR__)
IO.inspect __ENV__
__NOTAPSEUDOVAR__ = 11
__MODULE__.func(b, a)
end
defmacro m() do
__CALLER__
end
end
# Functions
anon = fn x, y, z ->
fn(a, b, c) ->
&(x + y - z * a / &1 + b + div(&2, c))
end
end
&Set.put(&1, &2) ; & Set.put(&1, &2) ; &( Set.put(&1, &1) )
# Function calls
anon.(1, 2, 3); self; hd([1,2,3])
Kernel.spawn(fn -> :ok end)
IO.ANSI.black
# Control flow
if :this do
:that
else
:otherwise
end
pid = self
receive do
{:EXIT, _} -> :done
{^pid, :_} -> nil
after 100 -> :no_luck
end
case __ENV__.line do
x when is_integer(x) -> x
x when x in 1..12 -> -x
end
cond do
false -> "too bad"
4 > 5 -> "oops"
true -> nil
end
# Lexical scope modifiers
import Kernel, except: [spawn: 1, +: 2, /: 2, Unless: 2]
alias Long.Module.Name, as: N0men123_and4
use Bitwise
4 &&& 5
2 <<< 3
# Protocols
defprotocol Useless do
def func1(this)
def func2(that)
end
defimpl Useless, for: Atom do
end
# Exceptions
defmodule NotAnError do
defexception [:message]
end
raise NotAnError, message: "This is not an error"
| 19.063241 | 78 | 0.560854 |
79971cb1900210faa7614b710a5ff53217aa0fb3 | 1,138 | ex | Elixir | lib/id_token/firebase.ex | Joe-noh/id_token | 8486b97dc45f871222a57ba97eca1ef85061daea | [
"MIT"
] | null | null | null | lib/id_token/firebase.ex | Joe-noh/id_token | 8486b97dc45f871222a57ba97eca1ef85061daea | [
"MIT"
] | null | null | null | lib/id_token/firebase.ex | Joe-noh/id_token | 8486b97dc45f871222a57ba97eca1ef85061daea | [
"MIT"
defmodule IDToken.Firebase do
  @moduledoc """
  Predefined callback module for firebase users.

  Fetches Google's public x509 certificates used to verify Firebase ID
  tokens and resolves the verification key matching a token's `"kid"`
  header. Certificate freshness is derived from the HTTP response's
  `cache-control: max-age` directive.
  """
  @behaviour IDToken.Callback

  @impl true
  def fetch_certificates do
    with url = "https://www.googleapis.com/robot/v1/metadata/x509/[email protected]",
         {:ok, %Mojito.Response{body: body, headers: headers}} <- Mojito.request(:get, url),
         {:ok, keys} <- Jason.decode(body),
         {:ok, expires_at} <- expires_at(headers) do
      {:ok, %IDToken.Certificate{body: keys, expires_at: expires_at}}
    else
      tuple = {:error, _} -> tuple
      reason -> {:error, reason}
    end
  end

  @impl true
  def verification_key(%IDToken.Certificate{body: body}, %{"kid" => kid}) do
    # Returns nil when the key id is unknown to the fetched certificate set.
    Map.get(body, kid)
  end

  # Walks the header list looking for "cache-control". HTTP header names are
  # case-insensitive, so compare after downcasing instead of matching the
  # exact lowercase spelling only.
  defp expires_at([{name, value} | tail]) do
    if String.downcase(name) == "cache-control" do
      max_age_expiry(value)
    else
      expires_at(tail)
    end
  end

  defp expires_at([]) do
    {:error, :cache_control_not_found}
  end

  # The previous implementation pattern-matched the result of Regex.run/2
  # directly, which raised a MatchError (uncaught by the `with/else` above)
  # whenever the header carried no max-age directive. Return an error tuple
  # instead so fetch_certificates/0 keeps its {:ok, _} | {:error, _} contract.
  defp max_age_expiry(cache_control) do
    case Regex.run(~r/max-age=(\d+)/, cache_control) do
      [_entire_matched, max_age] ->
        {:ok, DateTime.add(DateTime.utc_now(), String.to_integer(max_age), :second)}

      nil ->
        {:error, :max_age_not_found}
    end
  end
end
| 27.756098 | 106 | 0.652021 |
79973e19b9078551334219fcc8331bf8956298ae | 194 | ex | Elixir | lib/stacky.ex | cthree/stacky | 12203434cc60d4b3299cf8e9f1daef414a27380b | [
"MIT"
] | null | null | null | lib/stacky.ex | cthree/stacky | 12203434cc60d4b3299cf8e9f1daef414a27380b | [
"MIT"
] | null | null | null | lib/stacky.ex | cthree/stacky | 12203434cc60d4b3299cf8e9f1daef414a27380b | [
"MIT"
defmodule Stacky do
  @moduledoc """
  Entry-point module for the Stacky application.
  """

  @doc """
  Greets the world.

  ## Examples

      iex> Stacky.hello
      :world

  """
  def hello, do: :world
end
| 10.210526 | 27 | 0.561856 |
799757479d088c31beb8d8af79bdb6cf4beca35a | 207 | exs | Elixir | test/controllers/page_controller_test.exs | tsara27/collab-x-phoenix | 828f8fbdcf853a43e096a42dc2f003cf443eb792 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | tsara27/collab-x-phoenix | 828f8fbdcf853a43e096a42dc2f003cf443eb792 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | tsara27/collab-x-phoenix | 828f8fbdcf853a43e096a42dc2f003cf443eb792 | [
"MIT"
defmodule CollabXPhoenix.PageControllerTest do
  use CollabXPhoenix.ConnCase

  # The landing page should render with a 200 and the default greeting.
  test "GET /", %{conn: conn} do
    response =
      conn
      |> get("/")
      |> html_response(200)

    assert response =~ "Welcome to Phoenix!"
  end
end
| 23 | 60 | 0.695652 |
79975d8557b6d5e59e31a4e458011d4fef72e4d5 | 216 | ex | Elixir | lib/rpunit/rpunit.ex | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | 10 | 2016-07-15T15:57:33.000Z | 2018-06-09T00:40:46.000Z | lib/rpunit/rpunit.ex | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | null | null | null | lib/rpunit/rpunit.ex | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
defmodule RoundingPegs.ExUnit do
  @moduledoc """
  Convenience entry point: `use RoundingPegs.ExUnit` imports the project's
  test macros and assertions, plus ShouldI's `assign/2`, into the caller.
  """

  defmacro __using__(_opts) do
    quote do
      import RoundingPegs.ExUnit.Macros
      import RoundingPegs.ExUnit.Assertions
      import ShouldI, only: [assign: 2]
    end
  end
end
| 19.636364 | 43 | 0.708333 |
79976cf79e6b3749f651f1ae0eb06614f9a67be8 | 168 | exs | Elixir | priv/repo/migrations/20180627034657_add_antitle.exs | roger120981/planet | a662006551d11427c08cf6cdbacd37d377bcd9c5 | [
"MIT"
] | 1 | 2019-04-01T19:14:16.000Z | 2019-04-01T19:14:16.000Z | priv/repo/migrations/20180627034657_add_antitle.exs | roger120981/planet | a662006551d11427c08cf6cdbacd37d377bcd9c5 | [
"MIT"
] | null | null | null | priv/repo/migrations/20180627034657_add_antitle.exs | roger120981/planet | a662006551d11427c08cf6cdbacd37d377bcd9c5 | [
"MIT"
defmodule Excommerce.Repo.Migrations.AddAntitle do
  @moduledoc false
  use Ecto.Migration

  # Reversible migration: adds the string column `ante_title` to `products`.
  def change do
    alter table(:products) do
      add(:ante_title, :string)
    end
  end
end
| 16.8 | 50 | 0.714286 |
799781f7e863d090a31dc305b83faa9248a30e68 | 231 | exs | Elixir | priv/repo/migrations/20170526230321_create_user.exs | mcampa/bigcommerce-elixir-app | f1fe40b2a456b49c09970a5317108316f054d5ec | [
"MIT"
] | 1 | 2017-06-02T07:17:22.000Z | 2017-06-02T07:17:22.000Z | priv/repo/migrations/20170526230321_create_user.exs | mcampa/bigcommerce-elixir-app | f1fe40b2a456b49c09970a5317108316f054d5ec | [
"MIT"
] | null | null | null | priv/repo/migrations/20170526230321_create_user.exs | mcampa/bigcommerce-elixir-app | f1fe40b2a456b49c09970a5317108316f054d5ec | [
"MIT"
defmodule App.Repo.Migrations.CreateUser do
  @moduledoc false
  use Ecto.Migration

  # Reversible migration: creates the `users` table with string columns
  # for user_id, context and token, plus inserted_at/updated_at.
  def change do
    create table(:users) do
      add(:user_id, :string)
      add(:context, :string)
      add(:token, :string)

      timestamps()
    end
  end
end
| 15.4 | 43 | 0.640693 |
7997ae1ec195d4f6562af49a397a297868ee27a7 | 1,322 | exs | Elixir | mix.exs | aforward-oss/earmark | b44d4817aa2b4047b07f91d633ae83ed27c695ed | [
"Apache-2.0"
] | null | null | null | mix.exs | aforward-oss/earmark | b44d4817aa2b4047b07f91d633ae83ed27c695ed | [
"Apache-2.0"
] | null | null | null | mix.exs | aforward-oss/earmark | b44d4817aa2b4047b07f91d633ae83ed27c695ed | [
"Apache-2.0"
] | null | null | null | Code.eval_file "tasks/readme.exs"
defmodule Earmark.Mixfile do
  @moduledoc false
  use Mix.Project
  # Mix project definition for the Earmark hex package.
  def project do
    [
      app: :earmark,
      version: "0.2.2",
      elixir: "~> 1.2",
      elixirc_paths: elixirc_paths(Mix.env),
      escript: escript_config(),
      deps: deps(),
      description: description(),
      package: package(),
    ]
  end
  # No OTP applications are started; Earmark is a pure library.
  def application do
    [applications: []]
  end
  # Dev/test-only dependencies; the library itself has none.
  defp deps do
    [{:poison, "~> 2.1", only: [:dev, :test]},
     {:kwfuns, "~> 0.0", only: :test}]
  end
  # Package description shown on hex.pm.
  defp description do
    """
    Earmark is a pure-Elixir Markdown converter.
    It is intended to be used as a library (just call Earmark.to_html),
    but can also be used as a command-line tool (just run mix escript.build
    first).
    Output generation is pluggable.
    """
  end
  # Hex package metadata (published files, maintainers, license, links).
  defp package do
    [
      files: [ "lib", "tasks", "mix.exs", "README.md" ],
      maintainers: [ "Dave Thomas <[email protected]>"],
      licenses: [ "Same as Elixir" ],
      links: %{
        "GitHub" => "https://github.com/pragdave/earmark",
      }
    ]
  end
  # Entry point for the command-line escript build.
  defp escript_config do
    [ main_module: Earmark.CLI ]
  end
  # Compile test support helpers only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]
end
7997aee078dc158d679c731c3ef1cf62104426ca | 1,093 | ex | Elixir | lib/cforum/jobs/image_resizer_job.ex | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | [
"MIT"
] | null | null | null | lib/cforum/jobs/image_resizer_job.ex | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | [
"MIT"
] | null | null | null | lib/cforum/jobs/image_resizer_job.ex | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | [
"MIT"
defmodule Cforum.Jobs.ImageResizerJob do
  @moduledoc """
  Oban worker that renders the "thumb" and "medium" versions of an uploaded
  image by shelling out to ImageMagick's `convert` binary.
  """
  use Oban.Worker, queue: :media, max_attempts: 5

  alias Cforum.Media

  @doc "Schedules a resize job for the given image's medium id."
  def enqueue(img) do
    %{"medium_id" => img.medium_id}
    |> new()
    |> Oban.insert!()
  end

  @impl Oban.Worker
  def perform(%{"medium_id" => id}, _job) do
    # Resizing is skipped entirely in the test environment.
    case Application.get_env(:cforum, :environment) do
      :test ->
        nil

      _env ->
        img = Media.get_image!(id)
        resize_image(img, "thumb")
        resize_image(img, "medium")
    end
  end

  # Runs the configured `convert` executable with the arguments for the
  # requested version.
  defp resize_image(img, version) do
    :cforum
    |> Application.get_env(:convert)
    |> Porcelain.exec(convert_arguments(img, version))
  end

  # Builds the full ImageMagick argument list: source path, orientation and
  # metadata normalization, version-specific scaling, destination path.
  defp convert_arguments(img, version) do
    [Media.future_image_path(img, "orig"), "-auto-orient", "-strip"] ++
      args_by_version(version) ++ [Media.future_image_path(img, version)]
  end

  defp args_by_version("thumb"), do: ["-thumbnail", "100x100>"]
  defp args_by_version("medium"), do: ["-scale", "800x600>"]
end
| 25.418605 | 63 | 0.655993 |
7997c9e99c9e94ed9449c27dec72587683d1f01b | 1,738 | ex | Elixir | robotica_common/lib/mark.ex | brianmay/robotica-elixir | 8656510e54b7e32a547e3a54bf946f0e327911c9 | [
"RSA-MD"
] | 1 | 2019-04-23T09:16:44.000Z | 2019-04-23T09:16:44.000Z | robotica_common/lib/mark.ex | brianmay/robotica-elixir | 8656510e54b7e32a547e3a54bf946f0e327911c9 | [
"RSA-MD"
] | 107 | 2019-05-26T08:03:26.000Z | 2022-02-03T19:13:56.000Z | robotica_common/lib/mark.ex | brianmay/robotica-elixir | 8656510e54b7e32a547e3a54bf946f0e327911c9 | [
"RSA-MD"
defmodule RoboticaCommon.Mark do
  @moduledoc """
  A mark records the status of a scheduled step over a time window and is
  broadcast to interested subscribers via the event bus.
  """
  use RoboticaCommon.EventBus
  alias RoboticaCommon.Date

  @derive Jason.Encoder
  @type t :: %__MODULE__{
          id: String.t(),
          status: :done | :cancelled,
          start_time: %DateTime{},
          stop_time: %DateTime{}
        }
  @enforce_keys [:id, :status, :start_time, :stop_time]
  defstruct id: nil,
            status: nil,
            start_time: nil,
            stop_time: nil

  @doc "Broadcasts the given mark on the `:mark` event bus topic."
  @spec publish_mark(RoboticaCommon.Mark.t()) :: :ok
  def publish_mark(%__MODULE__{} = mark) do
    RoboticaCommon.EventBus.notify(:mark, mark)
  end

  @doc """
  Builds and publishes a mark for the step. `:done` and `:cancelled` cover
  the whole UTC day of the step's required time; `:clear` publishes an empty
  window starting now. Any other status returns `:error`.
  """
  @spec mark_task(RoboticaCommon.ScheduledStep.t(), :done | :cancelled | :clear) :: :error | :ok
  def mark_task(%RoboticaCommon.ScheduledStep{} = step, status) do
    case build_mark(step.id, step.required_time, status) do
      nil -> :error
      %__MODULE__{} = mark -> publish_mark(mark)
    end
  end

  # Day-long window (previous midnight to next midnight, UTC) carrying the
  # requested terminal status.
  defp build_mark(id, required_time, status) when status in [:done, :cancelled] do
    %__MODULE__{
      id: id,
      status: status,
      start_time: required_time |> Date.today() |> Date.midnight_utc(),
      stop_time: required_time |> Date.tomorrow() |> Date.midnight_utc()
    }
  end

  # Zero-length window at the current instant, effectively clearing any
  # previously published mark.
  defp build_mark(id, _required_time, :clear) do
    now = DateTime.utc_now()
    %__MODULE__{id: id, status: :done, start_time: now, stop_time: now}
  end

  defp build_mark(_id, _required_time, _status), do: nil
end
| 23.808219 | 96 | 0.555236 |
7997db2858f484105f7196cac5b2943e53a92d14 | 10,023 | ex | Elixir | deps/tzdata/lib/tzdata.ex | robot-overlord/starter_kit | 254153221d0a3a06324c65ad8e89d610de2429c3 | [
"MIT"
] | 1 | 2020-01-31T10:23:37.000Z | 2020-01-31T10:23:37.000Z | deps/tzdata/lib/tzdata.ex | robot-overlord/starter_kit | 254153221d0a3a06324c65ad8e89d610de2429c3 | [
"MIT"
] | null | null | null | deps/tzdata/lib/tzdata.ex | robot-overlord/starter_kit | 254153221d0a3a06324c65ad8e89d610de2429c3 | [
"MIT"
] | null | null | null | defmodule Tzdata do
@moduledoc """
The Tzdata module provides data from the IANA tz database. Also known
as the Olson/Eggert database, zoneinfo, tzdata and other names.
A list of time zone names (e.g. `America/Los_Angeles`) are provided.
As well as functions for finding out the UTC offset, abbreviation,
standard offset (DST) for a specific point in time in a certain
timezone.
"""
@doc """
zone_list provides a list of all the zone names that can be used with
DateTime. This includes aliases.
"""
def zone_list, do: Tzdata.ReleaseReader.zone_and_link_list
@doc """
Like zone_list, but excludes aliases for zones.
"""
def canonical_zone_list, do: Tzdata.ReleaseReader.zone_list
@doc """
A list of aliases for zone names. For instance Europe/Jersey
is an alias for Europe/London. Aliases are also known as linked zones.
"""
def zone_alias_list, do: Tzdata.ReleaseReader.link_list
@doc """
Takes the name of a zone. Returns true if zone exists. Otherwise false.
iex> Tzdata.zone_exists? "Pacific/Auckland"
true
iex> Tzdata.zone_exists? "America/Sao_Paulo"
true
iex> Tzdata.zone_exists? "Europe/Jersey"
true
"""
def zone_exists?(name), do: Enum.member?(zone_list(), name)
@doc """
Takes the name of a zone. Returns true if zone exists and is canonical.
Otherwise false.
iex> Tzdata.canonical_zone? "Europe/London"
true
iex> Tzdata.canonical_zone? "Europe/Jersey"
false
"""
def canonical_zone?(name), do: Enum.member?(canonical_zone_list(), name)
@doc """
Takes the name of a zone. Returns true if zone exists and is an alias.
Otherwise false.
iex> Tzdata.zone_alias? "Europe/Jersey"
true
iex> Tzdata.zone_alias? "Europe/London"
false
"""
def zone_alias?(name), do: Enum.member?(zone_alias_list(), name)
@doc """
Returns a map of links. Also known as aliases.
iex> Tzdata.links["Europe/Jersey"]
"Europe/London"
"""
def links, do: Tzdata.ReleaseReader.links
@doc """
Returns a map with keys being group names and the values lists of
time zone names. The group names mirror the file names used by the tzinfo
database.
"""
def zone_lists_grouped, do: Tzdata.ReleaseReader.by_group
@doc """
Returns tzdata release version as a string.
Example:
Tzdata.tzdata_version
"2014i"
"""
def tzdata_version, do: Tzdata.ReleaseReader.release_version
@doc """
Returns a list of periods for the `zone_name` provided as an argument.
A period in this case is a period of time where the UTC offset and standard
offset are in a certain way. When they change, for instance in spring when
DST takes effect, a new period starts. For instance a period can begin in
spring when winter time ends and summer time begins. The period lasts until
DST ends.
If either the UTC or standard offset change for any reason, a new period
begins. For instance instead of DST ending or beginning, a rule change
that changes the UTC offset will also mean a new period.
The result is tagged with :ok if the zone_name is correct.
The from and until times can be :mix, :max or gregorian seconds.
## Example
iex> Tzdata.periods("Europe/Madrid") |> elem(1) |> Enum.take(1)
[%{from: %{standard: :min, utc: :min, wall: :min}, std_off: 0,
until: %{standard: 59989763760, utc: 59989764644, wall: 59989763760},
utc_off: -884, zone_abbr: "LMT"}]
iex> Tzdata.periods("Not existing")
{:error, :not_found}
"""
def periods(zone_name) do
{tag, p} = Tzdata.ReleaseReader.periods_for_zone_or_link(zone_name)
case tag do
:ok ->
mapped_p = p |> Enum.map(fn {_, f_utc, f_wall, f_std, u_utc, u_wall, u_std, utc_off, std_off, zone_abbr}->
%{
std_off: std_off,
utc_off: utc_off,
from: %{utc: f_utc, wall: f_wall, standard: f_std},
until: %{utc: u_utc, standard: u_std, wall: u_wall},
zone_abbr: zone_abbr
}
end)
{:ok, mapped_p}
_ -> {:error, p}
end
end
@doc """
Get the periods that cover a certain point in time. Usually it will be a list
with just one period. But in some cases it will be zero or two periods. For
instance when going from summer to winter time (DST to standard time) there
will be an overlap if `time_type` is `:wall`.
`zone_name` should be a valid time zone name. The function `zone_list/0`
provides a valid list of valid zone names.
`time_point` is the point in time in gregorian seconds (see erlang
calendar module documentation for more info on gregorian seconds).
Valid values for `time_type` is `:utc`, `:wall` or `:standard`.
## Examples
# 63555753600 seconds is equivalent to {{2015, 1, 1}, {0, 0, 0}}
iex> Tzdata.periods_for_time("Asia/Tokyo", 63587289600, :wall)
[%{from: %{standard: 61589206800, utc: 61589174400, wall: 61589206800}, std_off: 0,
until: %{standard: :max, utc: :max, wall: :max}, utc_off: 32400, zone_abbr: "JST"}]
# 63612960000 seconds is equivalent to 2015-10-25 02:40:00 and is an ambiguous
# wall time for the zone. So two possible periods will be returned.
iex> Tzdata.periods_for_time("Europe/Copenhagen", 63612960000, :wall)
[%{from: %{standard: 63594813600, utc: 63594810000, wall: 63594817200}, std_off: 3600,
until: %{standard: 63612957600, utc: 63612954000, wall: 63612961200}, utc_off: 3600, zone_abbr: "CEST"},
%{from: %{standard: 63612957600, utc: 63612954000, wall: 63612957600}, std_off: 0,
until: %{standard: 63626263200, utc: 63626259600, wall: 63626263200}, utc_off: 3600, zone_abbr: "CET"}]
# 63594816000 seconds is equivalent to 2015-03-29 02:40:00 and is a
# non-existing wall time for the zone. It is spring and the clock skips that hour.
iex> Tzdata.periods_for_time("Europe/Copenhagen", 63594816000, :wall)
[]
"""
def periods_for_time(zone_name, time_point, time_type) do
{:ok, periods} = possible_periods_for_zone_and_time(zone_name, time_point)
periods
|> consecutive_matching(fn x ->
((Map.get(x.from, time_type) |>smaller_than_or_equals(time_point))
&& (Map.get(x.until, time_type) |>bigger_than(time_point)))
end)
end
# Like Enum.filter, but returns the first consecutive result.
# If we have found consecutive matches we do not need to look at the
# remaining list.
defp consecutive_matching(list, fun) do
do_consecutive_matching(list, fun, [], false)
end
defp do_consecutive_matching([], _fun, [], _did_last_match), do: []
defp do_consecutive_matching([], _fun, matched, _did_last_match), do: matched
defp do_consecutive_matching(_list, _fun, matched, false) when length(matched) > 0 do
# If there are matches and previous did not match then the matches are no
# long consecutive. So we return the result.
matched |> Enum.reverse
end
defp do_consecutive_matching([h|t], fun, matched, _did_last_match) do
if fun.(h) == true do
do_consecutive_matching(t, fun, [h|matched], true)
else
do_consecutive_matching(t, fun, matched, false)
end
end
  # Use dynamic periods for points in time that are about 40 years into the future
  @years_in_the_future_where_precompiled_periods_are_used 40
  # Cut-off computed once at compile time: Jan 1st, 40 years after the year of
  # compilation, in gregorian seconds.
  @point_from_which_to_use_dynamic_periods :calendar.datetime_to_gregorian_seconds {{(:calendar.universal_time|>elem(0)|>elem(0)) + @years_in_the_future_where_precompiled_periods_are_used, 1, 1}, {0, 0, 0}}
  # Far-future time points: derive periods dynamically unless the zone has
  # settled into one eternal period, in which case precompiled data is fine.
  defp possible_periods_for_zone_and_time(zone_name, time_point) when time_point >= @point_from_which_to_use_dynamic_periods do
    if Tzdata.FarFutureDynamicPeriods.zone_in_30_years_in_eternal_period?(zone_name) do
      periods(zone_name)
    else
      # Zone links (aliases) carry no period data of their own; resolve the
      # link target and retry.
      link_status = Tzdata.ReleaseReader.links |> Map.get(zone_name)
      if link_status == nil do
        Tzdata.FarFutureDynamicPeriods.periods_for_point_in_time(time_point, zone_name)
      else
        possible_periods_for_zone_and_time(link_status, time_point)
      end
    end
  end
  # Near-term time points use the precompiled period list.
  defp possible_periods_for_zone_and_time(zone_name, _time_point) do
    periods(zone_name)
  end
@doc """
Get a list of maps with known leap seconds and
the difference between UTC and the TAI in seconds.
See also `leap_seconds/1`
## Example
iex> Tzdata.leap_seconds_with_tai_diff |> Enum.take(3)
[%{date_time: {{1971, 12, 31}, {23, 59, 60}}, tai_diff: 10},
%{date_time: {{1972, 6, 30}, {23, 59, 60}}, tai_diff: 11},
%{date_time: {{1972, 12, 31}, {23, 59, 60}}, tai_diff: 12}]
"""
def leap_seconds_with_tai_diff do
leap_seconds_data = Tzdata.ReleaseReader.leap_sec_data
leap_seconds_data[:leap_seconds]
end
@doc """
Get a list of known leap seconds. The leap seconds are datetime
tuples representing the extra leap second to be inserted.
The date-times are in UTC.
See also `leap_seconds_with_tai_diff/1`
## Example
iex> Tzdata.leap_seconds |> Enum.take(3)
[{{1971, 12, 31}, {23, 59, 60}},
{{1972, 6, 30}, {23, 59, 60}},
{{1972, 12, 31}, {23, 59, 60}}]
"""
def leap_seconds do
leap_seconds_data = Tzdata.ReleaseReader.leap_sec_data
just_leap_seconds = leap_seconds_data[:leap_seconds]
|> Enum.map(&(Map.get(&1, :date_time)))
just_leap_seconds
end
@doc """
The time when the leap second information returned from the other leap second
related function expires. The date-time is in UTC.
## Example
Tzdata.leap_second_data_valid_until
{{2015, 12, 28}, {0, 0, 0}}
"""
def leap_second_data_valid_until do
leap_seconds_data = Tzdata.ReleaseReader.leap_sec_data
leap_seconds_data[:valid_until]
end
  # :min represents the open lower bound of the very first period, so it
  # compares as smaller than any concrete time point.
  defp smaller_than_or_equals(:min, _), do: true
  defp smaller_than_or_equals(first, second), do: first <= second
  # :max represents the open upper bound of the last (eternal) period, so it
  # compares as bigger than any concrete time point.
  defp bigger_than(:max, _), do: true
  defp bigger_than(first, second), do: first > second
end
| 36.580292 | 206 | 0.687519 |
7998108c9e669934d423fb535ac62772c74391b9 | 929 | ex | Elixir | test/fixtures/umbrella_app/apps/web/lib/web/application.ex | arikai/distillery | 65ddbcc143f2849a6ed5574e8c397a68ca92eb81 | [
"MIT"
] | 3,097 | 2016-07-18T13:59:00.000Z | 2022-03-29T00:27:23.000Z | test/fixtures/umbrella_app/apps/web/lib/web/application.ex | arikai/distillery | 65ddbcc143f2849a6ed5574e8c397a68ca92eb81 | [
"MIT"
] | 672 | 2016-07-18T18:25:29.000Z | 2022-02-24T17:39:30.000Z | test/fixtures/umbrella_app/apps/web/lib/web/application.ex | arikai/distillery | 65ddbcc143f2849a6ed5574e8c397a68ca92eb81 | [
"MIT"
] | 483 | 2016-07-22T14:08:49.000Z | 2022-03-21T09:35:23.000Z | defmodule Web.Application do
use Application
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the endpoint when the application starts
supervisor(WebWeb.Endpoint, []),
# Start your own worker by calling: Web.Worker.start_link(arg1, arg2, arg3)
# worker(Web.Worker, [arg1, arg2, arg3]),
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Web.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
WebWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 30.966667 | 81 | 0.713671 |
79981f2ec99da0a3917239168871502148d0adf4 | 470 | exs | Elixir | lib/exercism/bob/bob.exs | sprql/experimentex | 6c8a37ea03b74c5bfece1b2bec21c163a2f2df2f | [
"MIT"
] | null | null | null | lib/exercism/bob/bob.exs | sprql/experimentex | 6c8a37ea03b74c5bfece1b2bec21c163a2f2df2f | [
"MIT"
] | null | null | null | lib/exercism/bob/bob.exs | sprql/experimentex | 6c8a37ea03b74c5bfece1b2bec21c163a2f2df2f | [
"MIT"
] | null | null | null | defmodule Bob do
def hey(input) do
cond do
question?(input) -> "Sure."
nothing?(input) -> "Fine. Be that way!"
yelling?(input) -> "Whoa, chill out!"
true -> "Whatever."
end
end
defp question?(input) do
String.match?(input, ~r/\?$/)
end
defp yelling?(input) do
(String.match?(input, ~r/\pL/u) && String.upcase(input) == input)
end
def nothing?(input) do
String.match?(input, ~r/^\s*$/)
end
end
| 20.434783 | 69 | 0.553191 |
79982a6cd04c6204a17ea8e4ceb04f954e3446b9 | 422 | ex | Elixir | lib/escala/guardian_serializer.ex | vahidabdi/escala_api | 84a4a3ef832180f12c6197683933d8cd0ab35ef4 | [
"MIT"
] | null | null | null | lib/escala/guardian_serializer.ex | vahidabdi/escala_api | 84a4a3ef832180f12c6197683933d8cd0ab35ef4 | [
"MIT"
] | null | null | null | lib/escala/guardian_serializer.ex | vahidabdi/escala_api | 84a4a3ef832180f12c6197683933d8cd0ab35ef4 | [
"MIT"
] | null | null | null | defmodule Escala.GuardianSerializer do
@moduledoc """
Guardian Serializer
"""
@behaviour Guardian.Serializer
alias Escala.Accounts
alias Escala.Accounts.User
def for_token(%User{} = user), do: {:ok, "User:#{user.id}"}
def for_token(_), do: {:error, "Unknown resource type"}
def from_token("User:" <> id), do: {:ok, Accounts.get_user(id)}
def from_token(_), do: {:error, "Unknown resource type"}
end
| 26.375 | 65 | 0.682464 |
799835440489b6d6f25e8b4e28aec8b400544dd3 | 2,157 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/preserved_state_preserved_disk.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/compute/lib/google_api/compute/v1/model/preserved_state_preserved_disk.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/compute/lib/google_api/compute/v1/model/preserved_state_preserved_disk.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.PreservedStatePreservedDisk do
  @moduledoc """
  Preserved state of a disk attached to a VM (auto-generated model; do not
  edit by hand).

  ## Attributes

  *   `autoDelete` (*type:* `String.t`, *default:* `nil`) - These stateful disks will never be deleted during autohealing, update, instance recreate operations. This flag is used to configure if the disk should be deleted after it is no longer used by the group, e.g. when the given instance or the whole MIG is deleted. Note: disks attached in READ_ONLY mode cannot be auto-deleted.
  *   `mode` (*type:* `String.t`, *default:* `nil`) - The mode in which to attach this disk, either READ_WRITE or READ_ONLY. If not specified, the default is to attach the disk in READ_WRITE mode.
  *   `source` (*type:* `String.t`, *default:* `nil`) - The URL of the disk resource that is stateful and should be attached to the VM instance.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :autoDelete => String.t() | nil,
          :mode => String.t() | nil,
          :source => String.t() | nil
        }

  field(:autoDelete)
  field(:mode)
  field(:source)
end
# JSON decoding delegates to the generated model helpers from Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.PreservedStatePreservedDisk do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.PreservedStatePreservedDisk.decode(value, options)
  end
end

# JSON encoding is handled generically for all Gax models.
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.PreservedStatePreservedDisk do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.698113 | 383 | 0.728326 |
79985904506e0cbce94446335e3ecdc0b667391e | 1,293 | ex | Elixir | DL-DLR2-004/DL-DLR2-004 (R=10.0).ELEMENT-IoT.ex | Realscrat/decentlab-decoders | 3ca5006cd85e3772a15a1b3fff3922c50979eeb6 | [
"MIT"
] | 13 | 2020-01-18T22:08:44.000Z | 2022-02-06T14:19:57.000Z | DL-DLR2-004/DL-DLR2-004 (R=10.0).ELEMENT-IoT.ex | johannesE/decentlab-decoders | c290ea1218de2c82d665fdc9f71f16682e12d917 | [
"MIT"
] | 4 | 2019-05-10T07:17:41.000Z | 2021-10-20T16:24:04.000Z | DL-DLR2-004/DL-DLR2-004 (R=10.0).ELEMENT-IoT.ex | johannesE/decentlab-decoders | c290ea1218de2c82d665fdc9f71f16682e12d917 | [
"MIT"
] | 15 | 2019-06-04T06:13:32.000Z | 2022-02-15T07:28:52.000Z |
# https://www.decentlab.com/products/analog-or-digital-sensor-device-for-lorawan
defmodule Parser do
  use Platform.Parsing.Behaviour

  # device-specific parameters
  # Shunt resistance in ohms used in the current conversion below.
  defp r(), do: 10.0

  ## test payloads
  # 0208b200038bb80c60
  # 0208b200020c60

  # Field metadata consumed by the ELEMENT IoT platform for display.
  def fields do
    [
      %{field: "current", display: "Current", unit: "mA"},
      %{field: "battery_voltage", display: "Battery voltage", unit: "V"}
    ]
  end

  # Payload layout: <<protocol version (2), device id::16, flag bits::16,
  # data words...>>. Each set flag bit marks a sensor block present in
  # `words`; the sensorN/2 clauses consume their word only when their bit is
  # set, threading the {remaining_words, result_map} accumulator.
  def parse(<<2, device_id::size(16), flags::binary-size(2), words::binary>>, _meta) do
    {_remaining, result} =
      {words, %{:device_id => device_id, :protocol_version => 2}}
      |> sensor0(flags)
      |> sensor1(flags)
    result
  end

  # Flag bit 0: analog input. The 16-bit word is an offset-binary reading
  # centred on 32768; scaled to milliamps via the shunt resistance r().
  defp sensor0({<<x0::size(16), remaining::binary>>, result},
    <<_::size(15), 1::size(1), _::size(0)>>) do
    {remaining,
     Map.merge(result,
               %{
                 :current => 3 * (x0 - 32768) / 32768 / 2 / r() * 1000
               })}
  end
  # Bit not set: leave the accumulator untouched.
  defp sensor0(result, _flags), do: result

  # Flag bit 1: battery voltage, transmitted in millivolts.
  defp sensor1({<<x0::size(16), remaining::binary>>, result},
    <<_::size(14), 1::size(1), _::size(1)>>) do
    {remaining,
     Map.merge(result,
               %{
                 :battery_voltage => x0 / 1000
               })}
  end
  defp sensor1(result, _flags), do: result
end
79986f472691b449f46431921086ca7b4b7f6e16 | 362 | exs | Elixir | samples/scheduler/mix.exs | thbar/elixir-playground | 4c3e8cecb927d05f762b666d3064114451786db4 | [
"MIT"
] | 14 | 2016-01-18T13:58:37.000Z | 2021-08-09T11:39:54.000Z | samples/scheduler/mix.exs | thbar/elixir-playground | 4c3e8cecb927d05f762b666d3064114451786db4 | [
"MIT"
] | null | null | null | samples/scheduler/mix.exs | thbar/elixir-playground | 4c3e8cecb927d05f762b666d3064114451786db4 | [
"MIT"
] | null | null | null | defmodule Scheduler.Mixfile do
use Mix.Project
def project do
[app: :scheduler,
version: "0.1.0",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
def application do
[applications: [:logger, :quantum]]
end
defp deps do
[{:quantum, "~> 1.7.1"}]
end
end
| 17.238095 | 39 | 0.585635 |
799879c205c506f0c5a9065c620c89297f654b70 | 14,407 | ex | Elixir | apps/donut_graphql/lib/donut.graphql/identity/contact.ex | ZURASTA/donut | b0546c041601e619e76c10d1d2ce62fe5d1701a4 | [
"BSD-2-Clause"
] | null | null | null | apps/donut_graphql/lib/donut.graphql/identity/contact.ex | ZURASTA/donut | b0546c041601e619e76c10d1d2ce62fe5d1701a4 | [
"BSD-2-Clause"
] | null | null | null | apps/donut_graphql/lib/donut.graphql/identity/contact.ex | ZURASTA/donut | b0546c041601e619e76c10d1d2ce62fe5d1701a4 | [
"BSD-2-Clause"
] | null | null | null | defmodule Donut.GraphQL.Identity.Contact do
use Donut.GraphQL.Schema.Notation
    # Shared enum used by all concrete contact types below.
    @desc "The priority of a contact"
    enum :contact_priority do
        value :primary
        value :secondary
    end

    # Abstract contact interface implemented by :email_contact and
    # :mobile_contact; the mutable variant additionally exposes mutations.
    @desc "A generic contact interface"
    mutable_interface :contact do
        immutable do
            field :priority, non_null(:contact_priority), description: "The priority of the contact"
            field :status, non_null(:verification_status), description: "The current verification status of the contact"
            field :presentable, non_null(:string), description: "The presentable information about the contact"
        end

        @desc "Change the priority of the contact"
        field :set_priority, type: result(:error) do
            arg :priority, non_null(:contact_priority)
        end

        # NOTE(review): description says "email" on the generic interface —
        # looks like a copy-paste from the email object; confirm.
        field :remove, result(:error), description: "Remove the email contact"
    end
    # Concrete contact type backed by Sherbet's email contact service.
    @desc "An email contact"
    mutable_object :email_contact do
        immutable do
            field :priority, non_null(:contact_priority), description: "The priority of the email contact"
            field :status, non_null(:verification_status), description: "The current verification status of the email contact"
            field :presentable, non_null(:string), description: "The presentable information about the email contact"
            field :email, non_null(:string), description: "The email address"

            interface :contact

            # Resolves the abstract :contact interface to this type when the
            # source map carries an :email key.
            is_type_of fn
                %{ email: _ } -> true
                _ -> false
            end
        end

        interface :mutable_contact

        @desc "Change the priority of the email contact"
        field :set_priority, type: result(:error) do
            arg :priority, non_null(:contact_priority)

            resolve fn
                # No-op when the requested priority already equals the current one.
                %{ priority: priority }, %{ priority: priority }, _ -> { :ok, nil }
                %{ email: email }, %{ priority: priority }, %{ context: %{ identity: identity } } ->
                    case Sherbet.API.Contact.Email.set_priority(identity, email, priority) do
                        :ok -> { :ok, nil }
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
            end
        end

        @desc "Remove the email contact"
        field :remove, type: result(:error) do
            resolve fn
                %{ email: email }, _, %{ context: %{ identity: identity } } ->
                    case Sherbet.API.Contact.Email.remove(identity, email) do
                        :ok -> { :ok, nil }
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
            end
        end
    end
    # Concrete contact type backed by Sherbet's mobile contact service;
    # mirrors :email_contact above.
    @desc "A mobile contact"
    mutable_object :mobile_contact do
        immutable do
            field :priority, non_null(:contact_priority), description: "The priority of the mobile contact"
            field :status, non_null(:verification_status), description: "The current verification status of the mobile contact"
            field :presentable, non_null(:string), description: "The presentable information about the mobile contact"
            field :mobile, non_null(:string), description: "The mobile number"

            interface :contact

            # Resolves the abstract :contact interface to this type when the
            # source map carries a :mobile key.
            is_type_of fn
                %{ mobile: _ } -> true
                _ -> false
            end
        end

        interface :mutable_contact

        @desc "Change the priority of the mobile contact"
        field :set_priority, type: result(:error) do
            arg :priority, non_null(:contact_priority)

            resolve fn
                # No-op when the requested priority already equals the current one.
                %{ priority: priority }, %{ priority: priority }, _ -> { :ok, nil }
                %{ mobile: mobile }, %{ priority: priority }, %{ context: %{ identity: identity } } ->
                    case Sherbet.API.Contact.Mobile.set_priority(identity, mobile, priority) do
                        :ok -> { :ok, nil }
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
            end
        end

        @desc "Remove the mobile contact"
        field :remove, type: result(:error) do
            resolve fn
                %{ mobile: mobile }, _, %{ context: %{ identity: identity } } ->
                    case Sherbet.API.Contact.Mobile.remove(identity, mobile) do
                        :ok -> { :ok, nil }
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
            end
        end
    end
    # Union-style result wrappers: success yields one of the listed contact
    # object types, failure yields an error object.
    @desc """
    The collection of possible results from a contact request. If successful
    returns the `Contact` trying to be accessed, otherwise returns an error.
    """
    result :contact, [:email_contact, :mobile_contact]

    @desc """
    The collection of possible results from a contact mutate request. If
    successful returns the `MutableContact` trying to be modified, otherwise
    returns an error.
    """
    result :mutable_contact, [:mutable_email_contact, :mutable_mobile_contact]
    mutable_object :contact_queries do
        immutable do
            @desc "The contacts associated with the identity"
            field :contacts, list_of(result(mutable(:contact))) do
                @desc "The status of the contacts to retrieve"
                arg :status, :verification_status

                @desc "The priority of the contacts to retrieve"
                arg :priority, :contact_priority

                resolve fn
                    %{ id: identity }, args, env = %{ definition: %{ selections: selections } } ->
                        # Inspect the query's inline fragments to determine
                        # which contact types were actually requested, so we
                        # only hit the backing services we need.
                        contacts =
                            Enum.reduce(selections, [], fn
                                %Absinthe.Blueprint.Document.Fragment.Inline{ schema_node: %Absinthe.Type.Object{ identifier: object } }, acc when object in [mutable(:email_contact), mutable(:mobile_contact)] -> [object|acc]
                                %Absinthe.Blueprint.Document.Fragment.Inline{ schema_node: %Absinthe.Type.Interface{ identifier: contact } }, acc when contact in [:contact, mutable(:contact)] -> [mutable(:email_contact), mutable(:mobile_contact)] ++ acc
                                _, acc -> acc
                            end)
                            |> Enum.uniq
                            |> Enum.reduce([], fn
                                mutable(:email_contact), acc ->
                                    case Sherbet.API.Contact.Email.contacts(identity) do
                                        { :ok, contacts } ->
                                            filter_contacts(contacts, args, acc, fn { status, priority, email } ->
                                                mutable(%{ priority: priority, status: status, presentable: email, email: email }, env)
                                            end)
                                        # NOTE(review): on error the whole accumulator is replaced by a
                                        # single Error struct, which Enum.reverse below cannot handle —
                                        # confirm this path is intended.
                                        { :error, reason } -> %Donut.GraphQL.Result.Error{ message: reason }
                                    end
                                mutable(:mobile_contact), acc ->
                                    case Sherbet.API.Contact.Mobile.contacts(identity) do
                                        { :ok, contacts } ->
                                            filter_contacts(contacts, args, acc, fn { status, priority, mobile } ->
                                                mutable(%{ priority: priority, status: status, presentable: mobile, mobile: mobile }, env)
                                            end)
                                        { :error, reason } -> %Donut.GraphQL.Result.Error{ message: reason }
                                    end
                            end)
                            |> Enum.reverse

                        { :ok, contacts }
                end
            end
        end
    end
defp filter_contacts(contacts, %{ status: status, priority: priority }, acc, get_object) do
Enum.reduce(contacts, acc, fn contact, acc ->
case get_object.(contact) do
object = %{ status: ^status, priority: ^priority } -> [object|acc]
_ -> acc
end
end)
end
defp filter_contacts(contacts, %{ priority: priority }, acc, get_object) do
Enum.reduce(contacts, acc, fn contact, acc ->
case get_object.(contact) do
object = %{ priority: ^priority } -> [object|acc]
_ -> acc
end
end)
end
defp filter_contacts(contacts, %{ status: status }, acc, get_object) do
Enum.reduce(contacts, acc, fn contact, acc ->
case get_object.(contact) do
object = %{ status: ^status } -> [object|acc]
_ -> acc
end
end)
end
defp filter_contacts(contacts, _, acc, get_object) do
Enum.reduce(contacts, acc, &([get_object.(&1)|&2]))
end
    # Unauthenticated removal flow: anyone holding the contact can request a
    # removal key, then finalise with that key. Exactly one of :email/:mobile
    # must be supplied; the map_size guards enforce that.
    object :contact_mutations do
        @desc "Request a contact be removed from its associated identity"
        field :request_remove_contact, type: result(:error) do
            @desc "The email contact to request be removed"
            arg :email, :string

            @desc "The mobile contact to request be removed"
            arg :mobile, :string

            resolve fn
                args = %{ email: email }, _ when map_size(args) == 1 ->
                    case Sherbet.API.Contact.Email.request_removal(email) do
                        :ok -> { :ok, nil }
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
                args = %{ mobile: mobile }, _ when map_size(args) == 1 ->
                    case Sherbet.API.Contact.Mobile.request_removal(mobile) do
                        :ok -> { :ok, nil }
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
                %{}, _ -> { :error, "Missing contact" }
                _, _ -> { :error, "Only one contact can be specified" }
            end
        end

        @desc "Finalise a contact be removed from its associated identity"
        field :finalise_remove_contact, type: result(:error) do
            @desc "The email contact to be removed"
            arg :email, :string

            @desc "The mobile contact to be removed"
            arg :mobile, :string

            @desc "The confirmation key"
            arg :key, non_null(:string)

            resolve fn
                # map_size == 2 means the key plus exactly one contact arg.
                args = %{ email: email, key: key }, _ when map_size(args) == 2 ->
                    case Sherbet.API.Contact.Email.finalise_removal(email, key) do
                        :ok -> { :ok, nil }
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
                args = %{ mobile: mobile, key: key }, _ when map_size(args) == 2 ->
                    case Sherbet.API.Contact.Mobile.finalise_removal(mobile, key) do
                        :ok -> { :ok, nil }
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
                %{ key: _ }, _ -> { :error, "Missing contact" }
                _, _ -> { :error, "Only one contact can be specified" }
            end
        end
    end
    # Identity-scoped mutation: adds a contact then re-reads the contact list
    # to return the freshly created entry (the add API itself returns no data).
    object :contact_identity_mutations do
        @desc "Add a contact to be associated with an identity"
        field :add_contact, type: result(:mutable_contact) do
            @desc "The email contact to be added"
            arg :email, :string

            @desc "The mobile contact to be added"
            arg :mobile, :string

            resolve fn
                %{ id: identity }, args = %{ email: email }, env when map_size(args) == 1 ->
                    case Sherbet.API.Contact.Email.add(identity, email) do
                        :ok ->
                            case Sherbet.API.Contact.Email.contacts(identity) do
                                { :ok, contacts } ->
                                    # Find the entry we just added among the identity's contacts.
                                    Enum.find_value(contacts, fn
                                        { status, priority, ^email } -> %{ priority: priority, status: status, presentable: email, email: email }
                                        _ -> false
                                    end)
                                    |> case do
                                        false -> { :ok, %Donut.GraphQL.Result.Error{ message: "Failed to retrieve newly added email contact" } }
                                        contact -> { :ok, mutable(contact, env) }
                                    end
                                { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                            end
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
                %{ id: identity }, args = %{ mobile: mobile }, env when map_size(args) == 1 ->
                    case Sherbet.API.Contact.Mobile.add(identity, mobile) do
                        :ok ->
                            case Sherbet.API.Contact.Mobile.contacts(identity) do
                                { :ok, contacts } ->
                                    Enum.find_value(contacts, fn
                                        { status, priority, ^mobile } -> %{ priority: priority, status: status, presentable: mobile, mobile: mobile }
                                        _ -> false
                                    end)
                                    |> case do
                                        false -> { :ok, %Donut.GraphQL.Result.Error{ message: "Failed to retrieve newly added mobile contact" } }
                                        contact -> { :ok, mutable(contact, env) }
                                    end
                                { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                            end
                        { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } }
                    end
                _, %{}, _ -> { :error, "Missing contact" }
                _, _, _ -> { :error, "Only one contact can be specified" }
            end
        end
    end
| 47.547855 | 253 | 0.496078 |
799890f8f8e031592d7b04160422003521530414 | 533 | ex | Elixir | test/factories/user_factory.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | null | null | null | test/factories/user_factory.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | 25 | 2021-03-23T07:27:21.000Z | 2021-10-31T15:09:52.000Z | test/factories/user_factory.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | null | null | null | defmodule Gscraper.UserFactory do
alias Gscraper.Account.Schemas.User
defmacro __using__(_opts) do
quote do
alias Gscraper.Account.Passwords
def user_factory(attrs) do
password = attrs[:password] || Faker.Util.format("%6b%3d")
user = %User{
username: Faker.Internet.user_name(),
password: password,
password_confirmation: password,
encrypted_password: Passwords.hash(password)
}
merge_attributes(user, attrs)
end
end
end
end
| 23.173913 | 66 | 0.641651 |
7998ba4ee48b64a80b3203287ea49ae2beb8d71f | 888 | ex | Elixir | supervised_list_server/lib/list_server.ex | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | supervised_list_server/lib/list_server.ex | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | supervised_list_server/lib/list_server.ex | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | defmodule ListServer do
use GenServer
### Public API
def start_link do
:gen_server.start_link({:local, :list}, __MODULE__, [], [])
end
def clear do
:gen_server.cast(:list, :clear)
end
def add(item) do
:gen_server.cast(:list, {:add, item})
end
def remove(item) do
:gen_server.cast(:list, {:remove, item})
end
def items do
:gen_server.call(:list, :items)
end
def crash do
:gen_server.cast :list, :crash
end
#### GenServer API
def init(list) do
{:ok, list}
end
def handle_cast(:clear, _list) do
{:noreply, []}
end
def handle_cast({:add, item}, list) do
{:noreply, list ++ [item]}
end
def handle_cast({:remove, item}, list) do
{:noreply, List.delete(list, item)}
end
def handle_cast(:crash, _list) do
1 = 2
end
def handle_call(:items, _from, list) do
{:reply, list, list}
end
end
| 17.076923 | 63 | 0.614865 |
7998bdd660a9922a44ba9157aff4bb516ae24d19 | 4,881 | ex | Elixir | test/support/service_twirp.ex | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | [
"Apache-2.0"
] | 30 | 2019-11-03T16:30:13.000Z | 2020-06-23T19:38:53.000Z | test/support/service_twirp.ex | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | [
"Apache-2.0"
] | 16 | 2020-03-13T17:56:16.000Z | 2020-06-11T10:40:02.000Z | test/support/service_twirp.ex | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | [
"Apache-2.0"
] | 3 | 2019-12-05T16:43:15.000Z | 2020-05-11T21:34:44.000Z | # Generated by the protobuf compile. DO NOT EDIT!
defmodule Twirp.Test.EchoService do
  @moduledoc false
  # Generated service definition: declares the twirp.test.Echo RPCs and the
  # handler function each one dispatches to.
  use Twirp.Service

  package "twirp.test"
  service "Echo"

  rpc :Echo, Twirp.Test.Req, Twirp.Test.Resp, :echo
  rpc :BatchEcho, Twirp.Test.BatchReq, Twirp.Test.BatchResp, :batch_echo
  rpc :SlowEcho, Twirp.Test.Req, Twirp.Test.Resp, :slow_echo
  rpc :Undocumented, Twirp.Test.Req, Twirp.Test.Resp, :undocumented
end
defmodule Twirp.Test.EchoClient do
  @moduledoc """
  Generated Twirp Client
  """
  @package "twirp.test"
  @service "Echo"

  @type ctx :: map()

  @callback echo(ctx(), Twirp.Test.Req.t()) ::
              {:ok, Twirp.Test.Resp.t()} | {:error, Twirp.Error.t()}

  @callback batch_echo(ctx(), Twirp.Test.BatchReq.t()) ::
              {:ok, Twirp.Test.BatchResp.t()} | {:error, Twirp.Error.t()}

  @callback slow_echo(ctx(), Twirp.Test.Req.t()) ::
              {:ok, Twirp.Test.Resp.t()} | {:error, Twirp.Error.t()}

  @callback undocumented(ctx(), Twirp.Test.Req.t()) ::
              {:ok, Twirp.Test.Resp.t()} | {:error, Twirp.Error.t()}

  def child_spec(opts) do
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [opts]}
    }
  end

  @doc """
  Starts a new service client.

  ## Options

  * `:url` - The root url for the service.
  * `:content_type` - Either `:proto` or `:json` based on the desired client type. Defaults to `:proto`.
  * `:pool_config` - Configuration for the underlying Finch, http pool.
  """
  def start_link(opts) do
    url = opts[:url] || raise ArgumentError, "#{__MODULE__} requires a `:url` option"
    content_type = opts[:content_type] || :proto
    full_path = Path.join([url, "twirp", "#{@package}.#{@service}"])
    interceptors = opts[:interceptors] || []

    {adapter_mod, adapter_opts} =
      opts[:adapter] || {Twirp.Client.Finch, pools: %{default: [size: 10, count: 1]}}

    http_opts = %{
      name: __MODULE__,
      opts: adapter_opts
    }

    # Client configuration is stashed in :persistent_term so each rpc/5 call
    # can read it without going through a process.
    :persistent_term.put({__MODULE__, :url}, full_path)
    :persistent_term.put({__MODULE__, :content_type}, content_type)
    :persistent_term.put({__MODULE__, :interceptors}, interceptors)
    :persistent_term.put({__MODULE__, :adapter}, {adapter_mod, adapter_opts})

    Twirp.Client.HTTP.start_link(adapter_mod, http_opts)
  end

  @doc """
  Echo's some text back to you
  """
  @spec echo(ctx(), Twirp.Test.Req.t()) :: {:ok, Twirp.Test.Resp.t()} | {:error, Twirp.Error.t()}
  def echo(ctx \\ %{}, %Twirp.Test.Req{} = req) do
    rpc(:Echo, ctx, req, Twirp.Test.Req, Twirp.Test.Resp)
  end

  @spec batch_echo(ctx(), Twirp.Test.BatchReq.t()) ::
          {:ok, Twirp.Test.BatchResp.t()} | {:error, Twirp.Error.t()}
  def batch_echo(ctx \\ %{}, %Twirp.Test.BatchReq{} = req) do
    rpc(:BatchEcho, ctx, req, Twirp.Test.BatchReq, Twirp.Test.BatchResp)
  end

  @doc """
  Echo's some text back to you, slowly.
  """
  @spec slow_echo(ctx(), Twirp.Test.Req.t()) ::
          {:ok, Twirp.Test.Resp.t()} | {:error, Twirp.Error.t()}
  def slow_echo(ctx \\ %{}, %Twirp.Test.Req{} = req) do
    rpc(:SlowEcho, ctx, req, Twirp.Test.Req, Twirp.Test.Resp)
  end

  @spec undocumented(ctx(), Twirp.Test.Req.t()) ::
          {:ok, Twirp.Test.Resp.t()} | {:error, Twirp.Error.t()}
  def undocumented(ctx \\ %{}, %Twirp.Test.Req{} = req) do
    rpc(:Undocumented, ctx, req, Twirp.Test.Req, Twirp.Test.Resp)
  end

  # Shared RPC driver: builds the request definition, wraps the HTTP call in
  # telemetry, and threads it through the configured interceptor chain.
  defp rpc(method, ctx, req, input_type, output_type) do
    service_url = :persistent_term.get({__MODULE__, :url})
    interceptors = :persistent_term.get({__MODULE__, :interceptors})
    {adapter_mod, _} = :persistent_term.get({__MODULE__, :adapter})
    content_type = Twirp.Encoder.type(:persistent_term.get({__MODULE__, :content_type}))
    content_header = {"Content-Type", content_type}

    # Default deadline of 1s unless the caller already set one.
    ctx =
      ctx
      |> Map.put(:content_type, content_type)
      |> Map.update(:headers, [content_header], &[content_header | &1])
      |> Map.put_new(:deadline, 1_000)

    rpcdef = %{
      service_url: service_url,
      method: method,
      req: req,
      input_type: input_type,
      output_type: output_type
    }

    metadata = %{
      client: __MODULE__,
      method: method,
      service: service_url
    }

    start = Twirp.Telemetry.start(:rpc, metadata)

    call_chain =
      chain(Enum.reverse(interceptors), fn ctx, req ->
        case Twirp.Client.HTTP.call(adapter_mod, __MODULE__, ctx, %{rpcdef | req: req}) do
          {:ok, resp} ->
            Twirp.Telemetry.stop(:rpc, start, metadata)
            {:ok, resp}

          {:error, error} ->
            metadata = Map.put(metadata, :error, error)
            Twirp.Telemetry.stop(:rpc, start, metadata)
            {:error, error}
        end
      end)

    call_chain.(ctx, req)
  end

  # Folds the interceptor list into one function: each interceptor receives
  # (ctx, req, next) and decides whether to invoke the rest of the chain.
  defp chain([], f), do: f

  defp chain([func | fs], acc_f) do
    next = fn ctx, req ->
      func.(ctx, req, acc_f)
    end

    chain(fs, next)
  end
end
| 30.12963 | 104 | 0.617496 |
7998bfe41fc28909fd40c74935f15926a3e4a557 | 184 | exs | Elixir | test/meshx_service_mesh_test.exs | andrzej-mag/meshx | 390fc96fdff7e4631a6f263ed1aebb506baadd3d | [
"Apache-2.0"
] | 18 | 2021-05-20T07:09:31.000Z | 2021-12-22T10:17:56.000Z | test/meshx_service_mesh_test.exs | andrzej-mag/meshx | 390fc96fdff7e4631a6f263ed1aebb506baadd3d | [
"Apache-2.0"
] | 1 | 2021-05-26T08:36:36.000Z | 2021-05-26T09:53:31.000Z | test/meshx_service_mesh_test.exs | andrzej-mag/meshx | 390fc96fdff7e4631a6f263ed1aebb506baadd3d | [
"Apache-2.0"
] | null | null | null | defmodule MeshxServiceMeshTest do
use ExUnit.Case
test "assert Meshx.ServiceMesh behavior exists" do
assert Meshx.ServiceMesh.__info__(:module) == Meshx.ServiceMesh
end
end
| 23 | 67 | 0.782609 |
7998e425b6419b053e43744ddcf9efaa80d7fcae | 34,803 | exs | Elixir | lib/elixir/test/elixir/registry_test.exs | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 2 | 2020-06-02T18:00:28.000Z | 2021-12-10T03:21:42.000Z | lib/elixir/test/elixir/registry_test.exs | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/registry_test.exs | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | null | null | null | Code.require_file("test_helper.exs", __DIR__)
defmodule RegistryTest do
use ExUnit.Case, async: true
doctest Registry, except: [:moduledoc]
  # Starts a fresh Registry per test, configured from the test's tags:
  # :keys (defaults to :unique), :partitions (defaults to 1) and an optional
  # :listener. The registry is named after the test itself so tests can run
  # async without name clashes; start_supervised/1 tears it down afterwards.
  setup config do
    keys = config[:keys] || :unique
    partitions = config[:partitions] || 1
    listeners = List.wrap(config[:listener])
    opts = [keys: keys, name: config.test, partitions: partitions, listeners: listeners]
    {:ok, _} = start_supervised({Registry, opts})
    {:ok, %{registry: config.test, partitions: partitions}}
  end
for {describe, partitions} <- ["with 1 partition": 1, "with 8 partitions": 8] do
describe "unique #{describe}" do
@describetag keys: :unique, partitions: partitions
test "starts configured number of partitions", %{registry: registry, partitions: partitions} do
assert length(Supervisor.which_children(registry)) == partitions
end
test "counts 0 keys in an empty registry", %{registry: registry} do
assert 0 == Registry.count(registry)
end
test "counts the number of keys in a registry", %{registry: registry} do
{:ok, _} = Registry.register(registry, "hello", :value)
{:ok, _} = Registry.register(registry, "world", :value)
assert 2 == Registry.count(registry)
end
test "has unique registrations", %{registry: registry} do
{:ok, pid} = Registry.register(registry, "hello", :value)
assert is_pid(pid)
assert Registry.keys(registry, self()) == ["hello"]
assert {:error, {:already_registered, pid}} = Registry.register(registry, "hello", :value)
assert pid == self()
assert Registry.keys(registry, self()) == ["hello"]
{:ok, pid} = Registry.register(registry, "world", :value)
assert is_pid(pid)
assert Registry.keys(registry, self()) |> Enum.sort() == ["hello", "world"]
end
test "has unique registrations across processes", %{registry: registry} do
{_, task} = register_task(registry, "hello", :value)
Process.link(Process.whereis(registry))
assert {:error, {:already_registered, ^task}} =
Registry.register(registry, "hello", :recent)
assert Registry.keys(registry, self()) == []
{:links, links} = Process.info(self(), :links)
assert Process.whereis(registry) in links
end
test "has unique registrations even if partition is delayed", %{registry: registry} do
{owner, task} = register_task(registry, "hello", :value)
assert Registry.register(registry, "hello", :other) ==
{:error, {:already_registered, task}}
:sys.suspend(owner)
kill_and_assert_down(task)
Registry.register(registry, "hello", :other)
assert Registry.lookup(registry, "hello") == [{self(), :other}]
end
test "supports match patterns", %{registry: registry} do
value = {1, :atom, 1}
{:ok, _} = Registry.register(registry, "hello", value)
assert Registry.match(registry, "hello", {1, :_, :_}) == [{self(), value}]
assert Registry.match(registry, "hello", {1.0, :_, :_}) == []
assert Registry.match(registry, "hello", {:_, :atom, :_}) == [{self(), value}]
assert Registry.match(registry, "hello", {:"$1", :_, :"$1"}) == [{self(), value}]
assert Registry.match(registry, "hello", :_) == [{self(), value}]
assert Registry.match(registry, :_, :_) == []
value2 = %{a: "a", b: "b"}
{:ok, _} = Registry.register(registry, "world", value2)
assert Registry.match(registry, "world", %{b: "b"}) == [{self(), value2}]
end
test "supports guard conditions", %{registry: registry} do
value = {1, :atom, 2}
{:ok, _} = Registry.register(registry, "hello", value)
assert Registry.match(registry, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 1}]) ==
[{self(), value}]
assert Registry.match(registry, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 2}]) == []
assert Registry.match(registry, "hello", {:_, :"$1", :_}, [{:is_atom, :"$1"}]) ==
[{self(), value}]
end
test "count_match supports match patterns", %{registry: registry} do
value = {1, :atom, 1}
{:ok, _} = Registry.register(registry, "hello", value)
assert 1 == Registry.count_match(registry, "hello", {1, :_, :_})
assert 0 == Registry.count_match(registry, "hello", {1.0, :_, :_})
assert 1 == Registry.count_match(registry, "hello", {:_, :atom, :_})
assert 1 == Registry.count_match(registry, "hello", {:"$1", :_, :"$1"})
assert 1 == Registry.count_match(registry, "hello", :_)
assert 0 == Registry.count_match(registry, :_, :_)
value2 = %{a: "a", b: "b"}
{:ok, _} = Registry.register(registry, "world", value2)
assert 1 == Registry.count_match(registry, "world", %{b: "b"})
end
test "count_match supports guard conditions", %{registry: registry} do
value = {1, :atom, 2}
{:ok, _} = Registry.register(registry, "hello", value)
assert 1 == Registry.count_match(registry, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 1}])
assert 0 == Registry.count_match(registry, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 2}])
assert 1 == Registry.count_match(registry, "hello", {:_, :"$1", :_}, [{:is_atom, :"$1"}])
end
test "unregister_match supports patterns", %{registry: registry} do
value = {1, :atom, 1}
{:ok, _} = Registry.register(registry, "hello", value)
Registry.unregister_match(registry, "hello", {2, :_, :_})
assert Registry.lookup(registry, "hello") == [{self(), value}]
Registry.unregister_match(registry, "hello", {1.0, :_, :_})
assert Registry.lookup(registry, "hello") == [{self(), value}]
Registry.unregister_match(registry, "hello", {:_, :atom, :_})
assert Registry.lookup(registry, "hello") == []
end
test "unregister_match supports guards", %{registry: registry} do
value = {1, :atom, 1}
{:ok, _} = Registry.register(registry, "hello", value)
Registry.unregister_match(registry, "hello", {:"$1", :_, :_}, [{:<, :"$1", 2}])
assert Registry.lookup(registry, "hello") == []
end
test "unregister_match supports tricky keys", %{registry: registry} do
{:ok, _} = Registry.register(registry, :_, :foo)
{:ok, _} = Registry.register(registry, "hello", "b")
Registry.unregister_match(registry, :_, :foo)
assert Registry.lookup(registry, :_) == []
assert Registry.keys(registry, self()) |> Enum.sort() == ["hello"]
end
test "compares using ===", %{registry: registry} do
{:ok, _} = Registry.register(registry, 1.0, :value)
{:ok, _} = Registry.register(registry, 1, :value)
assert Registry.keys(registry, self()) |> Enum.sort() == [1, 1.0]
end
test "updates current process value", %{registry: registry} do
assert Registry.update_value(registry, "hello", &raise/1) == :error
register_task(registry, "hello", :value)
assert Registry.update_value(registry, "hello", &raise/1) == :error
Registry.register(registry, "world", 1)
assert Registry.lookup(registry, "world") == [{self(), 1}]
assert Registry.update_value(registry, "world", &(&1 + 1)) == {2, 1}
assert Registry.lookup(registry, "world") == [{self(), 2}]
end
test "dispatches to a single key", %{registry: registry} do
fun = fn _ -> raise "will never be invoked" end
assert Registry.dispatch(registry, "hello", fun) == :ok
{:ok, _} = Registry.register(registry, "hello", :value)
fun = fn [{pid, value}] -> send(pid, {:dispatch, value}) end
assert Registry.dispatch(registry, "hello", fun)
assert_received {:dispatch, :value}
end
test "unregisters process by key", %{registry: registry} do
:ok = Registry.unregister(registry, "hello")
{:ok, _} = Registry.register(registry, "hello", :value)
{:ok, _} = Registry.register(registry, "world", :value)
assert Registry.keys(registry, self()) |> Enum.sort() == ["hello", "world"]
:ok = Registry.unregister(registry, "hello")
assert Registry.keys(registry, self()) == ["world"]
:ok = Registry.unregister(registry, "world")
assert Registry.keys(registry, self()) == []
end
test "unregisters with no entries", %{registry: registry} do
assert Registry.unregister(registry, "hello") == :ok
end
test "unregisters with tricky keys", %{registry: registry} do
{:ok, _} = Registry.register(registry, :_, :foo)
{:ok, _} = Registry.register(registry, "hello", "b")
Registry.unregister(registry, :_)
assert Registry.lookup(registry, :_) == []
assert Registry.keys(registry, self()) |> Enum.sort() == ["hello"]
end
@tag listener: :"unique_listener_#{partitions}"
test "allows listeners", %{registry: registry, listener: listener} do
Process.register(self(), listener)
{_, task} = register_task(registry, "hello", :world)
assert_received {:register, ^registry, "hello", ^task, :world}
self = self()
{:ok, _} = Registry.register(registry, "world", :value)
assert_received {:register, ^registry, "world", ^self, :value}
:ok = Registry.unregister(registry, "world")
assert_received {:unregister, ^registry, "world", ^self}
end
test "links and unlinks on register/unregister", %{registry: registry} do
{:ok, pid} = Registry.register(registry, "hello", :value)
{:links, links} = Process.info(self(), :links)
assert pid in links
{:ok, pid} = Registry.register(registry, "world", :value)
{:links, links} = Process.info(self(), :links)
assert pid in links
:ok = Registry.unregister(registry, "hello")
{:links, links} = Process.info(self(), :links)
assert pid in links
:ok = Registry.unregister(registry, "world")
{:links, links} = Process.info(self(), :links)
refute pid in links
end
test "raises on unknown registry name" do
assert_raise ArgumentError, ~r/unknown registry/, fn ->
Registry.register(:unknown, "hello", :value)
end
end
test "via callbacks", %{registry: registry} do
name = {:via, Registry, {registry, "hello"}}
# register_name
{:ok, pid} = Agent.start_link(fn -> 0 end, name: name)
# send
assert Agent.update(name, &(&1 + 1)) == :ok
# whereis_name
assert Agent.get(name, & &1) == 1
# unregister_name
assert {:error, _} = Agent.start(fn -> raise "oops" end)
# errors
assert {:error, {:already_started, ^pid}} = Agent.start(fn -> 0 end, name: name)
end
test "uses value provided in via", %{registry: registry} do
name = {:via, Registry, {registry, "hello", :value}}
{:ok, pid} = Agent.start_link(fn -> 0 end, name: name)
assert Registry.lookup(registry, "hello") == [{pid, :value}]
end
test "empty list for empty registry", %{registry: registry} do
assert Registry.select(registry, [{{:_, :_, :_}, [], [:"$_"]}]) == []
end
test "select all", %{registry: registry} do
name = {:via, Registry, {registry, "hello"}}
{:ok, pid} = Agent.start_link(fn -> 0 end, name: name)
{:ok, _} = Registry.register(registry, "world", :value)
assert Registry.select(registry, [{{:"$1", :"$2", :"$3"}, [], [{{:"$1", :"$2", :"$3"}}]}])
|> Enum.sort() ==
[{"hello", pid, nil}, {"world", self(), :value}]
end
test "select supports full match specs", %{registry: registry} do
value = {1, :atom, 1}
{:ok, _} = Registry.register(registry, "hello", value)
assert [{"hello", self(), value}] ==
Registry.select(registry, [
{{"hello", :"$2", :"$3"}, [], [{{"hello", :"$2", :"$3"}}]}
])
assert [{"hello", self(), value}] ==
Registry.select(registry, [
{{:"$1", self(), :"$3"}, [], [{{:"$1", self(), :"$3"}}]}
])
assert [{"hello", self(), value}] ==
Registry.select(registry, [
{{:"$1", :"$2", value}, [], [{{:"$1", :"$2", {value}}}]}
])
assert [] ==
Registry.select(registry, [
{{"world", :"$2", :"$3"}, [], [{{"world", :"$2", :"$3"}}]}
])
assert [] == Registry.select(registry, [{{:"$1", :"$2", {1.0, :_, :_}}, [], [:"$_"]}])
assert [{"hello", self(), value}] ==
Registry.select(registry, [
{{:"$1", :"$2", {:"$3", :atom, :"$4"}}, [],
[{{:"$1", :"$2", {{:"$3", :atom, :"$4"}}}}]}
])
assert [{"hello", self(), {1, :atom, 1}}] ==
Registry.select(registry, [
{{:"$1", :"$2", {:"$3", :"$4", :"$3"}}, [],
[{{:"$1", :"$2", {{:"$3", :"$4", :"$3"}}}}]}
])
value2 = %{a: "a", b: "b"}
{:ok, _} = Registry.register(registry, "world", value2)
assert [:match] ==
Registry.select(registry, [{{"world", self(), %{b: "b"}}, [], [:match]}])
assert ["hello", "world"] ==
Registry.select(registry, [{{:"$1", :_, :_}, [], [:"$1"]}]) |> Enum.sort()
end
test "select supports guard conditions", %{registry: registry} do
value = {1, :atom, 2}
{:ok, _} = Registry.register(registry, "hello", value)
assert [{"hello", self(), {1, :atom, 2}}] ==
Registry.select(registry, [
{{:"$1", :"$2", {:"$3", :"$4", :"$5"}}, [{:>, :"$5", 1}],
[{{:"$1", :"$2", {{:"$3", :"$4", :"$5"}}}}]}
])
assert [] ==
Registry.select(registry, [
{{:_, :_, {:_, :_, :"$1"}}, [{:>, :"$1", 2}], [:"$_"]}
])
assert ["hello"] ==
Registry.select(registry, [
{{:"$1", :_, {:_, :"$2", :_}}, [{:is_atom, :"$2"}], [:"$1"]}
])
end
test "select allows multiple specs", %{registry: registry} do
{:ok, _} = Registry.register(registry, "hello", :value)
{:ok, _} = Registry.register(registry, "world", :value)
assert ["hello", "world"] ==
Registry.select(registry, [
{{"hello", :_, :_}, [], [{:element, 1, :"$_"}]},
{{"world", :_, :_}, [], [{:element, 1, :"$_"}]}
])
|> Enum.sort()
end
test "raises on incorrect shape of match spec", %{registry: registry} do
assert_raise ArgumentError, fn ->
Registry.select(registry, [{:_, [], []}])
end
end
test "doesn't grow ets on already_registered",
%{registry: registry, partitions: partitions} do
assert sum_pid_entries(registry, partitions) == 0
{:ok, pid} = Registry.register(registry, "hello", :value)
assert is_pid(pid)
assert sum_pid_entries(registry, partitions) == 1
{:ok, pid} = Registry.register(registry, "world", :value)
assert is_pid(pid)
assert sum_pid_entries(registry, partitions) == 2
assert {:error, {:already_registered, _pid}} =
Registry.register(registry, "hello", :value)
assert sum_pid_entries(registry, partitions) == 2
end
test "doesn't grow ets on already_registered across processes",
%{registry: registry, partitions: partitions} do
assert sum_pid_entries(registry, partitions) == 0
{_, task} = register_task(registry, "hello", :value)
Process.link(Process.whereis(registry))
assert sum_pid_entries(registry, partitions) == 1
{:ok, pid} = Registry.register(registry, "world", :value)
assert is_pid(pid)
assert sum_pid_entries(registry, partitions) == 2
assert {:error, {:already_registered, ^task}} =
Registry.register(registry, "hello", :recent)
assert sum_pid_entries(registry, partitions) == 2
end
end
end
for {describe, partitions} <- ["with 1 partition": 1, "with 8 partitions": 8] do
describe "duplicate #{describe}" do
@describetag keys: :duplicate, partitions: partitions
test "starts configured number of partitions", %{registry: registry, partitions: partitions} do
assert length(Supervisor.which_children(registry)) == partitions
end
test "counts 0 keys in an empty registry", %{registry: registry} do
assert 0 == Registry.count(registry)
end
test "counts the number of keys in a registry", %{registry: registry} do
{:ok, _} = Registry.register(registry, "hello", :value)
{:ok, _} = Registry.register(registry, "hello", :value)
assert 2 == Registry.count(registry)
end
test "has duplicate registrations", %{registry: registry} do
{:ok, pid} = Registry.register(registry, "hello", :value)
assert is_pid(pid)
assert Registry.keys(registry, self()) == ["hello"]
assert {:ok, pid} = Registry.register(registry, "hello", :value)
assert is_pid(pid)
assert Registry.keys(registry, self()) == ["hello", "hello"]
{:ok, pid} = Registry.register(registry, "world", :value)
assert is_pid(pid)
assert Registry.keys(registry, self()) |> Enum.sort() == ["hello", "hello", "world"]
end
test "compares using matches", %{registry: registry} do
{:ok, _} = Registry.register(registry, 1.0, :value)
{:ok, _} = Registry.register(registry, 1, :value)
assert Registry.keys(registry, self()) |> Enum.sort() == [1, 1.0]
end
test "dispatches to multiple keys in serial", %{registry: registry} do
Process.flag(:trap_exit, true)
parent = self()
fun = fn _ -> raise "will never be invoked" end
assert Registry.dispatch(registry, "hello", fun, parallel: false) == :ok
{:ok, _} = Registry.register(registry, "hello", :value1)
{:ok, _} = Registry.register(registry, "hello", :value2)
{:ok, _} = Registry.register(registry, "world", :value3)
fun = fn entries ->
assert parent == self()
for {pid, value} <- entries, do: send(pid, {:dispatch, value})
end
assert Registry.dispatch(registry, "hello", fun, parallel: false)
assert_received {:dispatch, :value1}
assert_received {:dispatch, :value2}
refute_received {:dispatch, :value3}
fun = fn entries ->
assert parent == self()
for {pid, value} <- entries, do: send(pid, {:dispatch, value})
end
assert Registry.dispatch(registry, "world", fun, parallel: false)
refute_received {:dispatch, :value1}
refute_received {:dispatch, :value2}
assert_received {:dispatch, :value3}
refute_received {:EXIT, _, _}
end
test "dispatches to multiple keys in parallel", context do
%{registry: registry, partitions: partitions} = context
Process.flag(:trap_exit, true)
parent = self()
fun = fn _ -> raise "will never be invoked" end
assert Registry.dispatch(registry, "hello", fun, parallel: true) == :ok
{:ok, _} = Registry.register(registry, "hello", :value1)
{:ok, _} = Registry.register(registry, "hello", :value2)
{:ok, _} = Registry.register(registry, "world", :value3)
fun = fn entries ->
if partitions == 8 do
assert parent != self()
else
assert parent == self()
end
for {pid, value} <- entries, do: send(pid, {:dispatch, value})
end
assert Registry.dispatch(registry, "hello", fun, parallel: true)
assert_received {:dispatch, :value1}
assert_received {:dispatch, :value2}
refute_received {:dispatch, :value3}
fun = fn entries ->
if partitions == 8 do
assert parent != self()
else
assert parent == self()
end
for {pid, value} <- entries, do: send(pid, {:dispatch, value})
end
assert Registry.dispatch(registry, "world", fun, parallel: true)
refute_received {:dispatch, :value1}
refute_received {:dispatch, :value2}
assert_received {:dispatch, :value3}
refute_received {:EXIT, _, _}
end
test "unregisters by key", %{registry: registry} do
{:ok, _} = Registry.register(registry, "hello", :value)
{:ok, _} = Registry.register(registry, "hello", :value)
{:ok, _} = Registry.register(registry, "world", :value)
assert Registry.keys(registry, self()) |> Enum.sort() == ["hello", "hello", "world"]
:ok = Registry.unregister(registry, "hello")
assert Registry.keys(registry, self()) == ["world"]
:ok = Registry.unregister(registry, "world")
assert Registry.keys(registry, self()) == []
end
test "unregisters with no entries", %{registry: registry} do
assert Registry.unregister(registry, "hello") == :ok
end
test "unregisters with tricky keys", %{registry: registry} do
{:ok, _} = Registry.register(registry, :_, :foo)
{:ok, _} = Registry.register(registry, :_, :bar)
{:ok, _} = Registry.register(registry, "hello", "a")
{:ok, _} = Registry.register(registry, "hello", "b")
Registry.unregister(registry, :_)
assert Registry.keys(registry, self()) |> Enum.sort() == ["hello", "hello"]
end
test "supports match patterns", %{registry: registry} do
value1 = {1, :atom, 1}
value2 = {2, :atom, 2}
{:ok, _} = Registry.register(registry, "hello", value1)
{:ok, _} = Registry.register(registry, "hello", value2)
assert Registry.match(registry, "hello", {1, :_, :_}) == [{self(), value1}]
assert Registry.match(registry, "hello", {1.0, :_, :_}) == []
assert Registry.match(registry, "hello", {:_, :atom, :_}) |> Enum.sort() ==
[{self(), value1}, {self(), value2}]
assert Registry.match(registry, "hello", {:"$1", :_, :"$1"}) |> Enum.sort() ==
[{self(), value1}, {self(), value2}]
assert Registry.match(registry, "hello", {2, :_, :_}) == [{self(), value2}]
assert Registry.match(registry, "hello", {2.0, :_, :_}) == []
end
test "supports guards", %{registry: registry} do
value1 = {1, :atom, 1}
value2 = {2, :atom, 2}
{:ok, _} = Registry.register(registry, "hello", value1)
{:ok, _} = Registry.register(registry, "hello", value2)
assert Registry.match(registry, "hello", {:"$1", :_, :_}, [{:<, :"$1", 2}]) ==
[{self(), value1}]
assert Registry.match(registry, "hello", {:"$1", :_, :_}, [{:>, :"$1", 3}]) == []
assert Registry.match(registry, "hello", {:"$1", :_, :_}, [{:<, :"$1", 3}]) |> Enum.sort() ==
[{self(), value1}, {self(), value2}]
assert Registry.match(registry, "hello", {:_, :"$1", :_}, [{:is_atom, :"$1"}])
|> Enum.sort() == [{self(), value1}, {self(), value2}]
end
test "count_match supports match patterns", %{registry: registry} do
value = {1, :atom, 1}
{:ok, _} = Registry.register(registry, "hello", value)
assert 1 == Registry.count_match(registry, "hello", {1, :_, :_})
assert 0 == Registry.count_match(registry, "hello", {1.0, :_, :_})
assert 1 == Registry.count_match(registry, "hello", {:_, :atom, :_})
assert 1 == Registry.count_match(registry, "hello", {:"$1", :_, :"$1"})
assert 1 == Registry.count_match(registry, "hello", :_)
assert 0 == Registry.count_match(registry, :_, :_)
value2 = %{a: "a", b: "b"}
{:ok, _} = Registry.register(registry, "world", value2)
assert 1 == Registry.count_match(registry, "world", %{b: "b"})
end
test "count_match supports guard conditions", %{registry: registry} do
value = {1, :atom, 2}
{:ok, _} = Registry.register(registry, "hello", value)
assert 1 == Registry.count_match(registry, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 1}])
assert 0 == Registry.count_match(registry, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 2}])
assert 1 == Registry.count_match(registry, "hello", {:_, :"$1", :_}, [{:is_atom, :"$1"}])
end
test "unregister_match supports patterns", %{registry: registry} do
value1 = {1, :atom, 1}
value2 = {2, :atom, 2}
{:ok, _} = Registry.register(registry, "hello", value1)
{:ok, _} = Registry.register(registry, "hello", value2)
Registry.unregister_match(registry, "hello", {2, :_, :_})
assert Registry.lookup(registry, "hello") == [{self(), value1}]
{:ok, _} = Registry.register(registry, "hello", value2)
Registry.unregister_match(registry, "hello", {2.0, :_, :_})
assert Registry.lookup(registry, "hello") == [{self(), value1}, {self(), value2}]
Registry.unregister_match(registry, "hello", {:_, :atom, :_})
assert Registry.lookup(registry, "hello") == []
end
test "unregister_match supports guards", %{registry: registry} do
value1 = {1, :atom, 1}
value2 = {2, :atom, 2}
{:ok, _} = Registry.register(registry, "hello", value1)
{:ok, _} = Registry.register(registry, "hello", value2)
Registry.unregister_match(registry, "hello", {:"$1", :_, :_}, [{:<, :"$1", 2}])
assert Registry.lookup(registry, "hello") == [{self(), value2}]
end
test "unregister_match supports tricky keys", %{registry: registry} do
{:ok, _} = Registry.register(registry, :_, :foo)
{:ok, _} = Registry.register(registry, :_, :bar)
{:ok, _} = Registry.register(registry, "hello", "a")
{:ok, _} = Registry.register(registry, "hello", "b")
Registry.unregister_match(registry, :_, :foo)
assert Registry.lookup(registry, :_) == [{self(), :bar}]
assert Registry.keys(registry, self()) |> Enum.sort() == [:_, "hello", "hello"]
end
@tag listener: :"duplicate_listener_#{partitions}"
test "allows listeners", %{registry: registry, listener: listener} do
Process.register(self(), listener)
{_, task} = register_task(registry, "hello", :world)
assert_received {:register, ^registry, "hello", ^task, :world}
self = self()
{:ok, _} = Registry.register(registry, "hello", :value)
assert_received {:register, ^registry, "hello", ^self, :value}
:ok = Registry.unregister(registry, "hello")
assert_received {:unregister, ^registry, "hello", ^self}
end
test "links and unlinks on register/unregister", %{registry: registry} do
{:ok, pid} = Registry.register(registry, "hello", :value)
{:links, links} = Process.info(self(), :links)
assert pid in links
{:ok, pid} = Registry.register(registry, "world", :value)
{:links, links} = Process.info(self(), :links)
assert pid in links
:ok = Registry.unregister(registry, "hello")
{:links, links} = Process.info(self(), :links)
assert pid in links
:ok = Registry.unregister(registry, "world")
{:links, links} = Process.info(self(), :links)
refute pid in links
end
test "raises on unknown registry name" do
assert_raise ArgumentError, ~r/unknown registry/, fn ->
Registry.register(:unknown, "hello", :value)
end
end
test "raises if attempt to be used on via", %{registry: registry} do
assert_raise ArgumentError, ":via is not supported for duplicate registries", fn ->
name = {:via, Registry, {registry, "hello"}}
Agent.start_link(fn -> 0 end, name: name)
end
end
test "empty list for empty registry", %{registry: registry} do
assert Registry.select(registry, [{{:_, :_, :_}, [], [:"$_"]}]) == []
end
test "select all", %{registry: registry} do
{:ok, _} = Registry.register(registry, "hello", :value)
{:ok, _} = Registry.register(registry, "hello", :value)
assert Registry.select(registry, [{{:"$1", :"$2", :"$3"}, [], [{{:"$1", :"$2", :"$3"}}]}])
|> Enum.sort() ==
[{"hello", self(), :value}, {"hello", self(), :value}]
end
test "select supports full match specs", %{registry: registry} do
value = {1, :atom, 1}
{:ok, _} = Registry.register(registry, "hello", value)
assert [{"hello", self(), value}] ==
Registry.select(registry, [
{{"hello", :"$2", :"$3"}, [], [{{"hello", :"$2", :"$3"}}]}
])
assert [{"hello", self(), value}] ==
Registry.select(registry, [
{{:"$1", self(), :"$3"}, [], [{{:"$1", self(), :"$3"}}]}
])
assert [{"hello", self(), value}] ==
Registry.select(registry, [
{{:"$1", :"$2", value}, [], [{{:"$1", :"$2", {value}}}]}
])
assert [] ==
Registry.select(registry, [
{{"world", :"$2", :"$3"}, [], [{{"world", :"$2", :"$3"}}]}
])
assert [] == Registry.select(registry, [{{:"$1", :"$2", {1.0, :_, :_}}, [], [:"$_"]}])
assert [{"hello", self(), value}] ==
Registry.select(registry, [
{{:"$1", :"$2", {:"$3", :atom, :"$4"}}, [],
[{{:"$1", :"$2", {{:"$3", :atom, :"$4"}}}}]}
])
assert [{"hello", self(), {1, :atom, 1}}] ==
Registry.select(registry, [
{{:"$1", :"$2", {:"$3", :"$4", :"$3"}}, [],
[{{:"$1", :"$2", {{:"$3", :"$4", :"$3"}}}}]}
])
value2 = %{a: "a", b: "b"}
{:ok, _} = Registry.register(registry, "world", value2)
assert [:match] ==
Registry.select(registry, [{{"world", self(), %{b: "b"}}, [], [:match]}])
assert ["hello", "world"] ==
Registry.select(registry, [{{:"$1", :_, :_}, [], [:"$1"]}]) |> Enum.sort()
end
test "select supports guard conditions", %{registry: registry} do
value = {1, :atom, 2}
{:ok, _} = Registry.register(registry, "hello", value)
assert [{"hello", self(), {1, :atom, 2}}] ==
Registry.select(registry, [
{{:"$1", :"$2", {:"$3", :"$4", :"$5"}}, [{:>, :"$5", 1}],
[{{:"$1", :"$2", {{:"$3", :"$4", :"$5"}}}}]}
])
assert [] ==
Registry.select(registry, [
{{:_, :_, {:_, :_, :"$1"}}, [{:>, :"$1", 2}], [:"$_"]}
])
assert ["hello"] ==
Registry.select(registry, [
{{:"$1", :_, {:_, :"$2", :_}}, [{:is_atom, :"$2"}], [:"$1"]}
])
end
test "select allows multiple specs", %{registry: registry} do
{:ok, _} = Registry.register(registry, "hello", :value)
{:ok, _} = Registry.register(registry, "world", :value)
assert ["hello", "world"] ==
Registry.select(registry, [
{{"hello", :_, :_}, [], [{:element, 1, :"$_"}]},
{{"world", :_, :_}, [], [{:element, 1, :"$_"}]}
])
|> Enum.sort()
end
end
end
  # NOTE: these tests rely on Registry internals (the layout of the ETS
  # partition-index table), not on the public API.
  for keys <- [:unique, :duplicate] do
    describe "clean up #{keys} registry on process crash" do
      @describetag keys: keys
      @tag partitions: 8
      test "with 8 partitions", %{registry: registry} do
        {_, task1} = register_task(registry, "hello", :value)
        {_, task2} = register_task(registry, "world", :value)
        kill_and_assert_down(task1)
        kill_and_assert_down(task2)
        # pid might be in different partition to key so need to sync with all
        # partitions before checking ETS tables are empty.
        for i <- 0..7 do
          [{_, _, {partition, _}}] = :ets.lookup(registry, i)
          GenServer.call(partition, :sync)
        end
        # Each partition row appears to hold a key table and a pid table;
        # both must be empty after the owning tasks died.
        for i <- 0..7 do
          [{_, key, {_, pid}}] = :ets.lookup(registry, i)
          assert :ets.tab2list(key) == []
          assert :ets.tab2list(pid) == []
        end
      end
      @tag partitions: 1
      test "with 1 partition", %{registry: registry} do
        {_, task1} = register_task(registry, "hello", :value)
        {_, task2} = register_task(registry, "world", :value)
        kill_and_assert_down(task1)
        kill_and_assert_down(task2)
        # A single-partition registry keeps its state under index -1.
        [{-1, {_, _, key, {partition, pid}, _}}] = :ets.lookup(registry, -1)
        GenServer.call(partition, :sync)
        assert :ets.tab2list(key) == []
        assert :ets.tab2list(pid) == []
      end
    end
  end
test "child_spec/1 uses :name as :id" do
assert %{id: :custom_name} = Registry.child_spec(name: :custom_name)
assert %{id: Registry} = Registry.child_spec([])
end
test "raises if :name is missing" do
assert_raise ArgumentError, ~r/expected :name option to be present/, fn ->
Registry.start_link(keys: :unique)
end
end
test "raises if :name is not an atom" do
assert_raise ArgumentError, ~r/expected :name to be an atom, got/, fn ->
Registry.start_link(keys: :unique, name: [])
end
end
defp register_task(registry, key, value) do
parent = self()
{:ok, task} =
Task.start(fn ->
send(parent, Registry.register(registry, key, value))
Process.sleep(:infinity)
end)
assert_receive {:ok, owner}
{owner, task}
end
defp kill_and_assert_down(pid) do
ref = Process.monitor(pid)
Process.exit(pid, :kill)
assert_receive {:DOWN, ^ref, _, _, _}
end
defp sum_pid_entries(registry, partitions) do
Enum.map(0..(partitions - 1), &Module.concat(registry, "PIDPartition#{&1}"))
|> sum_ets_entries()
end
defp sum_ets_entries(table_names) do
table_names
|> Enum.map(&ets_entries/1)
|> Enum.sum()
end
defp ets_entries(table_name) do
:ets.all()
|> Enum.find_value(fn id -> :ets.info(id, :name) == table_name and :ets.info(id, :size) end)
end
end
| 38.627081 | 101 | 0.535615 |
7998fbfb4d11b0bfca587ae02d17b172fa54d7f9 | 3,566 | exs | Elixir | test/crontab/date_checker_test.exs | mveytsman/crontab | f42a90a1ab227c9137bd9f31d33ccfb1a82439e2 | [
"MIT"
] | 71 | 2017-01-17T12:43:20.000Z | 2022-03-11T09:31:32.000Z | test/crontab/date_checker_test.exs | mveytsman/crontab | f42a90a1ab227c9137bd9f31d33ccfb1a82439e2 | [
"MIT"
] | 70 | 2017-01-18T08:49:19.000Z | 2022-03-31T00:34:47.000Z | test/crontab/date_checker_test.exs | mveytsman/crontab | f42a90a1ab227c9137bd9f31d33ccfb1a82439e2 | [
"MIT"
] | 29 | 2017-01-02T07:24:11.000Z | 2022-03-11T15:57:56.000Z | defmodule Crontab.DateCheckerTest do
@moduledoc false
use ExUnit.Case, async: true
doctest Crontab.DateChecker
import Crontab.DateChecker
test "2002-01-13 23:00:07 matches * * * * *" do
base_date = ~N[2002-01-13 23:00:07]
assert matches_date?(:minute, [:*], base_date) == true
assert matches_date?(:hour, [:*], base_date) == true
assert matches_date?(:day, [:*], base_date) == true
assert matches_date?(:month, [:*], base_date) == true
assert matches_date?(:weekday, [:*], base_date) == true
assert matches_date?(:year, [:*], base_date) == true
end
test "2002-01-13 23:00:07Z matches * * * * *" do
base_date = ~U[2002-01-13 23:00:07Z]
assert matches_date?(:minute, [:*], base_date) == true
assert matches_date?(:hour, [:*], base_date) == true
assert matches_date?(:day, [:*], base_date) == true
assert matches_date?(:month, [:*], base_date) == true
assert matches_date?(:weekday, [:*], base_date) == true
assert matches_date?(:year, [:*], base_date) == true
end
test "2004-04-16 04:04:08 matches */4 */4 */4 */5 */4" do
base_date = ~N[2004-04-16 04:04:08]
assert matches_date?(:second, [{:/, :*, 4}], base_date) == true
assert matches_date?(:minute, [{:/, :*, 4}], base_date) == true
assert matches_date?(:hour, [{:/, :*, 4}], base_date) == true
assert matches_date?(:day, [{:/, :*, 4}], base_date) == true
assert matches_date?(:month, [{:/, :*, 4}], base_date) == true
assert matches_date?(:weekday, [{:/, :*, 5}], base_date) == true
assert matches_date?(:year, [{:/, :*, 4}], base_date) == true
end
test "2003-04-17 04:04:08 doesn't match */3 */3 */3 */3 */3" do
base_date = ~N[2003-04-17 04:04:08]
assert matches_date?(:minute, [{:/, :*, 3}], base_date) == false
assert matches_date?(:hour, [{:/, :*, 3}], base_date) == false
assert matches_date?(:day, [{:/, :*, 3}], base_date) == false
assert matches_date?(:month, [{:/, :*, 3}], base_date) == false
assert matches_date?(:weekday, [{:/, :*, 3}], base_date) == false
assert matches_date?(:year, [{:/, :*, 3}], base_date) == false
end
test "2003-04-17 04:04:08 doesn't match */3 */3 */3 */3 */3 */3" do
base_date = ~N[2003-04-17 04:04:08]
assert matches_date?(:second, [{:/, :*, 3}], base_date) == false
assert matches_date?(:minute, [{:/, :*, 3}], base_date) == false
assert matches_date?(:hour, [{:/, :*, 3}], base_date) == false
assert matches_date?(:day, [{:/, :*, 3}], base_date) == false
assert matches_date?(:month, [{:/, :*, 3}], base_date) == false
assert matches_date?(:weekday, [{:/, :*, 3}], base_date) == false
assert matches_date?(:year, [{:/, :*, 3}], base_date) == false
end
test "2003-04-17 04:04:08Z doesn't match */3 */3 */3 */3 */3 */3" do
base_date = ~U[2003-04-17 04:04:08Z]
assert matches_date?(:second, [{:/, :*, 3}], base_date) == false
assert matches_date?(:minute, [{:/, :*, 3}], base_date) == false
assert matches_date?(:hour, [{:/, :*, 3}], base_date) == false
assert matches_date?(:day, [{:/, :*, 3}], base_date) == false
assert matches_date?(:month, [{:/, :*, 3}], base_date) == false
assert matches_date?(:weekday, [{:/, :*, 3}], base_date) == false
assert matches_date?(:year, [{:/, :*, 3}], base_date) == false
end
test "fail on @reboot" do
base_date = ~N[2003-04-17 04:04:08]
assert_raise RuntimeError, "Special identifier @reboot is not supported.", fn ->
matches_date?(%Crontab.CronExpression{reboot: true}, base_date)
end
end
end
| 44.024691 | 84 | 0.587213 |
7999182f2a0350c2044a70a6531775ffddc2cf1e | 1,395 | ex | Elixir | lib/freshcom_web/normalization.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | 9 | 2018-12-16T14:02:59.000Z | 2021-01-19T07:25:40.000Z | lib/freshcom_web/normalization.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | null | null | null | lib/freshcom_web/normalization.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
defmodule FreshcomWeb.Normalization do
  @moduledoc """
  Helpers for converting between camelCase (JSON:API style) and
  snake_case (Elixir style) keys and values.
  """

  @doc """
  Underscores the value of every entry in `map` whose key is in `keys`
  and whose value is truthy; every other entry passes through unchanged.
  """
  def underscore(map, keys) when is_map(map) do
    Map.new(map, fn {key, value} ->
      if value && key in keys do
        {key, Inflex.underscore(value)}
      else
        {key, value}
      end
    end)
  end

  @doc """
  Underscores a single string.
  """
  def underscore(str) when is_binary(str) do
    Inflex.underscore(str)
  end

  @doc """
  Recursively underscore the keys of a given map.
  """
  def underscore_keys(map) when is_map(map) do
    for {key, value} <- map, into: %{} do
      {underscore(key), underscore_keys(value)}
    end
  end

  def underscore_keys(list) when is_list(list) do
    Enum.map(list, &underscore_keys/1)
  end

  # Scalars (and any other term) are returned as-is.
  def underscore_keys(item), do: item

  @doc """
  Camelizes a string with a lower-cased first letter.
  """
  def camelize(str) do
    Inflex.camelize(str, :lower)
  end

  @doc """
  Camelizes the top-level keys of a map (non-recursive).
  """
  def camelize_keys(map) do
    Map.new(map, fn {key, value} -> {camelize(key), value} end)
  end

  @doc """
  Converts `{:error, key, reason}` tuples into JSON:API error objects,
  preserving their order.
  """
  def to_jsonapi_errors(errors) do
    Enum.map(errors, &to_jsonapi_error/1)
  end

  defp to_jsonapi_error({:error, key, {reason, meta}}) do
    %{code: camelize(reason), source: error_source(key), meta: Map.new(meta)}
  end

  defp to_jsonapi_error({:error, key, reason}) do
    %{code: camelize(reason), source: error_source(key), meta: %{}}
  end

  # JSON:API "source" pointer for the attribute that caused the error.
  defp error_source(key) do
    %{pointer: "/data/attributes/#{camelize(key)}"}
  end
end
79992862d51e2fdef7765339fd1a670e55ce84b6 | 3,719 | exs | Elixir | test/siwapp_web/controllers/user_confirmation_controller_test.exs | jakon89/siwapp | b5f8fd43458deae72c76e434ed0c63b620cb97a4 | [
"MIT"
] | 4 | 2015-02-12T09:23:47.000Z | 2022-03-09T18:11:06.000Z | test/siwapp_web/controllers/user_confirmation_controller_test.exs | jakon89/siwapp | b5f8fd43458deae72c76e434ed0c63b620cb97a4 | [
"MIT"
] | 254 | 2021-12-09T14:40:41.000Z | 2022-03-31T08:09:37.000Z | test/siwapp_web/controllers/user_confirmation_controller_test.exs | jakon89/siwapp | b5f8fd43458deae72c76e434ed0c63b620cb97a4 | [
"MIT"
] | 1 | 2022-03-07T10:25:49.000Z | 2022-03-07T10:25:49.000Z | defmodule SiwappWeb.UserConfirmationControllerTest do
use SiwappWeb.ConnCase, async: true
import Siwapp.AccountsFixtures
alias Siwapp.Accounts
alias Siwapp.Repo
# Every test in this module starts from a freshly inserted user.
setup do
  %{user: user_fixture()}
end
describe "GET /users/confirm" do
test "renders the resend confirmation page", %{conn: conn} do
conn = get(conn, Routes.user_confirmation_path(conn, :new))
response = html_response(conn, 200)
assert response =~ "<h1>Resend confirmation instructions</h1>"
end
end
describe "POST /users/confirm" do
@tag :capture_log
test "sends a new confirmation token", %{conn: conn, user: user} do
conn =
post(conn, Routes.user_confirmation_path(conn, :create), %{
"user" => %{"email" => user.email}
})
assert redirected_to(conn) == "/"
assert get_flash(conn, :info) =~ "If your email is in our system"
assert Repo.get_by!(Accounts.UserToken, user_id: user.id).context == "confirm"
end
test "does not send confirmation token if User is confirmed", %{conn: conn, user: user} do
Repo.update!(Accounts.User.confirm_changeset(user))
conn =
post(conn, Routes.user_confirmation_path(conn, :create), %{
"user" => %{"email" => user.email}
})
assert redirected_to(conn) == "/"
assert get_flash(conn, :info) =~ "If your email is in our system"
refute Repo.get_by(Accounts.UserToken, user_id: user.id)
end
test "does not send confirmation token if email is invalid", %{conn: conn} do
conn =
post(conn, Routes.user_confirmation_path(conn, :create), %{
"user" => %{"email" => "[email protected]"}
})
assert redirected_to(conn) == "/"
assert get_flash(conn, :info) =~ "If your email is in our system"
assert Repo.all(Accounts.UserToken) == []
end
end
describe "GET /users/confirm/:token" do
test "renders the confirmation page", %{conn: conn} do
conn = get(conn, Routes.user_confirmation_path(conn, :edit, "some-token"))
response = html_response(conn, 200)
assert response =~ "<h1>Confirm account</h1>"
form_action = Routes.user_confirmation_path(conn, :update, "some-token")
assert response =~ "action=\"#{form_action}\""
end
end
describe "POST /users/confirm/:token" do
test "confirms the given token once", %{conn: conn, user: user} do
token =
extract_user_token(fn url ->
Accounts.deliver_user_confirmation_instructions(user, url)
end)
conn = post(conn, Routes.user_confirmation_path(conn, :update, token))
assert redirected_to(conn) == "/"
assert get_flash(conn, :info) =~ "User confirmed successfully"
assert Accounts.get_user!(user.id).confirmed_at
refute get_session(conn, :user_token)
assert Repo.all(Accounts.UserToken) == []
# When not logged in
conn = post(conn, Routes.user_confirmation_path(conn, :update, token))
assert redirected_to(conn) == "/"
assert get_flash(conn, :error) =~ "User confirmation link is invalid or it has expired"
# When logged in
conn =
build_conn()
|> log_in_user(user)
|> post(Routes.user_confirmation_path(conn, :update, token))
assert redirected_to(conn) == "/"
refute get_flash(conn, :error)
end
test "does not confirm email with invalid token", %{conn: conn, user: user} do
conn = post(conn, Routes.user_confirmation_path(conn, :update, "oops"))
assert redirected_to(conn) == "/"
assert get_flash(conn, :error) =~ "User confirmation link is invalid or it has expired"
refute Accounts.get_user!(user.id).confirmed_at
end
end
end
| 35.084906 | 94 | 0.648024 |
79993ac0d1f5a5bd8bf69a4a09d4ec525ca4bf24 | 27 | ex | Elixir | lib/bloglixir.ex | crertel/bloglixir | 6c8ee7775ee1f76f8bb9c9f8a9db74e6cf5eecc6 | [
"MIT"
] | null | null | null | lib/bloglixir.ex | crertel/bloglixir | 6c8ee7775ee1f76f8bb9c9f8a9db74e6cf5eecc6 | [
"MIT"
] | null | null | null | lib/bloglixir.ex | crertel/bloglixir | 6c8ee7775ee1f76f8bb9c9f8a9db74e6cf5eecc6 | [
"MIT"
defmodule Bloglixir do
  @moduledoc """
  Top-level namespace for the Bloglixir application.
  """
end
| 9 | 22 | 0.851852 |
79993d725a3a3bc7124c987d59878af108ee7623 | 12,194 | ex | Elixir | clients/you_tube_analytics/lib/google_api/you_tube_analytics/v1/api/groups.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/you_tube_analytics/lib/google_api/you_tube_analytics/v1/api/groups.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/you_tube_analytics/lib/google_api/you_tube_analytics/v1/api/groups.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTubeAnalytics.V1.Api.Groups do
  @moduledoc """
  API calls for all endpoints tagged `Groups`.
  """

  alias GoogleApi.YouTubeAnalytics.V1.Connection
  # Provides the request-building pipeline used below (method/url/add_param/...).
  import GoogleApi.YouTubeAnalytics.V1.RequestBuilder

  @doc """
  Deletes a group.

  ## Parameters

  - connection (GoogleApi.YouTubeAnalytics.V1.Connection): Connection to server
  - id (String): The id parameter specifies the YouTube group ID for the group that is being deleted.
  - opts (KeywordList): [optional] Optional parameters
    - :alt (String): Data format for the response.
    - :fields (String): Selector specifying which fields to include in a partial response.
    - :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String): OAuth 2.0 token for the current user.
    - :pretty_print (Boolean): Returns response with indentations and line breaks.
    - :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    - :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    - :on_behalf_of_content_owner (String): Note: This parameter is intended exclusively for YouTube content partners. The onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.

  ## Returns

  {:ok, %{}} on success
  {:error, info} on failure
  """
  @spec youtube_analytics_groups_delete(Tesla.Env.client, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
  def youtube_analytics_groups_delete(connection, id, opts \\ []) do
    # Maps each supported option to where it belongs in the request (query string).
    optional_params = %{
      :"alt" => :query,
      :"fields" => :query,
      :"key" => :query,
      :"oauth_token" => :query,
      :"prettyPrint" => :query,
      :"quotaUser" => :query,
      :"userIp" => :query,
      :"onBehalfOfContentOwner" => :query
    }
    # Assemble the request description, execute it over the connection and
    # decode the response; `false` means no body struct is expected.
    %{}
    |> method(:delete)
    |> url("/groups")
    |> add_param(:query, :"id", id)
    |> add_optional_params(optional_params, opts)
    |> Enum.into([])
    |> (&Connection.request(connection, &1)).()
    |> decode(false)
  end

  @doc """
  Creates a group.

  ## Parameters

  - connection (GoogleApi.YouTubeAnalytics.V1.Connection): Connection to server
  - opts (KeywordList): [optional] Optional parameters
    - :alt (String): Data format for the response.
    - :fields (String): Selector specifying which fields to include in a partial response.
    - :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String): OAuth 2.0 token for the current user.
    - :pretty_print (Boolean): Returns response with indentations and line breaks.
    - :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    - :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    - :on_behalf_of_content_owner (String): Note: This parameter is intended exclusively for YouTube content partners. The onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.
    - :body (Group):

  ## Returns

  {:ok, %GoogleApi.YouTubeAnalytics.V1.Model.Group{}} on success
  {:error, info} on failure
  """
  @spec youtube_analytics_groups_insert(Tesla.Env.client, keyword()) :: {:ok, GoogleApi.YouTubeAnalytics.V1.Model.Group.t} | {:error, Tesla.Env.t}
  def youtube_analytics_groups_insert(connection, opts \\ []) do
    # Same query options as above, plus the group payload in the request body.
    optional_params = %{
      :"alt" => :query,
      :"fields" => :query,
      :"key" => :query,
      :"oauth_token" => :query,
      :"prettyPrint" => :query,
      :"quotaUser" => :query,
      :"userIp" => :query,
      :"onBehalfOfContentOwner" => :query,
      :"body" => :body
    }
    # POST /groups, decoding the response into a Group struct.
    %{}
    |> method(:post)
    |> url("/groups")
    |> add_optional_params(optional_params, opts)
    |> Enum.into([])
    |> (&Connection.request(connection, &1)).()
    |> decode(%GoogleApi.YouTubeAnalytics.V1.Model.Group{})
  end

  @doc """
  Returns a collection of groups that match the API request parameters. For example, you can retrieve all groups that the authenticated user owns, or you can retrieve one or more groups by their unique IDs.

  ## Parameters

  - connection (GoogleApi.YouTubeAnalytics.V1.Connection): Connection to server
  - opts (KeywordList): [optional] Optional parameters
    - :alt (String): Data format for the response.
    - :fields (String): Selector specifying which fields to include in a partial response.
    - :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String): OAuth 2.0 token for the current user.
    - :pretty_print (Boolean): Returns response with indentations and line breaks.
    - :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    - :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    - :id (String): The id parameter specifies a comma-separated list of the YouTube group ID(s) for the resource(s) that are being retrieved. In a group resource, the id property specifies the group's YouTube group ID.
    - :mine (Boolean): Set this parameter's value to true to instruct the API to only return groups owned by the authenticated user.
    - :on_behalf_of_content_owner (String): Note: This parameter is intended exclusively for YouTube content partners. The onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.
    - :page_token (String): The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken property identifies the next page that can be retrieved.

  ## Returns

  {:ok, %GoogleApi.YouTubeAnalytics.V1.Model.GroupListResponse{}} on success
  {:error, info} on failure
  """
  @spec youtube_analytics_groups_list(Tesla.Env.client, keyword()) :: {:ok, GoogleApi.YouTubeAnalytics.V1.Model.GroupListResponse.t} | {:error, Tesla.Env.t}
  def youtube_analytics_groups_list(connection, opts \\ []) do
    # List-specific filters (id/mine/pageToken) travel in the query string too.
    optional_params = %{
      :"alt" => :query,
      :"fields" => :query,
      :"key" => :query,
      :"oauth_token" => :query,
      :"prettyPrint" => :query,
      :"quotaUser" => :query,
      :"userIp" => :query,
      :"id" => :query,
      :"mine" => :query,
      :"onBehalfOfContentOwner" => :query,
      :"pageToken" => :query
    }
    # GET /groups, decoding into a paginated GroupListResponse.
    %{}
    |> method(:get)
    |> url("/groups")
    |> add_optional_params(optional_params, opts)
    |> Enum.into([])
    |> (&Connection.request(connection, &1)).()
    |> decode(%GoogleApi.YouTubeAnalytics.V1.Model.GroupListResponse{})
  end

  @doc """
  Modifies a group. For example, you could change a group's title.

  ## Parameters

  - connection (GoogleApi.YouTubeAnalytics.V1.Connection): Connection to server
  - opts (KeywordList): [optional] Optional parameters
    - :alt (String): Data format for the response.
    - :fields (String): Selector specifying which fields to include in a partial response.
    - :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String): OAuth 2.0 token for the current user.
    - :pretty_print (Boolean): Returns response with indentations and line breaks.
    - :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    - :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    - :on_behalf_of_content_owner (String): Note: This parameter is intended exclusively for YouTube content partners. The onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.
    - :body (Group):

  ## Returns

  {:ok, %GoogleApi.YouTubeAnalytics.V1.Model.Group{}} on success
  {:error, info} on failure
  """
  @spec youtube_analytics_groups_update(Tesla.Env.client, keyword()) :: {:ok, GoogleApi.YouTubeAnalytics.V1.Model.Group.t} | {:error, Tesla.Env.t}
  def youtube_analytics_groups_update(connection, opts \\ []) do
    optional_params = %{
      :"alt" => :query,
      :"fields" => :query,
      :"key" => :query,
      :"oauth_token" => :query,
      :"prettyPrint" => :query,
      :"quotaUser" => :query,
      :"userIp" => :query,
      :"onBehalfOfContentOwner" => :query,
      :"body" => :body
    }
    # PUT /groups with the updated group in the body; decodes into a Group.
    %{}
    |> method(:put)
    |> url("/groups")
    |> add_optional_params(optional_params, opts)
    |> Enum.into([])
    |> (&Connection.request(connection, &1)).()
    |> decode(%GoogleApi.YouTubeAnalytics.V1.Model.Group{})
  end
end
| 58.344498 | 715 | 0.721502 |
799965d3ca2731346bf78d1491b819ef5e992481 | 1,499 | exs | Elixir | .formatter.exs | kyle5794/ash | 82023da84400366d07001593673d1aaa2a418803 | [
"MIT"
] | null | null | null | .formatter.exs | kyle5794/ash | 82023da84400366d07001593673d1aaa2a418803 | [
"MIT"
] | null | null | null | .formatter.exs | kyle5794/ash | 82023da84400366d07001593673d1aaa2a418803 | [
"MIT"
] | null | null | null | # THIS FILE IS AUTOGENERATED USING `mix ash.formatter`
# DONT MODIFY IT BY HAND
# DSL keywords (generated by `mix ash.formatter`) that `mix format`
# should render without parentheses.
locals_without_parens = [
  accept: 1,
  action: 1,
  allow_nil?: 1,
  argument: 2,
  argument: 3,
  attribute: 2,
  attribute: 3,
  base_filter: 1,
  belongs_to: 2,
  belongs_to: 3,
  calculate: 2,
  calculate: 3,
  change: 1,
  change: 2,
  constraints: 1,
  count: 2,
  count: 3,
  create: 1,
  create: 2,
  create_timestamp: 1,
  create_timestamp: 2,
  default: 1,
  define_field?: 1,
  description: 1,
  destination_field: 1,
  destination_field_on_join_table: 1,
  destroy: 1,
  destroy: 2,
  event: 1,
  expensive?: 1,
  field_type: 1,
  filter: 1,
  generated?: 1,
  has_many: 2,
  has_many: 3,
  has_one: 2,
  has_one: 3,
  identity: 2,
  identity: 3,
  join_attributes: 1,
  join_relationship: 1,
  kind: 1,
  many_to_many: 2,
  many_to_many: 3,
  module: 1,
  on: 1,
  pagination: 1,
  prefix: 1,
  primary?: 1,
  primary_key?: 1,
  private?: 1,
  publish: 2,
  publish: 3,
  publish_all: 2,
  publish_all: 3,
  read: 1,
  read: 2,
  required?: 1,
  resource: 1,
  resource: 2,
  soft?: 1,
  source_field: 1,
  source_field_on_join_table: 1,
  table: 1,
  through: 1,
  type: 1,
  update: 1,
  update: 2,
  update_default: 1,
  update_timestamp: 1,
  update_timestamp: 2,
  validate: 1,
  validate: 2,
  writable?: 1
]

# Formatter configuration. The `:export` key lets depending projects use
# `import_deps` to inherit the same parens-free locals.
[
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
  locals_without_parens: locals_without_parens,
  export: [
    locals_without_parens: locals_without_parens
  ]
]
| 17.229885 | 70 | 0.640427 |
79997d23c4e74cc529fc8142ca157c5c468474ec | 4,461 | ex | Elixir | lib/coxir/struct/member.ex | jos-b/coxir | 2365cd300f8e1775c35c8c50f48dec9bc2ea2342 | [
"Apache-2.0"
] | null | null | null | lib/coxir/struct/member.ex | jos-b/coxir | 2365cd300f8e1775c35c8c50f48dec9bc2ea2342 | [
"Apache-2.0"
] | null | null | null | lib/coxir/struct/member.ex | jos-b/coxir | 2365cd300f8e1775c35c8c50f48dec9bc2ea2342 | [
"Apache-2.0"
defmodule Coxir.Struct.Member do
  @moduledoc """
  Defines methods used to interact with guild members.

  Refer to [this](https://discordapp.com/developers/docs/resources/guild#guild-member-object)
  for a list of fields and a broader documentation.

  In addition, the following fields are also embedded.
  - `user` - a user object
  - `voice` - a voice channel object
  - `roles` - a list of role objects
  """

  @type user :: map

  @type guild :: map

  @type member :: map

  use Coxir.Struct

  alias Coxir.Struct.{User, Role, Channel}

  # Resolves raw ids on a cached member into the embedded structs listed in
  # the moduledoc (`replace/3` is presumably injected by `use Coxir.Struct`).
  def pretty(struct) do
    struct
    |> replace(:user_id, &User.get/1)
    |> replace(:voice_id, &Channel.get/1)
    |> replace(:roles, &Role.get/1)
  end

  @doc """
  Fetches a cached member object.

  Returns an object if found and `nil` otherwise.
  """
  @spec get(guild, user) :: map | nil
  def get(%{id: server}, %{id: member}),
    do: get(server, member)

  # Members are cached under a `{guild_id, user_id}` composite key.
  def get(server, member),
    do: get({server, member})

  @doc false
  def get(id),
    do: super(id)

  @doc """
  Modifies a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.

  #### Params
  Must be an enumerable with the fields listed below.
  - `nick` - value to set the member's nickname to
  - `roles` - list of role ids the member is assigned
  - `mute` - whether the member is muted
  - `deaf` - whether the member is deafened
  - `channel_id` - id of a voice channel to move the member to

  Refer to [this](https://discordapp.com/developers/docs/resources/guild#modify-guild-member)
  for a broader explanation on the fields and their defaults.
  """
  @spec edit(member, Enum.t) :: :ok | map
  def edit(%{id: id}, params),
    do: edit(id, params)

  def edit({guild, user}, params) do
    API.request(:patch, "guilds/#{guild}/members/#{user}", params)
  end

  @doc """
  Changes the nickname of a given member.

  Returns a map with a `nick` field
  or a map containing error information.
  """
  @spec set_nick(member, String.t) :: map
  def set_nick(%{id: id}, name),
    do: set_nick(id, name)

  def set_nick({guild, user} = tuple, name) do
    params = %{nick: name}

    # Changing one's own nickname goes through the dedicated @me endpoint;
    # plain `case` instead of piping into the `case` macro.
    case User.get_id() do
      ^user ->
        API.request(:patch, "guilds/#{guild}/members/@me/nick", params)

      _other ->
        edit(tuple, params)
    end
  end

  @doc """
  Changes the voice channel of a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.
  """
  @spec move(member, String.t) :: :ok | map
  def move(member, channel),
    do: edit(member, channel_id: channel)

  @doc """
  Kicks a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.
  """
  @spec kick(member, String.t) :: :ok | map
  def kick(term, reason \\ "")

  def kick(%{id: id}, reason),
    do: kick(id, reason)

  def kick({guild, user}, reason) do
    API.request(:delete, "guilds/#{guild}/members/#{user}", "", params: [reason: reason])
  end

  @doc """
  Bans a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.

  #### Query
  Must be a keyword list with the fields listed below.
  - `delete-message-days` - number of days to delete the messages for (0-7)
  - `reason` - reason for the ban
  """
  @spec ban(member, Keyword.t) :: :ok | map
  def ban(%{id: id}, query),
    do: ban(id, query)

  def ban({guild, user}, query) do
    API.request(:put, "guilds/#{guild}/bans/#{user}", "", params: query)
  end

  @doc """
  Adds a role to a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.
  """
  @spec add_role(member, String.t) :: :ok | map
  def add_role(%{id: id}, role),
    do: add_role(id, role)

  def add_role({guild, user}, role) do
    API.request(:put, "guilds/#{guild}/members/#{user}/roles/#{role}")
  end

  @doc """
  Removes a role from a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.
  """
  @spec remove_role(member, String.t) :: :ok | map
  def remove_role(%{id: id}, role),
    do: remove_role(id, role)

  def remove_role({guild, user}, role) do
    API.request(:delete, "guilds/#{guild}/members/#{user}/roles/#{role}")
  end

  @doc """
  Checks whether a given member has a role.

  Returns a boolean.
  """
  @spec has_role?(member, String.t) :: boolean
  def has_role?(%{roles: roles}, role) do
    # Membership test: `Enum.any?/2` states the intent directly and
    # short-circuits, instead of the `Enum.find(...) != nil` idiom.
    Enum.any?(roles, & &1[:id] == role)
  end
end
| 24.244565 | 93 | 0.63618 |
79999e0ba4efdf46e0002c848e76a48a9b4eaba7 | 2,842 | ex | Elixir | clients/drive/lib/google_api/drive/v3/model/change.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/change.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/change.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Drive.V3.Model.Change do
  @moduledoc """
  A change to a file or Team Drive.

  ## Attributes

  - file (File): The updated state of the file. Present if the type is file and the file has not been removed from this list of changes. Defaults to: `null`.
  - fileId (String.t): The ID of the file which has changed. Defaults to: `null`.
  - kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"drive#change\". Defaults to: `null`.
  - removed (boolean()): Whether the file or Team Drive has been removed from this list of changes, for example by deletion or loss of access. Defaults to: `null`.
  - teamDrive (TeamDrive): The updated state of the Team Drive. Present if the type is teamDrive, the user is still a member of the Team Drive, and the Team Drive has not been removed. Defaults to: `null`.
  - teamDriveId (String.t): The ID of the Team Drive associated with this change. Defaults to: `null`.
  - time (DateTime.t): The time of this change (RFC 3339 date-time). Defaults to: `null`.
  - type (String.t): The type of the change. Possible values are file and teamDrive. Defaults to: `null`.
  """

  # Supplies the struct definition plus the `field/1,2` macros and
  # decode/encode helpers used below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :file => GoogleApi.Drive.V3.Model.File.t(),
          :fileId => any(),
          :kind => any(),
          :removed => any(),
          :teamDrive => GoogleApi.Drive.V3.Model.TeamDrive.t(),
          :teamDriveId => any(),
          :time => DateTime.t(),
          :type => any()
        }

  # Nested model fields declare their decoder module via `:as`;
  # `:time` decodes the RFC 3339 string into a DateTime.
  field(:file, as: GoogleApi.Drive.V3.Model.File)
  field(:fileId)
  field(:kind)
  field(:removed)
  field(:teamDrive, as: GoogleApi.Drive.V3.Model.TeamDrive)
  field(:teamDriveId)
  field(:time, as: DateTime)
  field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.Drive.V3.Model.Change do
  # Delegates straight to the model's generated decode/2.
  def decode(value, options), do: GoogleApi.Drive.V3.Model.Change.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Drive.V3.Model.Change do
  # All generated models encode through the shared ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 41.188406 | 205 | 0.702322 |
7999b8da48bb9219341f5bb8b9c5ae8f9f9f21d3 | 1,050 | exs | Elixir | mix.exs | duffelhq/saxy | fb37f1d9ce919e6085a924c2483c515ee6cb997b | [
"MIT"
] | null | null | null | mix.exs | duffelhq/saxy | fb37f1d9ce919e6085a924c2483c515ee6cb997b | [
"MIT"
] | null | null | null | mix.exs | duffelhq/saxy | fb37f1d9ce919e6085a924c2483c515ee6cb997b | [
"MIT"
defmodule Saxy.MixProject do
  @moduledoc false

  use Mix.Project

  @version "0.9.1"
  @source_url "https://github.com/qcam/saxy"

  def project() do
    [
      app: :saxy,
      version: @version,
      elixir: "~> 1.3",
      consolidate_protocols: Mix.env() != :test,
      description: description(),
      deps: deps(),
      package: package(),
      name: "Saxy",
      docs: docs()
    ]
  end

  # Saxy is a library: no supervision tree, no extra applications.
  def application(), do: []

  defp description() do
    "Saxy is an XML parser and encoder in Elixir that focuses on speed and standard compliance."
  end

  # Hex package metadata.
  defp package() do
    [
      maintainers: ["Cẩm Huỳnh"],
      licenses: ["MIT"],
      links: %{"GitHub" => @source_url}
    ]
  end

  # Dev/test-only dependencies; the library itself has none.
  defp deps() do
    [
      {:ex_doc, "~> 0.16", only: [:dev, :test], runtime: false},
      {:stream_data, "~> 0.4.2", only: :test},
      {:sweet_xml, "~> 0.6.5", only: :test}
    ]
  end

  # ExDoc configuration.
  defp docs() do
    [
      main: "Saxy",
      extras: [
        "guides/getting-started-with-sax.md"
      ],
      source_ref: "v#{@version}",
      source_url: @source_url
    ]
  end
end
| 19.811321 | 96 | 0.537143 |
7999dfa715b0e093b3a9ae3463fc08a348be9722 | 1,794 | exs | Elixir | mix.exs | waltercompanies/logger_json | bdd9ba56a6c883bb54fb44e67fd6b3eac62dde83 | [
"MIT"
] | null | null | null | mix.exs | waltercompanies/logger_json | bdd9ba56a6c883bb54fb44e67fd6b3eac62dde83 | [
"MIT"
] | null | null | null | mix.exs | waltercompanies/logger_json | bdd9ba56a6c883bb54fb44e67fd6b3eac62dde83 | [
"MIT"
defmodule LoggerJSON.Mixfile do
  @moduledoc false

  use Mix.Project

  @source_url "https://github.com/Nebo15/logger_json"
  @version "4.1.0"

  def project do
    [
      app: :logger_json,
      version: @version,
      elixir: "~> 1.6",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [] ++ Mix.compilers(),
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      package: package(),
      deps: deps(),
      docs: docs(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [coveralls: :test]
    ]
  end

  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Test support modules are compiled only in the :test environment.
  defp elixirc_paths(env) do
    case env do
      :test -> ["lib", "test/support"]
      _other -> ["lib"]
    end
  end

  defp deps do
    [
      {:jason, "~> 1.0"},
      {:ecto, "~> 2.1 or ~> 3.0", optional: true},
      {:plug, "~> 1.0", optional: true},
      {:telemetry, "~> 0.4.0", optional: true},
      {:ex_doc, ">= 0.15.0", only: [:dev, :test], runtime: false},
      {:excoveralls, ">= 0.5.0", only: [:dev, :test]},
      {:dialyxir, "~> 0.5", only: [:dev], runtime: false}
    ]
  end

  # Hex package metadata.
  defp package do
    [
      description:
        "Console Logger back-end, Plug and Ecto.LogEntry adapter that writes logs in JSON format.",
      contributors: ["Nebo #15"],
      maintainers: ["Nebo #15"],
      licenses: ["MIT"],
      files: ~w(lib LICENSE.md mix.exs README.md),
      links: %{
        Changelog: "https://hexdocs.pm/logger_json/changelog.html",
        GitHub: @source_url
      }
    ]
  end

  # ExDoc configuration.
  defp docs do
    [
      extras: [
        "LICENSE.md": [title: "License"],
        "README.md": [title: "Overview"]
      ],
      main: "readme",
      source_url: @source_url,
      source_ref: "v#{@version}",
      formatters: ["html"]
    ]
  end
end
| 24.575342 | 77 | 0.545151 |
7999e0bf79648d041a6b212ae5735a5443d7f155 | 4,338 | exs | Elixir | app/test/app_web/controllers/course_controller_test.exs | kljensen/yale-class-chat | b03e72deed967249a64404bff68b1cf22e7e1e6a | [
"Unlicense"
] | 1 | 2020-02-10T21:35:17.000Z | 2020-02-10T21:35:17.000Z | app/test/app_web/controllers/course_controller_test.exs | kljensen/yale-class-chat | b03e72deed967249a64404bff68b1cf22e7e1e6a | [
"Unlicense"
] | 86 | 2020-01-24T14:53:27.000Z | 2021-05-18T19:16:30.000Z | app/test/app_web/controllers/course_controller_test.exs | kljensen/yale-class-chat | b03e72deed967249a64404bff68b1cf22e7e1e6a | [
"Unlicense"
] | null | null | null | defmodule AppWeb.CourseControllerTest do
use AppWeb.ConnCase
alias App.Courses
alias App.AccountsTest, as: ATest
import Plug.Test
# Inserts and returns a semester via the shared Courses test helpers.
def fixture(:semester) do
  # Return the fixture directly; the intermediate binding added nothing.
  App.CoursesTest.semester_fixture()
end
# Course payloads shared by the tests below: a valid create payload, a
# distinct update payload, and an all-nil payload for failure cases.
@create_attrs %{department: "some department", name: "some name", number: 42, allow_write: true, allow_read: true}
@update_attrs %{department: "some updated department", name: "some updated name", number: 43, allow_write: false, allow_read: false}
@invalid_attrs %{department: nil, name: nil, number: nil, allow_write: nil, allow_read: nil}
def fixture(:course) do
semester = App.CoursesTest.semester_fixture()
user_faculty = ATest.user_fixture(%{is_faculty: true, net_id: "faculty net id"})
{:ok, course} = Courses.create_course(user_faculty, semester, @create_attrs)
course
end
describe "index" do
test "lists all courses", %{conn: conn} do
_course = fixture(:course)
conn = conn
|> init_test_session(uid: "faculty net id")
|> get(Routes.course_path(conn, :index))
assert html_response(conn, 200) =~ "New Course"
end
end
describe "new course" do
test "renders form", %{conn: conn} do
_user_faculty = ATest.user_fixture(%{is_faculty: true, net_id: "faculty net id"})
conn = conn
|> init_test_session(uid: "faculty net id")
|> get(Routes.course_path(conn, :new))
assert html_response(conn, 200) =~ "New Course"
end
end
describe "create course" do
test "redirects to show when data is valid", %{conn: conn} do
_user_faculty = ATest.user_fixture(%{is_faculty: true, net_id: "faculty net id"})
semester = App.CoursesTest.semester_fixture()
attrs = Map.merge(@create_attrs, %{semester_id: semester.id})
conn = conn
|> init_test_session(uid: "faculty net id")
|> post(Routes.course_path(conn, :create), course: attrs)
assert %{id: id} = redirected_params(conn)
assert redirected_to(conn) == Routes.course_path(conn, :show, id)
conn = get(conn, Routes.course_path(conn, :show, id))
assert html_response(conn, 200) =~ "Course Details"
end
test "renders errors when data is invalid", %{conn: conn} do
_user_faculty = ATest.user_fixture(%{is_faculty: true, net_id: "faculty net id"})
semester = App.CoursesTest.semester_fixture()
attrs = Map.merge(@invalid_attrs, %{semester_id: semester.id})
conn = conn
|> init_test_session(uid: "faculty net id")
|> post(Routes.course_path(conn, :create), course: attrs)
assert html_response(conn, 200) =~ "New Course"
end
end
describe "edit course" do
setup [:create_course]
test "renders form for editing chosen course", %{conn: conn, course: course} do
conn = conn
|> init_test_session(uid: "faculty net id")
|> get(Routes.course_path(conn, :edit, course))
assert html_response(conn, 200) =~ "Edit Course"
end
end
describe "update course" do
setup [:create_course]
test "redirects when data is valid", %{conn: conn, course: course} do
conn = conn
|> init_test_session(uid: "faculty net id")
|> put(Routes.course_path(conn, :update, course), course: @update_attrs)
assert redirected_to(conn) == Routes.course_path(conn, :show, course)
conn = get(conn, Routes.course_path(conn, :show, course))
assert html_response(conn, 200) =~ "some updated department"
end
test "renders errors when data is invalid", %{conn: conn, course: course} do
conn = conn
|> init_test_session(uid: "faculty net id")
|> put(Routes.course_path(conn, :update, course), course: @invalid_attrs)
assert html_response(conn, 200) =~ "Edit Course"
end
end
describe "delete course" do
setup [:create_course]
test "deletes chosen course", %{conn: conn, course: course} do
conn = conn
|> init_test_session(uid: "faculty net id")
|> delete(Routes.course_path(conn, :delete, course))
assert redirected_to(conn) == Routes.course_path(conn, :index)
conn = get(conn, Routes.course_path(conn, :show, course))
assert html_response(conn, 404) =~ "Not Found"
end
end
defp create_course(_) do
course = fixture(:course)
{:ok, course: course}
end
end
| 35.557377 | 134 | 0.660904 |
799a3461cacf5e87c43120ed5631028c45aa86db | 775 | ex | Elixir | lib/utils/hgeo.ex | wkhere/eastar | b309f4fce41218d821f4cf4af9df39f5f1a36cc0 | [
"BSD-2-Clause"
] | 18 | 2016-12-22T16:11:39.000Z | 2022-01-28T12:21:14.000Z | lib/utils/hgeo.ex | wkhere/eastar | b309f4fce41218d821f4cf4af9df39f5f1a36cc0 | [
"BSD-2-Clause"
] | 4 | 2016-12-11T17:53:40.000Z | 2019-09-02T09:05:18.000Z | lib/utils/hgeo.ex | wkhere/eastar | b309f4fce41218d821f4cf4af9df39f5f1a36cc0 | [
"BSD-2-Clause"
defmodule Astar.Utils.H.Geo do
  @moduledoc """
  Heuristic (H) function for A*: the great-circle distance between two
  points on the Earth's surface (haversine formula), plus helpers for
  converting angles to decimal degrees and radians.

  Angles may be given either as plain numbers (decimal degrees) or as
  `{degrees, minutes, seconds}` tuples.
  """

  import :math

  # Degrees-to-radians conversion factor and mean Earth radius in km.
  @pi_by_180 pi()/180
  @earth_radius 6371

  @doc "Converts a `{deg, min, sec}` tuple (or a plain number) to decimal degrees."
  def to_deg({degrees, minutes, seconds}) do
    degrees + minutes/60 + seconds/3600
  end

  def to_deg(angle) when is_number(angle), do: angle

  @doc "Converts an angle (decimal degrees or DMS tuple) to radians."
  def to_radian(angle) do
    to_deg(angle) * @pi_by_180
  end

  @doc "Haversine distance in kilometres between two `{lat, lon}` pairs."
  def h({lat_a, lon_a}, {lat_b, lon_b}) do
    lat_a_rad = to_radian(lat_a)
    lat_b_rad = to_radian(lat_b)
    delta_lat = to_radian(to_deg(lat_b) - to_deg(lat_a))
    delta_lon = to_radian(to_deg(lon_b) - to_deg(lon_a))

    half_lat = sin(delta_lat/2)
    half_lon = sin(delta_lon/2)
    chord = half_lat*half_lat + cos(lat_a_rad)*cos(lat_b_rad)*half_lon*half_lon
    angle = 2*atan2(sqrt(chord), sqrt(1 - chord))

    @earth_radius * angle
  end
end
| 22.142857 | 61 | 0.593548 |
799a34c22d22fb590c485b006eec38a5be4ca451 | 982 | ex | Elixir | clients/android_management/lib/google_api/android_management/v1/deserializer.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/android_management/lib/google_api/android_management/v1/deserializer.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/android_management/lib/google_api/android_management/v1/deserializer.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AndroidManagement.V1.Deserializer do
  @moduledoc """
  Helper functions for deserializing responses into models.

  This module is no longer used. Please use GoogleApi.Gax.ModelBase instead.
  """
end
| 37.769231 | 77 | 0.767821 |
799a86cde9770baba883e50e707256e0468accaa | 876 | exs | Elixir | exercises/two-fer/two_fer_test.exs | Tuxified/elixir | 6230e2237851cf35532b6a34e4c67b44a28cde1b | [
"MIT"
] | null | null | null | exercises/two-fer/two_fer_test.exs | Tuxified/elixir | 6230e2237851cf35532b6a34e4c67b44a28cde1b | [
"MIT"
] | null | null | null | exercises/two-fer/two_fer_test.exs | Tuxified/elixir | 6230e2237851cf35532b6a34e4c67b44a28cde1b | [
"MIT"
# When EXERCISM_TEST_EXAMPLES is set, the reference solution is compiled
# elsewhere; otherwise load the student's local two_fer.exs.
if !System.get_env("EXERCISM_TEST_EXAMPLES") do
  Code.load_file("two_fer.exs", __DIR__)
end

ExUnit.start()
# Tests tagged :pending are skipped until the student removes the tag.
ExUnit.configure(exclude: :pending, trace: true)

defmodule TwoFerTest do
  use ExUnit.Case

  test "no name given" do
    assert TwoFer.two_fer() == "One for you, one for me"
  end

  @tag :pending
  test "a name given" do
    assert TwoFer.two_fer("Gilberto Barros") == "One for Gilberto Barros, one for me"
  end

  # The remaining cases expect the solution to guard on binary input, so a
  # non-string argument must not match any function clause.
  @tag :pending
  test "when the parameter is a number" do
    assert_raise FunctionClauseError, fn ->
      TwoFer.two_fer(10)
    end
  end

  @tag :pending
  test "when the parameter is an atom" do
    assert_raise FunctionClauseError, fn ->
      TwoFer.two_fer(:bob)
    end
  end

  @tag :pending
  test "when the parameter is a charlist" do
    assert_raise FunctionClauseError, fn ->
      refute TwoFer.two_fer('Jon Snow')
    end
  end
end
| 21.365854 | 85 | 0.687215 |
799a97751dc18713afc23c5a25f07ce952250b12 | 2,723 | ex | Elixir | lib/task_bunny/errors.ex | DylanReile/task_bunny | 3c23eb345e18daf9c16c6c295a18499eb8584469 | [
"MIT"
] | 1 | 2021-03-01T20:31:33.000Z | 2021-03-01T20:31:33.000Z | lib/task_bunny/errors.ex | DylanReile/task_bunny | 3c23eb345e18daf9c16c6c295a18499eb8584469 | [
"MIT"
] | null | null | null | lib/task_bunny/errors.ex | DylanReile/task_bunny | 3c23eb345e18daf9c16c6c295a18499eb8584469 | [
"MIT"
defmodule TaskBunny.ConfigError do
  @moduledoc """
  Raised when TaskBunny configuration could not be loaded or is invalid.
  """
  defexception [:message]

  @doc false
  def exception(message: detail) do
    # Prefix the caller-supplied detail with a fixed headline.
    %__MODULE__{message: "Failed to load TaskBunny config\n#{detail}"}
  end
end
defmodule TaskBunny.Connection.ConnectError do
  @moduledoc """
  Raised when failed to retain a connection
  """
  defexception [:type, :message]

  # Builds the exception from the failure `type` and the `host` whose
  # connection could not be obtained. The `type` atom is kept on the struct
  # so callers can branch on the failure reason programmatically.
  def exception(_opts = [type: type, host: host]) do
    title = "Failed to get a connection to host '#{host}'."

    # Human-readable explanation per known failure type; any other value is
    # stringified as-is by the fallback clause.
    detail = case type do
      :invalid_host ->
        "The host is not defined in config"
      :no_connection_process ->
        """
        No process running for the host connection.

        - Make sure supervisor process is up running.
        - You might try to get connection before the process is ready.
        """
      :not_connected ->
        """
        The connection is not available.

        - Check if RabbitMQ host is up running.
        - Make sure you can connect to RabbitMQ from the application host.
        - You might try to get connection before process is ready.
        """
      fallback ->
        "#{fallback}"
    end

    message = "#{title}\n#{detail}"
    %__MODULE__{message: message, type: type}
  end
end
defmodule TaskBunny.Job.QueueNotFoundError do
  @moduledoc """
  Raised when no queue could be resolved for the given job.
  """
  defexception [:job, :message]

  @doc false
  def exception(job: job) do
    # Keep the job on the struct for programmatic access and embed it in the
    # human-readable message.
    %__MODULE__{
      job: job,
      message: "Failed to find a queue for the job.\njob=#{job}"
    }
  end
end
defmodule TaskBunny.Message.DecodeError do
  @moduledoc """
  Raised when failed to decode the message.
  """
  defexception [:message]

  # `opts` carries `:type` (which decoding stage failed), an optional
  # `:error` with the underlying reason, and `:body` with the raw message
  # payload, which is appended to the message for debugging.
  def exception(opts) do
    title = "Failed to decode the message."

    detail = case opts[:type] do
      :job_not_loaded ->
        "Job is not valid Elixir module"
      :poison_decode_error ->
        "Failed to decode the message in JSON. error=#{inspect opts[:error]}"
      :decode_error ->
        "Failed to decode the message. error=#{inspect opts[:error]}"
      fallback ->
        # Unknown types are stringified as-is.
        "#{fallback}"
    end

    message = "#{title}\n#{detail}\nmessage body=#{opts[:body]}"
    %__MODULE__{message: message}
  end
end
defmodule TaskBunny.Publisher.PublishError do
  @moduledoc """
  Raised when publishing a message to the broker fails.
  """
  defexception [:message, :inner_error]

  @doc false
  def exception(inner_error: cause) do
    # Preserve the underlying error on the struct and render it into the
    # message via inspect/1.
    %__MODULE__{
      inner_error: cause,
      message: "Failed to publish the message.\nerror=#{inspect(cause)}"
    }
  end
end
| 26.696078 | 77 | 0.647815 |
799ac0228b9abb1c9cfc62550107958440191d87 | 3,246 | ex | Elixir | lib/file_system_utils/list_devices.ex | aseigo/file_system_utils | 2506d75ae7482d97bc041448d5f1b4b9f8716e54 | [
"Apache-2.0"
] | null | null | null | lib/file_system_utils/list_devices.ex | aseigo/file_system_utils | 2506d75ae7482d97bc041448d5f1b4b9f8716e54 | [
"Apache-2.0"
] | null | null | null | lib/file_system_utils/list_devices.ex | aseigo/file_system_utils | 2506d75ae7482d97bc041448d5f1b4b9f8716e54 | [
"Apache-2.0"
defmodule FileSystemUtils.ListDevices do
  @moduledoc """
  Provides functions to get information about devices available in the system.

  Device information is gathered by shelling out to the `lsblk` command line
  utility, so these functions only work on systems where `lsblk` is
  installed. All public functions return `{:ok, result}` or
  `{:error, reason}` tuples.
  """

  @spec lsblk(devices :: binary | [binary]) :: {:ok, map} | {:error, term}
  @doc """
  lsblk - list block devices

  Returns the output of the lsblk command line utility as a decoded JSON map.

  ## Arguments
  + devices (default = "")
    If device(s) is passed as argument, information is only retrieved for those.
  """
  def lsblk(devices \\ ""), do: do_lsblk(parse_device_to_list(devices))

  @spec lsblk_only_scsi(devices :: binary | [binary]) :: {:ok, map} | {:error, term}
  @doc """
  Gives a JSON map that provides information on only the SCSI devices.

  ## Arguments
  + devices (default = "")
    If device(s) is passed as argument, information is only retrieved for those.
  """
  def lsblk_only_scsi(devices \\ ""), do: do_lsblk(["--scsi"] ++ parse_device_to_list(devices))

  @spec list_devices_with_label(boolean) :: {:ok, [binary]} | {:error, term}
  @doc """
  Lists all devices that have a label.

  ## Arguments
  - full_path (default = false)
    + false: Returns list of device names.
    + true : Returns list of paths to device.
  """
  def list_devices_with_label(full_path \\ false)
  def list_devices_with_label(false), do: File.ls("/dev/disk/by-label")
  # Wrapped in `{:ok, _}` so both clauses (and `list_devices/1`) share the
  # same return shape; previously this clause returned a bare list.
  def list_devices_with_label(true), do: {:ok, Path.wildcard("/dev/disk/by-label/*")}

  @spec list_devices(boolean) :: {:ok, [binary]} | {:error, term}
  @doc """
  List all the mountable devices in the system.

  ## Arguments
  - full_path (default = false)
    + false: Returns list of device names.
    + true : Returns list of paths to device.
  """
  def list_devices(full_path \\ false)

  def list_devices(true) do
    with {:ok, devices} <- list_devices(false),
         devices <- Enum.map(devices, &Path.wildcard("/dev/**/#{&1}")) |> List.flatten() do
      {:ok, devices}
    else
      err -> err
    end
  end

  def list_devices(false) do
    with {:ok, json} <- do_lsblk(),
         names <- get_names(json["blockdevices"]) do
      {:ok, names}
    else
      err -> err
    end
  end

  # Recursively collects the "name" of every device, descending into
  # partition "children" when present.
  defp get_names(list_devices) when is_list(list_devices) do
    list_devices
    |> Enum.map(&get_names/1)
    |> List.flatten()
  end

  defp get_names(device) when is_map(device) do
    case Map.has_key?(device, "children") do
      true -> get_names(device["children"])
      false -> device["name"]
    end
  end

  # Normalizes the `devices` argument ("" | binary | [binary]) to a list.
  defp parse_device_to_list(""), do: []
  defp parse_device_to_list(device) when not is_list(device), do: [device]
  defp parse_device_to_list(devices) when is_list(devices), do: devices

  # Runs `lsblk --json --fs` (plus any extra options) and decodes its output.
  defp do_lsblk(options \\ []) do
    with {json, err_code} <-
           System.cmd(
             "lsblk",
             ["--json", "--fs"] ++ options,
             stderr_to_stdout: true
           ),
         :ok <- parse_error_code(err_code),
         {:ok, json} <- Jason.decode(json) do
      {:ok, json}
    else
      err -> err
    end
  end

  # Maps lsblk exit codes (see lsblk(8)) onto error tuples.
  defp parse_error_code(0), do: :ok
  defp parse_error_code(1), do: {:error, "failure"}
  defp parse_error_code(32), do: {:error, "none of specified devices found"}
  defp parse_error_code(64), do: {:error, "some specified devices found, some not found"}
  defp parse_error_code(_), do: {:error, "Unknown return code"}
end
| 30.055556 | 95 | 0.635551 |
799ac35d5c169759f8372a99a10effeb11dfc68f | 2,861 | exs | Elixir | test_projects/phx_1.6/config/runtime.exs | qhwa/dockerize | d930f06da89a686961da7a5b5bdadb4c9b01ec32 | [
"MIT"
] | 47 | 2020-03-04T00:24:26.000Z | 2022-01-14T23:34:52.000Z | test_projects/phx_1.6/config/runtime.exs | qhwa/dockerize | d930f06da89a686961da7a5b5bdadb4c9b01ec32 | [
"MIT"
] | 2 | 2020-06-09T22:25:06.000Z | 2020-06-30T21:18:37.000Z | test_projects/phx_1.6/config/runtime.exs | qhwa/dockerize | d930f06da89a686961da7a5b5bdadb4c9b01ec32 | [
"MIT"
import Config

# config/runtime.exs is executed for all environments, including
# during releases. It is executed after compilation and before the
# system starts, so it is typically used to load production configuration
# and secrets from environment variables or elsewhere. Do not define
# any compile-time configuration in here, as it won't be applied.
# The block below contains prod specific runtime configuration.
if config_env() == :prod do
  # Fail fast at boot when the database URL is not provided.
  database_url =
    System.get_env("DATABASE_URL") ||
      raise """
      environment variable DATABASE_URL is missing.
      For example: ecto://USER:PASS@HOST/DATABASE
      """

  config :phx_proj, PhxProj.Repo,
    # ssl: true,
    # socket_options: [:inet6],
    url: database_url,
    pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")

  # The secret key base is used to sign/encrypt cookies and other secrets.
  # A default value is used in config/dev.exs and config/test.exs but you
  # want to use a different value for prod and you most likely don't want
  # to check this value into version control, so we use an environment
  # variable instead.
  secret_key_base =
    System.get_env("SECRET_KEY_BASE") ||
      raise """
      environment variable SECRET_KEY_BASE is missing.
      You can generate one by calling: mix phx.gen.secret
      """

  config :phx_proj, PhxProjWeb.Endpoint,
    http: [
      # Enable IPv6 and bind on all interfaces.
      # Set it to {0, 0, 0, 0, 0, 0, 0, 1} for local network only access.
      # See the documentation on https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html
      # for details about using IPv6 vs IPv4 and loopback vs public addresses.
      ip: {0, 0, 0, 0, 0, 0, 0, 0},
      # PORT is provided by most hosting platforms; default to 4000 locally.
      port: String.to_integer(System.get_env("PORT") || "4000")
    ],
    secret_key_base: secret_key_base

  # ## Using releases
  #
  # If you are doing OTP releases, you need to instruct Phoenix
  # to start each relevant endpoint:
  #
  #     config :phx_proj, PhxProjWeb.Endpoint, server: true
  #
  # Then you can assemble a release by calling `mix release`.
  # See `mix help release` for more information.

  # ## Configuring the mailer
  #
  # In production you need to configure the mailer to use a different adapter.
  # Also, you may need to configure the Swoosh API client of your choice if you
  # are not using SMTP. Here is an example of the configuration:
  #
  #     config :phx_proj, PhxProj.Mailer,
  #       adapter: Swoosh.Adapters.Mailgun,
  #       api_key: System.get_env("MAILGUN_API_KEY"),
  #       domain: System.get_env("MAILGUN_DOMAIN")
  #
  # For this example you need include a HTTP client required by Swoosh API client.
  # Swoosh supports Hackney and Finch out of the box:
  #
  #     config :swoosh, :api_client, Swoosh.ApiClient.Hackney
  #
  # See https://hexdocs.pm/swoosh/Swoosh.html#module-installation for details.
end
| 38.662162 | 82 | 0.697658 |
799ae02b6145e85bbc49a59e07eca4efadd23d0b | 1,877 | ex | Elixir | lib/event_store/storage/delete_stream.ex | jsmestad/eventstore | 93660ce316ca174ff4694e211a7ac420253e4dac | [
"MIT"
] | 576 | 2017-11-03T14:11:07.000Z | 2022-03-29T06:18:47.000Z | lib/event_store/storage/delete_stream.ex | jsmestad/eventstore | 93660ce316ca174ff4694e211a7ac420253e4dac | [
"MIT"
] | 129 | 2017-11-08T06:10:20.000Z | 2021-09-15T16:18:14.000Z | lib/event_store/storage/delete_stream.ex | jsmestad/eventstore | 93660ce316ca174ff4694e211a7ac420253e4dac | [
"MIT"
defmodule EventStore.Storage.DeleteStream do
  @moduledoc false

  require Logger

  alias EventStore.Sql.Statements

  # Soft delete: flags the stream as deleted, leaving its events in place.
  # Returns :ok, {:error, :stream_not_found}, or the Postgrex error tuple.
  def soft_delete(conn, stream_id, opts) do
    {schema, query_opts} = Keyword.pop(opts, :schema)
    sql = Statements.soft_delete_stream(schema)

    case Postgrex.query(conn, sql, [stream_id], query_opts) do
      {:ok, %Postgrex.Result{num_rows: 1}} ->
        Logger.debug(fn -> "Soft deleted stream #{inspect(stream_id)}" end)
        :ok

      {:ok, %Postgrex.Result{num_rows: 0}} ->
        log_failure("soft delete", stream_id, "stream not found")
        {:error, :stream_not_found}

      {:error, error} = reply ->
        log_failure("soft delete", stream_id, inspect(error))
        reply
    end
  end

  # Hard delete: removes the stream and its events entirely. Returns :ok,
  # {:error, :stream_not_found}, {:error, :not_supported} when the database
  # reports the feature as unsupported, or the Postgrex error tuple.
  def hard_delete(conn, stream_id, opts) do
    {schema, query_opts} = Keyword.pop(opts, :schema)
    sql = Statements.hard_delete_stream(schema)

    case Postgrex.query(conn, sql, [stream_id], query_opts) do
      {:ok, %Postgrex.Result{num_rows: 1, rows: [[^stream_id]]}} ->
        Logger.debug(fn -> "Hard deleted stream #{inspect(stream_id)}" end)
        :ok

      {:ok, %Postgrex.Result{num_rows: 0}} ->
        log_failure("hard delete", stream_id, "stream not found")
        {:error, :stream_not_found}

      {:error, %Postgrex.Error{postgres: %{code: :feature_not_supported}} = error} ->
        log_failure("hard delete", stream_id, inspect(error))
        {:error, :not_supported}

      {:error, error} = reply ->
        log_failure("hard delete", stream_id, inspect(error))
        reply
    end
  end

  # Emits the shared "Failed to <action> stream ... due to: <reason>" warning.
  defp log_failure(action, stream_id, reason) do
    Logger.warn(fn ->
      "Failed to #{action} stream #{inspect(stream_id)} due to: " <> reason
    end)
  end
end
| 27.202899 | 89 | 0.602557 |
799af3cc846dc5e314d2351679b0a7a989fbd019 | 2,397 | exs | Elixir | config/runtime.exs | myrrlyn/roadtrip | 424d85dd6a713e99c27f97f052cff88dbbeb6423 | [
"MIT"
] | 2 | 2022-01-19T17:59:21.000Z | 2022-01-19T18:05:05.000Z | config/runtime.exs | myrrlyn/roadtrip | 424d85dd6a713e99c27f97f052cff88dbbeb6423 | [
"MIT"
] | null | null | null | config/runtime.exs | myrrlyn/roadtrip | 424d85dd6a713e99c27f97f052cff88dbbeb6423 | [
"MIT"
import Config

# config/runtime.exs is executed for all environments, including
# during releases. It is executed after compilation and before the
# system starts, so it is typically used to load production configuration
# and secrets from environment variables or elsewhere. Do not define
# any compile-time configuration in here, as it won't be applied.
# The block below contains prod specific runtime configuration.
if config_env() == :prod do
  # Fail fast at boot when the database URL is not provided.
  database_url =
    System.get_env("DATABASE_URL") ||
      raise """
      environment variable DATABASE_URL is missing.
      For example: ecto://USER:PASS@HOST/DATABASE
      """

  config :roadtrip, Roadtrip.Repo,
    # ssl: true,
    # socket_options: [:inet6],
    url: database_url,
    pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")

  # Used to sign/encrypt cookies and other secrets; must be set in prod.
  secret_key_base =
    System.get_env("SECRET_KEY_BASE") ||
      raise """
      environment variable SECRET_KEY_BASE is missing.
      You can generate one by calling: mix phx.gen.secret
      """

  config :roadtrip_web, RoadtripWeb.Endpoint,
    http: [
      # Enable IPv6 and bind on all interfaces.
      # Set it to {0, 0, 0, 0, 0, 0, 0, 1} for local network only access.
      ip: {0, 0, 0, 0, 0, 0, 0, 0},
      # PORT is provided by most hosting platforms; default to 4000 locally.
      port: String.to_integer(System.get_env("PORT") || "4000")
    ],
    secret_key_base: secret_key_base

  # ## Using releases
  #
  # If you are doing OTP releases, you need to instruct Phoenix
  # to start each relevant endpoint:
  #
  #     config :roadtrip_web, RoadtripWeb.Endpoint, server: true
  #
  # Then you can assemble a release by calling `mix release`.
  # See `mix help release` for more information.

  # ## Configuring the mailer
  #
  # In production you need to configure the mailer to use a different adapter.
  # Also, you may need to configure the Swoosh API client of your choice if you
  # are not using SMTP. Here is an example of the configuration:
  #
  #     config :roadtrip, Roadtrip.Mailer,
  #       adapter: Swoosh.Adapters.Mailgun,
  #       api_key: System.get_env("MAILGUN_API_KEY"),
  #       domain: System.get_env("MAILGUN_DOMAIN")
  #
  # For this example you need include a HTTP client required by Swoosh API client.
  # Swoosh supports Hackney and Finch out of the box:
  #
  #     config :swoosh, :api_client, Swoosh.ApiClient.Hackney
  #
  # See https://hexdocs.pm/swoosh/Swoosh.html#module-installation for details.
end
| 35.776119 | 82 | 0.690029 |
799b133e95299b3b5085bddeebe22e2989341eec | 1,841 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1beta1/model/document_transform.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/document_transform.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/document_transform.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Firestore.V1beta1.Model.DocumentTransform do
@moduledoc """
A transformation of a document.
## Attributes
* `document` (*type:* `String.t`, *default:* `nil`) - The name of the document to transform.
* `fieldTransforms` (*type:* `list(GoogleApi.Firestore.V1beta1.Model.FieldTransform.t)`, *default:* `nil`) - The list of transformations to apply to the fields of the document, in
order.
This must not be empty.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:document => String.t(),
:fieldTransforms => list(GoogleApi.Firestore.V1beta1.Model.FieldTransform.t())
}
field(:document)
field(:fieldTransforms, as: GoogleApi.Firestore.V1beta1.Model.FieldTransform, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1beta1.Model.DocumentTransform do
def decode(value, options) do
GoogleApi.Firestore.V1beta1.Model.DocumentTransform.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Firestore.V1beta1.Model.DocumentTransform do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.403846 | 183 | 0.736556 |
799b3163f673fd466064bb3cfaac06ad94bf19d0 | 70 | ex | Elixir | web/views/email_view.ex | travisboettcher/futurecyborg-blog | 927c26ee9e94f0739847aa649050b5263ece9239 | [
"MIT"
] | null | null | null | web/views/email_view.ex | travisboettcher/futurecyborg-blog | 927c26ee9e94f0739847aa649050b5263ece9239 | [
"MIT"
] | null | null | null | web/views/email_view.ex | travisboettcher/futurecyborg-blog | 927c26ee9e94f0739847aa649050b5263ece9239 | [
"MIT"
] | null | null | null | defmodule FutureCyborg.EmailView do
use FutureCyborg.Web, :view
end
| 17.5 | 35 | 0.814286 |
799b6225cfc4cf92959355e08f04525666753538 | 2,600 | ex | Elixir | lib/auto_api/universal_properties_meta.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 4 | 2018-01-19T16:11:10.000Z | 2019-12-13T16:35:10.000Z | lib/auto_api/universal_properties_meta.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 5 | 2020-07-16T07:20:21.000Z | 2021-09-22T10:18:04.000Z | lib/auto_api/universal_properties_meta.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 1 | 2021-02-17T18:36:13.000Z | 2021-02-17T18:36:13.000Z | # AutoAPI
# The MIT License
#
# Copyright (c) 2018- High-Mobility GmbH (https://high-mobility.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
defmodule AutoApi.UniversalProperties.Meta do
  @moduledoc false
  # credo:disable-for-this-file Credo.Check.Refactor.CyclomaticComplexity

  # The JSON spec is read at compile time; @external_resource makes the
  # using module recompile whenever the spec file changes.
  @spec_file "specs/misc/universal_properties.json"
  @external_resource @spec_file

  # Injected into the using module at the end of its compilation. Generates:
  #   - all/0: list of {id, name_atom} pairs for every universal property
  #   - raw_spec/0: the decoded JSON spec
  #   - property_spec/1: one clause per property, returning its spec map
  defmacro __before_compile__(_env) do
    raw_spec = Jason.decode!(File.read!(@spec_file))
    properties = raw_spec["universal_properties"]

    base_functions =
      quote do
        @properties unquote(Macro.escape(properties))
                    |> Enum.map(fn prop ->
                      {prop["id"], String.to_atom(prop["name"])}
                    end)

        @doc """
        Returns all universal properties

        # Example

            iex> properties = AutoApi.UniversalProperties.all()
            iex> length(properties)
            5

            iex> List.last(properties)
            {0xA4, :brand}
        """
        @spec all() :: list({0..255, atom()})
        def all(), do: @properties

        @doc false
        @spec raw_spec() :: map()
        def raw_spec, do: unquote(Macro.escape(raw_spec))

        @doc false
        @spec property_spec(atom()) :: map()
        def property_spec(name)
      end

    # One property_spec/1 clause per property defined in the spec file.
    property_functions =
      for prop <- properties do
        prop_name = String.to_atom(prop["name"])

        quote do
          def property_spec(unquote(prop_name)), do: unquote(Macro.escape(prop))
        end
      end

    [base_functions, property_functions]
  end
end
| 33.333333 | 80 | 0.671923 |
799b73851804ba03f91e60e7558cda2cbde6e6b3 | 384 | exs | Elixir | priv/repo/migrations/20220312193533_locations_table.exs | bikebrigade/dispatch | eb622fe4f6dab7c917d678d3d7a322a01f97da44 | [
"Apache-2.0"
] | 28 | 2021-10-11T01:53:53.000Z | 2022-03-24T17:45:55.000Z | priv/repo/migrations/20220312193533_locations_table.exs | bikebrigade/dispatch | eb622fe4f6dab7c917d678d3d7a322a01f97da44 | [
"Apache-2.0"
] | 20 | 2021-10-21T08:12:31.000Z | 2022-03-31T13:35:53.000Z | priv/repo/migrations/20220312193533_locations_table.exs | bikebrigade/dispatch | eb622fe4f6dab7c917d678d3d7a322a01f97da44 | [
"Apache-2.0"
defmodule BikeBrigade.Repo.Migrations.LocationsTable do
  use Ecto.Migration

  # Creates the `locations` table: a postal-style address plus a PostGIS
  # `geography` column for the coordinates.
  def change do
    create table(:locations) do
      add :address, :string
      add :city, :string
      add :postal, :string
      add :province, :string
      add :country, :string
      add :unit, :string
      add :buzzer, :string
      add :coords, :geography

      timestamps()
    end
  end
end
| 19.2 | 55 | 0.627604 |
799b9f59568b2f2bd53a8539321334293b364b81 | 699 | ex | Elixir | lib/tnd_web/gettext.ex | tndrpg/tnd | a9a348ed7ce2f3d8f55046559f9551e2607f3236 | [
"0BSD"
] | null | null | null | lib/tnd_web/gettext.ex | tndrpg/tnd | a9a348ed7ce2f3d8f55046559f9551e2607f3236 | [
"0BSD"
] | 1 | 2021-05-11T14:31:58.000Z | 2021-05-11T14:31:58.000Z | lib/tnd_web/gettext.ex | tndrpg/tnd | a9a348ed7ce2f3d8f55046559f9551e2607f3236 | [
"0BSD"
defmodule TndWeb.Gettext do
  @moduledoc """
  A module providing Internationalization with a gettext-based API.

  By using [Gettext](https://hexdocs.pm/gettext),
  your module gains a set of macros for translations, for example:

      import TndWeb.Gettext

      # Simple translation
      gettext("Here is the string to translate")

      # Plural translation
      ngettext("Here is the string to translate",
               "Here are the strings to translate",
               3)

      # Domain-based translation
      dgettext("errors", "Here is the error message to translate")

  See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
  """
  use Gettext, otp_app: :tnd
end
| 27.96 | 72 | 0.672389 |
799bf35aaa1cb3b91c6f47e603cc8c8229ebd5f0 | 20,140 | ex | Elixir | apps/ello_core/test/support/factory.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 16 | 2017-06-21T21:31:20.000Z | 2021-05-09T03:23:26.000Z | apps/ello_core/test/support/factory.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 25 | 2017-06-07T12:18:28.000Z | 2018-06-08T13:27:43.000Z | apps/ello_core/test/support/factory.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 3 | 2018-06-14T15:34:07.000Z | 2022-02-28T21:06:13.000Z | defmodule Ello.Core.Factory do
alias Ello.Core.{Repo, Discovery, Network, Content, Contest}
alias Ello.Core.FactoryTime
alias Discovery.{Category, Promotional, Editorial, PagePromotional, CategoryPost}
alias Network.{User, Relationship, Flag, CategoryUser}
alias Content.{Post, Love, Watch, Asset}
alias Contest.{ArtistInvite, ArtistInviteSubmission}
use ExMachina.Ecto, repo: Repo
# Default user with unique username/email/email_hash via sequences.
# Categories are preloaded (empty) and image structs are built so views can
# render the user without extra queries.
def user_factory do
  %User{
    username: sequence(:username, &"username#{&1}"),
    email: sequence(:user_email, &"user-#{&1}@example.com"),
    email_hash: sequence(:user_email_hash, &"emailhash#{&1}"),
    settings: %User.Settings{},
    created_at: FactoryTime.now,
    updated_at: FactoryTime.now,
  }
  |> Repo.preload(:categories)
  |> User.load_images
end
# Bare user settings struct with all schema defaults.
def settings_factory do
  %User.Settings{}
end
# Flag (content report) filed by a staff user against a fresh user.
# Built verified and already resolved (resolved_at set).
def flag_factory do
  %Flag{
    reporting_user: insert(:user, is_staff: true),
    subject_user: insert(:user),
    verified: true,
    kind: "spam",
    resolved_at: FactoryTime.now,
    created_at: FactoryTime.now,
    updated_at: FactoryTime.now,
  }
end
# Default post: unique token, non-zero engagement counters, and a single
# rendered text block ("Phrasing!") for body/content/summary. Not a repost
# and not a comment (reposted_source and parent_post are nil).
def post_factory do
  %Post{
    author: build(:user),
    token: sequence(:post_token, &"testtoken#{&1}wouldberandom"),
    seo_title: "test post",
    is_adult_content: false,
    is_disabled: false,
    has_nudity: false,
    is_saleable: false,
    loves_count: 1,
    comments_count: 2,
    reposts_count: 3,
    views_count: 4_123,
    body: [%{"kind" => "text", "data" => "Phrasing!"}],
    rendered_content: [%{
      "kind" => "text",
      "data" => "<p>Phrasing!</p>",
      "link_url" => nil
    }],
    rendered_summary: [%{
      "kind" => "text",
      "data" => "<p>Phrasing!</p>",
      "link_url" => nil
    }],
    reposted_source: nil,
    parent_post: nil,
    created_at: FactoryTime.now,
    updated_at: FactoryTime.now,
  }
end
  @doc "add 2 assets to a post"
  def add_assets(%Post{} = post) do
    add_assets(post, [insert(:asset, post: post), insert(:asset, post: post)])
  end
  @doc "add given assets to a post"
  def add_assets(%Post{body: body, rendered_content: rendered} = post, assets) do
    # Raw body blocks reference assets by id only (url is not resolved here).
    new_bodies = Enum.map assets, fn(%{id: id}) ->
      %{"kind" => "image", "data" => %{asset_id: id, url: "skipped"}}
    end
    # Rendered blocks carry a placeholder resolved url plus a string asset link.
    new_content = Enum.map assets, fn(%{id: id}) ->
      %{"kind" => "image", "data" => %{url: "www.asdf.com", alt: "asdf"}, "links" => %{"assets" => "#{id}"}}
    end
    # Prepend image blocks to body/rendered_content/rendered_summary, persist,
    # and reload :assets so the returned post matches the database row.
    post
    |> Ecto.Changeset.change(body: new_bodies ++ body)
    |> Ecto.Changeset.change(rendered_content: new_content ++ rendered)
    |> Ecto.Changeset.change(rendered_summary: new_content ++ rendered)
    |> Repo.update!
    |> Repo.preload(:assets)
  end
def repost_factory do
post_factory()
|> Map.merge(%{
reposted_source: build(:post)
})
end
  # A JPEG attachment with realistic per-density metadata, run through
  # Asset.build_attachment to populate derived attachment fields.
  def asset_factory do
    %Asset{
      user: build(:user),
      post: build(:post),
      attachment: "ello-a9c0ede1-aeca-45af-9723-5750babf541e.jpeg",
      attachment_metadata: %{
        "optimized" => %{"size"=>433_286, "type"=>"image/jpeg", "width"=>1_280, "height"=>1_024},
        "xhdpi" => %{"size"=>434_916, "type"=>"image/jpeg", "width"=>1_280, "height"=>1_024},
        "hdpi" => %{"size"=>287_932, "type"=>"image/jpeg", "width"=>750, "height"=>600},
        "mdpi" => %{"size"=>77_422, "type"=>"image/jpeg", "width"=>375, "height"=>300},
        "ldpi" => %{"size"=>19_718, "type"=>"image/jpeg", "width"=>180, "height"=>144}
      },
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    } |> Asset.build_attachment
  end
  # Same as :asset but the original upload is a GIF; only the "optimized"
  # rendition keeps the image/gif type, the scaled renditions are JPEGs.
  def gif_asset_factory do
    %Asset{
      user: build(:user),
      post: build(:post),
      attachment: "ello-a9c0ede1-aeca-45af-9723-5750babf541e.gif",
      attachment_metadata: %{
        "optimized" => %{"size"=>433_286, "type"=>"image/gif", "width"=>1_280, "height"=>1_024},
        "xhdpi" => %{"size"=>434_916, "type"=>"image/jpeg", "width"=>1_280, "height"=>1_024},
        "hdpi" => %{"size"=>287_932, "type"=>"image/jpeg", "width"=>750, "height"=>600},
        "mdpi" => %{"size"=>77_422, "type"=>"image/jpeg", "width"=>375, "height"=>300},
        "ldpi" => %{"size"=>19_718, "type"=>"image/jpeg", "width"=>180, "height"=>144}
      },
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    } |> Asset.build_attachment
  end
  # A post submitted to (but not yet featured in) a category.
  def category_post_factory do
    %CategoryPost{
      status: "submitted",
      category: build(:category),
      post: build(:post),
      submitted_at: FactoryTime.now,
    }
  end
  # Join record marking a user as "featured" in a category.
  def category_user_factory do
    %CategoryUser{
      role: "featured",
      user: build(:user),
      category: build(:category),
    }
  end
  # Like :category_post but already promoted to "featured".
  def featured_category_post_factory do
    %CategoryPost{
      status: "featured",
      category: build(:category),
      post: build(:post),
      submitted_at: FactoryTime.now,
      featured_at: FactoryTime.now,
    }
  end
  # Base editorial with the four promo crops (1x1, 1x2, 2x1, 2x2) and their
  # per-density metadata; the aspect ratio of each crop is encoded in the
  # metadata heights. Per-kind factories below merge in `kind` and `content`.
  def editorial_factory do
    %Editorial{
      one_by_one_image: "ello-a9c0ede1-aeca-45af-9723-5750babf541e.jpeg",
      one_by_two_image: "ello-a9c0ede1-aeca-45af-9723-5750babf541e.jpeg",
      two_by_one_image: "ello-a9c0ede1-aeca-45af-9723-5750babf541e.jpeg",
      two_by_two_image: "ello-a9c0ede1-aeca-45af-9723-5750babf541e.jpeg",
      one_by_one_image_metadata: %{
        "optimized" => %{"size" => 555_555, "type" => "image/jpeg", "width" => 1920, "height" => 1920},
        "xhdpi" => %{"size" => 444_444, "type" => "image/jpeg", "width" => 1500, "height" => 1500},
        "hdpi" => %{"size" => 333_333, "type" => "image/jpeg", "width" => 750, "height" => 750},
        "mdpi" => %{"size" => 222_222, "type" => "image/jpeg", "width" => 375, "height" => 375},
        "ldpi" => %{"size" => 111_111, "type" => "image/jpeg", "width" => 190, "height" => 190},
      },
      one_by_two_image_metadata: %{
        "optimized" => %{"size" => 555_555, "type" => "image/jpeg", "width" => 1920, "height" => 3840},
        "xhdpi" => %{"size" => 444_444, "type" => "image/jpeg", "width" => 1500, "height" => 3000},
        "hdpi" => %{"size" => 333_333, "type" => "image/jpeg", "width" => 750, "height" => 1500},
        "mdpi" => %{"size" => 222_222, "type" => "image/jpeg", "width" => 375, "height" => 750},
        "ldpi" => %{"size" => 111_111, "type" => "image/jpeg", "width" => 190, "height" => 380},
      },
      two_by_one_image_metadata: %{
        "optimized" => %{"size" => 555_555, "type" => "image/jpeg", "width" => 1920, "height" => 960},
        "xhdpi" => %{"size" => 444_444, "type" => "image/jpeg", "width" => 1500, "height" => 750},
        "hdpi" => %{"size" => 333_333, "type" => "image/jpeg", "width" => 750, "height" => 375},
        "mdpi" => %{"size" => 222_222, "type" => "image/jpeg", "width" => 375, "height" => 188},
        "ldpi" => %{"size" => 111_111, "type" => "image/jpeg", "width" => 190, "height" => 95},
      },
      two_by_two_image_metadata: %{
        "optimized" => %{"size" => 555_555, "type" => "image/jpeg", "width" => 1920, "height" => 1920},
        "xhdpi" => %{"size" => 444_444, "type" => "image/jpeg", "width" => 1500, "height" => 1500},
        "hdpi" => %{"size" => 333_333, "type" => "image/jpeg", "width" => 750, "height" => 750},
        "mdpi" => %{"size" => 222_222, "type" => "image/jpeg", "width" => 375, "height" => 375},
        "ldpi" => %{"size" => 111_111, "type" => "image/jpeg", "width" => 190, "height" => 190},
      },
    }
  end
  # Editorial pointing at a concrete post.
  def post_editorial_factory do
    Map.merge(editorial_factory(), %{
      post: build(:post),
      kind: "post",
      content: %{
        "title" => "Post Editorial",
        "subtitle" => "check *it* out",
        "plaintext_subtitle" => "check it out",
        "rendered_subtitle" => "<p>check <em>it</em> out</p>",
      }
    })
  end
  # Editorial linking to an external URL.
  def external_editorial_factory do
    Map.merge(editorial_factory(), %{
      kind: "external",
      content: %{
        "title" => "External Editorial",
        "subtitle" => "check *it* out",
        "plaintext_subtitle" => "check it out",
        "rendered_subtitle" => "<p>check <em>it</em> out</p>",
        "url" => "https://ello.co/wtf",
      }
    })
  end
  # Same shape as :external_editorial but flagged as sponsored content.
  def sponsored_editorial_factory do
    Map.merge(editorial_factory(), %{
      kind: "sponsored",
      content: %{
        "title" => "Sponsored Editorial",
        "subtitle" => "check *it* out",
        "plaintext_subtitle" => "check it out",
        "rendered_subtitle" => "<p>check <em>it</em> out</p>",
        "url" => "https://ello.co/wtf",
      }
    })
  end
  # Editorial linking to an internal app path instead of a URL.
  def internal_editorial_factory do
    Map.merge(editorial_factory(), %{
      kind: "internal",
      content: %{
        "title" => "Internal Editorial",
        "subtitle" => "check *it* out",
        "plaintext_subtitle" => "check it out",
        "rendered_subtitle" => "<p>check <em>it</em> out</p>",
        "path" => "/discover/recent",
      }
    })
  end
  # Editorial referencing a category by slug.
  def category_editorial_factory do
    Map.merge(editorial_factory(), %{
      kind: "category",
      content: %{
        "title" => "Category Editorial",
        "slug" => "shop",
      }
    })
  end
  # Editorial referencing an artist invite by slug.
  def artist_invite_editorial_factory do
    Map.merge(editorial_factory(), %{
      kind: "artist_invite",
      content: %{
        "title" => "Artist Invite Editorial",
        "slug" => "nfp-100",
      }
    })
  end
  # Editorial built from a hand-picked list of post tokens; inserts two
  # posts so the tokens reference real records.
  def curated_posts_editorial_factory do
    Map.merge(editorial_factory(), %{
      kind: "curated_posts",
      content: %{
        "title" => "Curated Posts Editorial",
        "post_tokens" => [insert(:post).token, insert(:post).token]
      }
    })
  end
  # Stream-style editorial with no target — title only.
  def following_editorial_factory do
    Map.merge(editorial_factory(), %{
      kind: "following",
      content: %{
        "title" => "Following Editorial",
      }
    })
  end
  # Call-to-action editorial (join or invite) — title only.
  def invite_join_editorial_factory do
    Map.merge(editorial_factory(), %{
      kind: "invite_join",
      content: %{
        "title" => "Join or Invite Editorial",
      }
    })
  end
  # A love (like) from a fresh user on a fresh post.
  def love_factory do
    %Love{
      user: build(:user),
      post: build(:post),
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    }
  end
  # A watch (subscription) from a fresh user on a fresh post.
  def watch_factory do
    %Watch{
      user: build(:user),
      post: build(:post),
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    }
  end
def comment_factory do
post_factory()
|> Map.merge(%{
parent_post: build(:post)
})
end
  # A primary-level category with a unique name/slug/roshi_slug and one
  # promotional attached. NOTE(review): `order` is randomized, so builds are
  # not fully deterministic — confirm no test sorts on it.
  def category_factory do
    %Category{
      name: sequence(:category_name, &"category#{&1}"),
      slug: sequence(:category_slug, &"category#{&1}"),
      roshi_slug: sequence(:category_roshi_slug, &"category#{&1}"),
      description: "Posts about this categories",
      is_sponsored: false,
      is_creator_type: false,
      level: "Primary",
      order: Enum.random(0..10),
      uses_page_promotionals: false,
      promotionals: [build(:promotional)],
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    } |> Category.load_images
  end
  # A category promotional image tied to a user and a post token.
  def promotional_factory do
    %Promotional{
      image: "ello-optimized-da955f87.jpg",
      image_metadata: %{},
      post_token: "abc-123",
      user: build(:user),
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    } |> Promotional.load_images
  end
  # A page-level promotional (header/subheader banner) with all placement
  # flags off by default.
  def page_promotional_factory do
    %PagePromotional{
      header: "Header",
      subheader: "Sub Header",
      cta_href: nil,
      cta_caption: nil,
      is_logged_in: false,
      is_editorial: false,
      is_artist_invite: false,
      is_authentication: false,
      image: "ello-optimized-da955f87.jpg",
      image_metadata: %{},
      post_token: "abc-123",
      user: build(:user),
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    } |> PagePromotional.load_images
  end
  # A "friend" relationship between two fresh users.
  def relationship_factory do
    %Relationship{
      priority: "friend",
      owner: build(:user),
      subject: build(:user),
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    }
  end
  # An open artist invite with a unique slug, a brand account, submission
  # guide, and header/logo/og images with per-density metadata.
  def artist_invite_factory do
    %ArtistInvite{
      title: "Foo Brand",
      meta_title: "Foo Brand Art Exhibition Contest",
      invite_type: "Art Exhibition",
      slug: sequence(:slug, &"foo-brand-#{&1}"),
      brand_account: build(:user),
      opened_at: FactoryTime.now,
      closed_at: FactoryTime.now,
      status: "open",
      raw_description: "Foo brand is looking for artists",
      rendered_description: "<p>Foo brand is looking for artists</p>",
      meta_description: "Foo brand wants to pay you to exhibit your art. Enter to win.",
      raw_short_description: "Bar",
      rendered_short_description: "<p>Bar</p>",
      submission_body_block: "#FooBrand @FooBrand",
      guide: [%{title: "How To Submit", raw_body: "To submit...", rendered_body: "<p>To submit...</p>"}],
      header_image: "ello-e76606cf-44b0-48b5-9918-1efad8e0272c.jpeg",
      header_image_metadata: %{
        "optimized" => %{
          "size" => 1_177_127,
          "type" => "image/jpeg",
          "width" => 1_880,
          "height" => 1_410
        },
        "xhdpi" => %{
          "size" => 582_569,
          "type" => "image/jpeg",
          "width" => 1_116,
          "height" => 837
        },
        "hdpi" => %{
          "size" => 150_067,
          "type" => "image/jpeg",
          "width" => 552,
          "height" => 414
        },
        "mdpi" => %{
          "size" => 40_106,
          "type" => "image/jpeg",
          "width" => 276,
          "height" => 207
        },
        "ldpi" => %{
          "size" => 10_872,
          "type" => "image/jpeg",
          "width" => 132,
          "height" => 99
        }
      },
      logo_image: "ello-a9c0ede1-aeca-45af-9723-5750babf541e.jpeg",
      logo_image_metadata: %{
        "optimized" => %{"size" => 555_555, "type" => "image/jpeg", "width" => 1920, "height" => 1920},
        "xhdpi" => %{"size" => 444_444, "type" => "image/jpeg", "width" => 1500, "height" => 1500},
        "hdpi" => %{"size" => 333_333, "type" => "image/jpeg", "width" => 750, "height" => 750},
        "mdpi" => %{"size" => 222_222, "type" => "image/jpeg", "width" => 375, "height" => 375},
        "ldpi" => %{"size" => 111_111, "type" => "image/jpeg", "width" => 190, "height" => 190},
      },
      og_image: "ello-a9c0ede1-aeca-45af-9723-5750babf541e.jpeg",
      og_image_metadata: %{
        "optimized" => %{"size" => 555_555, "type" => "image/jpeg", "width" => 1920, "height" => 1920},
        "xhdpi" => %{"size" => 444_444, "type" => "image/jpeg", "width" => 1500, "height" => 1500},
        "hdpi" => %{"size" => 333_333, "type" => "image/jpeg", "width" => 750, "height" => 750},
        "mdpi" => %{"size" => 222_222, "type" => "image/jpeg", "width" => 375, "height" => 375},
        "ldpi" => %{"size" => 111_111, "type" => "image/jpeg", "width" => 190, "height" => 190},
      },
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    }
  end
  # A not-yet-approved submission linking a fresh post to a fresh invite.
  def artist_invite_submission_factory do
    %ArtistInviteSubmission{
      artist_invite: build(:artist_invite),
      post: build(:post),
      status: "unapproved",
      created_at: FactoryTime.now,
      updated_at: FactoryTime.now,
    }
  end
  # Fixtures with hard-coded ids and names. NOTE(review): judging by the
  # name and the fixed ids (42, 100_000, 100_001), these appear to back
  # seed/demo scripts rather than randomized tests — confirm with callers.
  defmodule Script do
    use ExMachina.Ecto, repo: Repo
    # Fully populated user (id 42) with avatar/cover metadata, rendered
    # links, and adult-content viewing enabled.
    def archer_factory do
      %User{
        id: 42,
        username: "archer",
        name: "Sterling Archer",
        email: "[email protected]",
        email_hash: "archerelloco",
        bad_for_seo?: false,
        location: "New York, NY",
        short_bio: "I have been spying for a while now",
        formatted_short_bio: "<p>I have been spying for a while now</p>",
        links: "http://www.twitter.com/ArcherFX",
        rendered_links: [
          %{"url"=>"http://www.twitter.com/ArcherFX",
            "text"=>"twitter.com/ArcherFX",
            "type"=>"Twitter",
            "icon"=>"https://social-icons.ello.co/twitter.png"},
        ],
        avatar: "ello-2274bdfe-57d8-4499-ba67-a7c003d5a962.png",
        created_at: FactoryTime.now,
        updated_at: FactoryTime.now,
        avatar_metadata: %{
          "large" => %{
            "size" => 220_669,
            "type" => "image/png",
            "width" => 360,
            "height" => 360
          },
          "regular" => %{
            "size" => 36_629,
            "type" => "image/png",
            "width" => 120,
            "height" => 120
          },
          "small" => %{
            "size" => 17_753,
            "type" => "image/png",
            "width" => 60,
            "height" => 60
          }
        },
        cover_image: "ello-e76606cf-44b0-48b5-9918-1efad8e0272c.jpeg",
        cover_image_metadata: %{
          "optimized" => %{
            "size" => 1_177_127,
            "type" => "image/jpeg",
            "width" => 1_880,
            "height" => 1_410
          },
          "xhdpi" => %{
            "size" => 582_569,
            "type" => "image/jpeg",
            "width" => 1_116,
            "height" => 837
          },
          "hdpi" => %{
            "size" => 150_067,
            "type" => "image/jpeg",
            "width" => 552,
            "height" => 414
          },
          "mdpi" => %{
            "size" => 40_106,
            "type" => "image/jpeg",
            "width" => 276,
            "height" => 207
          },
          "ldpi" => %{
            "size" => 10_872,
            "type" => "image/jpeg",
            "width" => 132,
            "height" => 99
          }
        },
        settings: %User.Settings{
          views_adult_content: true,
        }
      }
      |> Repo.preload(:categories)
      |> User.load_images
    end
    # Meta-level "Featured" category using page promotionals.
    def featured_category_factory do
      %Category{
        name: "Featured",
        slug: "featured",
        cta_caption: nil,
        cta_href: nil,
        description: nil,
        is_sponsored: false,
        is_creator_type: false,
        level: "meta",
        order: 0,
        uses_page_promotionals: true,
        created_at: FactoryTime.now,
        updated_at: FactoryTime.now,
      } |> Category.load_images
    end
    # Unleveled category with a fixed id and no promotionals.
    def espionage_category_factory do
      %Category{
        id: 100_000,
        name: "Espionage",
        slug: "espionage",
        cta_caption: nil,
        cta_href: nil,
        description: "All things spying related",
        is_sponsored: false,
        is_creator_type: false,
        level: nil,
        order: 0,
        uses_page_promotionals: false,
        created_at: FactoryTime.now,
        updated_at: FactoryTime.now,
        promotionals: [],
      } |> Category.load_images
    end
    # Primary category with a fixed id, tile image metadata, and one
    # promotional built via the parent Factory module.
    def lacross_category_factory do
      %Category{
        id: 100_001,
        name: "Lacross",
        slug: "lacross",
        cta_caption: nil,
        cta_href: nil,
        description: "All things lacross related",
        is_sponsored: false,
        is_creator_type: false,
        level: "Primary",
        order: 0,
        uses_page_promotionals: false,
        created_at: FactoryTime.now,
        updated_at: FactoryTime.now,
        tile_image: "ello-optimized-8bcedb76.jpg",
        tile_image_metadata: %{
          "large" => %{
            "size" => 855_144,
            "type" => "image/png",
            "width" => 1_000,
            "height" => 1_000
          },
          "regular" => %{
            "size" => 556_821,
            "type" => "image/png",
            "width" => 800,
            "height" => 800
          },
          "small" => %{
            "size" => 126_225,
            "type" => "image/png",
            "width" => 360,
            "height" => 360
          },
        },
        promotionals: [Ello.Core.Factory.build(:promotional)]
      } |> Category.load_images
    end
  end
end
| 32.224 | 108 | 0.528997 |
799bff3c1deb8419859742933ad509fcce54b940 | 6,164 | ex | Elixir | clients/dataproc/lib/google_api/dataproc/v1/model/job.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dataproc/lib/google_api/dataproc/v1/model/job.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/job.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Dataproc.V1.Model.Job do
  @moduledoc """
  A Cloud Dataproc job resource.
  ## Attributes
  *   `driverControlFilesUri` (*type:* `String.t`, *default:* `nil`) - Output only. If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
  *   `driverOutputResourceUri` (*type:* `String.t`, *default:* `nil`) - Output only. A URI pointing to the location of the stdout of the job's driver program.
  *   `hadoopJob` (*type:* `GoogleApi.Dataproc.V1.Model.HadoopJob.t`, *default:* `nil`) - Job is a Hadoop job.
  *   `hiveJob` (*type:* `GoogleApi.Dataproc.V1.Model.HiveJob.t`, *default:* `nil`) - Job is a Hive job.
  *   `jobUuid` (*type:* `String.t`, *default:* `nil`) - Output only. A UUID that uniquely identifies a job within the project over time. This is in contrast to a user-settable reference.job_id that may be reused over time.
  *   `labels` (*type:* `map()`, *default:* `nil`) - Optional. The labels to associate with this job. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a job.
  *   `pigJob` (*type:* `GoogleApi.Dataproc.V1.Model.PigJob.t`, *default:* `nil`) - Job is a Pig job.
  *   `placement` (*type:* `GoogleApi.Dataproc.V1.Model.JobPlacement.t`, *default:* `nil`) - Required. Job information, including how, when, and where to run the job.
  *   `pysparkJob` (*type:* `GoogleApi.Dataproc.V1.Model.PySparkJob.t`, *default:* `nil`) - Job is a Pyspark job.
  *   `reference` (*type:* `GoogleApi.Dataproc.V1.Model.JobReference.t`, *default:* `nil`) - Optional. The fully qualified reference to the job, which can be used to obtain the equivalent REST path of the job resource. If this property is not specified when a job is created, the server generates a <code>job_id</code>.
  *   `scheduling` (*type:* `GoogleApi.Dataproc.V1.Model.JobScheduling.t`, *default:* `nil`) - Optional. Job scheduling configuration.
  *   `sparkJob` (*type:* `GoogleApi.Dataproc.V1.Model.SparkJob.t`, *default:* `nil`) - Job is a Spark job.
  *   `sparkSqlJob` (*type:* `GoogleApi.Dataproc.V1.Model.SparkSqlJob.t`, *default:* `nil`) - Job is a SparkSql job.
  *   `status` (*type:* `GoogleApi.Dataproc.V1.Model.JobStatus.t`, *default:* `nil`) - Output only. The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
  *   `statusHistory` (*type:* `list(GoogleApi.Dataproc.V1.Model.JobStatus.t)`, *default:* `nil`) - Output only. The previous job status.
  *   `yarnApplications` (*type:* `list(GoogleApi.Dataproc.V1.Model.YarnApplication.t)`, *default:* `nil`) - Output only. The collection of YARN applications spun up by this job.Beta Feature: This report is available for testing purposes only. It may be changed before final release.
  """
  use GoogleApi.Gax.ModelBase
  # Generator-maintained: every attribute is declared twice, once in the
  # @type spec and once via the ModelBase `field` macro (which drives JSON
  # (de)serialization). Keep the two lists in sync; do not edit by hand.
  @type t :: %__MODULE__{
          :driverControlFilesUri => String.t(),
          :driverOutputResourceUri => String.t(),
          :hadoopJob => GoogleApi.Dataproc.V1.Model.HadoopJob.t(),
          :hiveJob => GoogleApi.Dataproc.V1.Model.HiveJob.t(),
          :jobUuid => String.t(),
          :labels => map(),
          :pigJob => GoogleApi.Dataproc.V1.Model.PigJob.t(),
          :placement => GoogleApi.Dataproc.V1.Model.JobPlacement.t(),
          :pysparkJob => GoogleApi.Dataproc.V1.Model.PySparkJob.t(),
          :reference => GoogleApi.Dataproc.V1.Model.JobReference.t(),
          :scheduling => GoogleApi.Dataproc.V1.Model.JobScheduling.t(),
          :sparkJob => GoogleApi.Dataproc.V1.Model.SparkJob.t(),
          :sparkSqlJob => GoogleApi.Dataproc.V1.Model.SparkSqlJob.t(),
          :status => GoogleApi.Dataproc.V1.Model.JobStatus.t(),
          :statusHistory => list(GoogleApi.Dataproc.V1.Model.JobStatus.t()),
          :yarnApplications => list(GoogleApi.Dataproc.V1.Model.YarnApplication.t())
        }
  field(:driverControlFilesUri)
  field(:driverOutputResourceUri)
  field(:hadoopJob, as: GoogleApi.Dataproc.V1.Model.HadoopJob)
  field(:hiveJob, as: GoogleApi.Dataproc.V1.Model.HiveJob)
  field(:jobUuid)
  field(:labels, type: :map)
  field(:pigJob, as: GoogleApi.Dataproc.V1.Model.PigJob)
  field(:placement, as: GoogleApi.Dataproc.V1.Model.JobPlacement)
  field(:pysparkJob, as: GoogleApi.Dataproc.V1.Model.PySparkJob)
  field(:reference, as: GoogleApi.Dataproc.V1.Model.JobReference)
  field(:scheduling, as: GoogleApi.Dataproc.V1.Model.JobScheduling)
  field(:sparkJob, as: GoogleApi.Dataproc.V1.Model.SparkJob)
  field(:sparkSqlJob, as: GoogleApi.Dataproc.V1.Model.SparkSqlJob)
  field(:status, as: GoogleApi.Dataproc.V1.Model.JobStatus)
  field(:statusHistory, as: GoogleApi.Dataproc.V1.Model.JobStatus, type: :list)
  field(:yarnApplications, as: GoogleApi.Dataproc.V1.Model.YarnApplication, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Dataproc.V1.Model.Job do
  # Delegate JSON decoding to the generated model module.
  def decode(value, options) do
    GoogleApi.Dataproc.V1.Model.Job.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Dataproc.V1.Model.Job do
  # Encoding is shared by all generated models via Gax.ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 67 | 413 | 0.713822 |
799c0dad6c67a08e129369edd9b29a2a178999b3 | 2,576 | exs | Elixir | app/priv/repo/seeds.exs | gkzz/jtfparty | d1190b9988857980e0a83cca877d1288f66a8a94 | [
"MIT"
] | null | null | null | app/priv/repo/seeds.exs | gkzz/jtfparty | d1190b9988857980e0a83cca877d1288f66a8a94 | [
"MIT"
] | 17 | 2020-08-16T12:47:00.000Z | 2021-04-06T17:09:15.000Z | app/priv/repo/seeds.exs | gkzz/jtfparty | d1190b9988857980e0a83cca877d1288f66a8a94 | [
"MIT"
] | 1 | 2020-08-07T04:33:52.000Z | 2020-08-07T04:33:52.000Z | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Jtfparty.Repo.insert!(%Jtfparty.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
alias Jtfparty.Repo
alias Jtfparty.Event
alias Jtfparty.Sessions.Session

# Events
#
# Bug fix: `Repo.insert!/1` does not mutate its argument — the database-
# generated id only exists on the *returned* struct. The previous version
# kept the pre-insert struct (id: nil), so every session below was seeded
# with `event_id: nil`. Capture the return value instead.
jtf2021w = Repo.insert!(%Event{name: "JTF2021w"})

# Session fixtures for JTF2021w as {place, timebox, title}.
# Every session lasts 45 minutes and shares the same placeholder video id.
sessions = [
  {"A", "13:00-13:45", "Design Proposal は文化を創る"},
  {"A", "13:55-14:40", "エンジニア以外の方が自らSQLを使ってセグメント分析を行うカルチャーをどのように作っていったか"},
  {"A", "15:10-15:55", "機械学習基盤の本番運用とその取り組み"},
  {"A", "16:05-16:50", "エンプラ系大企業でソフトウェアエンジニアリング文化を開花させるために"},
  {"B", "13:00-13:45", "CTOが人事部長も兼務するとどんなカルチャーが生まれるか!?"},
  {"B", "13:55-14:40", "「極める、伝える、教える」の調和"},
  {"B", "15:10-15:55", "「エンジニア像」を言語化し文化の礎を築く"},
  {"B", "16:05-16:50", "エンジニアはアウトプットによって成長できるのか?"},
  {"C", "13:00-13:45", "Kubernetes on OpenStackにおけるハマりどころ"},
  {"C", "13:55-14:40", "「「Kubernetes による Cloud Native な開発」と「VM 時代の開発」 〜新しい技術に向き合い、前進し続けるために考えていること〜"},
  {"C", "15:10-15:55", "Container by default原則 -Kubernetesがまだ浸透しない日本を変える取り組み-"},
  {"C", "16:05-16:50", "Cloud Native開発者のためのDatabase with Kubernetes"}
]

Enum.each(sessions, fn {place, timebox, title} ->
  Repo.insert!(%Session{
    event_id: jtf2021w.id,
    place: place,
    timebox: timebox,
    time: 45,
    title: title,
    youtube_url: "NdGN1z0q67U"
  })
end)
799c22ca38e6cb709d0ebcf1937558818630705b | 3,148 | ex | Elixir | clients/webmaster/lib/google_api/webmaster/v3/model/wmx_sitemap.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/webmaster/lib/google_api/webmaster/v3/model/wmx_sitemap.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/webmaster/lib/google_api/webmaster/v3/model/wmx_sitemap.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Webmaster.V3.Model.WmxSitemap do
  @moduledoc """
  Contains detailed information about a specific URL submitted as a sitemap.
  ## Attributes
  *   `contents` (*type:* `list(GoogleApi.Webmaster.V3.Model.WmxSitemapContent.t)`, *default:* `nil`) - The various content types in the sitemap.
  *   `errors` (*type:* `String.t`, *default:* `nil`) - Number of errors in the sitemap. These are issues with the sitemap itself that need to be fixed before it can be processed correctly.
  *   `isPending` (*type:* `boolean()`, *default:* `nil`) - If true, the sitemap has not been processed.
  *   `isSitemapsIndex` (*type:* `boolean()`, *default:* `nil`) - If true, the sitemap is a collection of sitemaps.
  *   `lastDownloaded` (*type:* `DateTime.t`, *default:* `nil`) - Date & time in which this sitemap was last downloaded. Date format is in RFC 3339 format (yyyy-mm-dd).
  *   `lastSubmitted` (*type:* `DateTime.t`, *default:* `nil`) - Date & time in which this sitemap was submitted. Date format is in RFC 3339 format (yyyy-mm-dd).
  *   `path` (*type:* `String.t`, *default:* `nil`) - The url of the sitemap.
  *   `type` (*type:* `String.t`, *default:* `nil`) - The type of the sitemap. For example: rssFeed.
  *   `warnings` (*type:* `String.t`, *default:* `nil`) - Number of warnings for the sitemap. These are generally non-critical issues with URLs in the sitemaps.
  """
  use GoogleApi.Gax.ModelBase
  # Generator-maintained: keep the @type spec and the `field` declarations
  # (which drive Poison JSON (de)serialization) in sync; do not edit by hand.
  @type t :: %__MODULE__{
          :contents => list(GoogleApi.Webmaster.V3.Model.WmxSitemapContent.t()),
          :errors => String.t(),
          :isPending => boolean(),
          :isSitemapsIndex => boolean(),
          :lastDownloaded => DateTime.t(),
          :lastSubmitted => DateTime.t(),
          :path => String.t(),
          :type => String.t(),
          :warnings => String.t()
        }
  field(:contents, as: GoogleApi.Webmaster.V3.Model.WmxSitemapContent, type: :list)
  field(:errors)
  field(:isPending)
  field(:isSitemapsIndex)
  field(:lastDownloaded, as: DateTime)
  field(:lastSubmitted, as: DateTime)
  field(:path)
  field(:type)
  field(:warnings)
end
defimpl Poison.Decoder, for: GoogleApi.Webmaster.V3.Model.WmxSitemap do
  # Delegate JSON decoding to the generated model module.
  def decode(value, options) do
    GoogleApi.Webmaster.V3.Model.WmxSitemap.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Webmaster.V3.Model.WmxSitemap do
  # Encoding is shared by all generated models via Gax.ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 44.338028 | 189 | 0.688056 |
799c38e9b1efe73a5ac32c5dc2986366199b0acc | 1,536 | ex | Elixir | lib/sftp/access_service.ex | wstucco/sftp_ex | 33e60c691a1aece024aca9d163b8aa24100d9d7d | [
"MIT"
] | null | null | null | lib/sftp/access_service.ex | wstucco/sftp_ex | 33e60c691a1aece024aca9d163b8aa24100d9d7d | [
"MIT"
] | null | null | null | lib/sftp/access_service.ex | wstucco/sftp_ex | 33e60c691a1aece024aca9d163b8aa24100d9d7d | [
"MIT"
] | null | null | null | require SftpEx.Helpers, as: S
require Logger
defmodule SFTP.AccessService do
  @moduledoc "Functions for accessing files and directories"

  # Resolved at compile time so tests can swap in a mock SFTP service via
  # the :sftp_ex application config.
  @sftp Application.get_env(:sftp_ex, :sftp_service, SFTP.Service)

  @doc """
  Closes an open file or directory handle.
  Returns :ok, or {:error, reason}
  """
  def close(connection, handle, _path \\ '') do
    case @sftp.close(connection, handle) do
      :ok -> :ok
      e -> S.handle_error(e)
    end
  end

  @doc """
  Reads the attributes of the file or directory at `remote_path`.
  Returns {:ok, File.Stat}, or {:error, reason}
  """
  def file_info(connection, remote_path) do
    case @sftp.read_file_info(connection, remote_path) do
      {:ok, file_info} -> {:ok, File.Stat.from_record(file_info)}
      e -> S.handle_error(e)
    end
  end

  @doc """
  Opens the file or directory at `path`. Directories are detected via
  file_info/2 and opened with open_dir/2; anything else is opened as a
  file with the given `mode` list.
  Returns {:ok, handle}, or {:error, reason}
  """
  def open(connection, path, mode) do
    # Pattern-match the stat result directly instead of nesting a second
    # case on info.type; behavior is unchanged.
    case file_info(connection, path) do
      {:ok, %File.Stat{type: :directory}} -> open_dir(connection, path)
      {:ok, _info} -> open_file(connection, path, mode)
      e -> S.handle_error(e)
    end
  end

  @doc "Opens a regular file; thin wrapper around the configured SFTP service."
  def open_file(connection, remote_path, mode) do
    @sftp.open(connection, remote_path, mode)
  end

  @doc """
  Opens a directory handle.
  Returns {:ok, handle}, or {:error, reason}
  """
  def open_dir(connection, remote_path) do
    case @sftp.open_directory(connection, remote_path) do
      {:ok, handle} -> {:ok, handle}
      e -> S.handle_error(e)
    end
  end

  # NOTE: the previously defined private create_file/2 (open with [:creat])
  # was unreferenced within this module; since defp functions cannot be
  # called externally it was dead code and has been removed.
end
| 26.482759 | 72 | 0.611979 |
799c4404fdfb7138f17a643faeebfb759a3f12b1 | 184 | exs | Elixir | priv/repo/migrations/20200423155735_add_variant_to_chapters.exs | chownces/cadet | 0d8b264e4fad1c9aaab7ef3f037ac4e07a4c9b22 | [
"Apache-2.0"
] | 27 | 2018-01-20T05:56:24.000Z | 2021-05-24T03:21:55.000Z | priv/repo/migrations/20200423155735_add_variant_to_chapters.exs | chownces/cadet | 0d8b264e4fad1c9aaab7ef3f037ac4e07a4c9b22 | [
"Apache-2.0"
] | 731 | 2018-04-16T13:25:49.000Z | 2021-06-22T07:16:12.000Z | priv/repo/migrations/20200423155735_add_variant_to_chapters.exs | chownces/cadet | 0d8b264e4fad1c9aaab7ef3f037ac4e07a4c9b22 | [
"Apache-2.0"
] | 43 | 2018-01-20T06:35:46.000Z | 2021-05-05T03:22:35.000Z | defmodule Cadet.Repo.Migrations.AddVariantToChapters do
use Ecto.Migration
  def change do
    # Chapters must always declare their variant, hence NOT NULL.
    # NOTE(review): no default value is set, so this assumes the chapters
    # table is empty when the migration runs (or is backfilled separately);
    # otherwise the NOT NULL constraint will fail on existing rows — confirm.
    alter table(:chapters) do
      add(:variant, :string, null: false)
    end
  end
end
| 18.4 | 55 | 0.711957 |
799c48973b2702a16230c06fe977ae695270ea0b | 7,464 | exs | Elixir | test/avrora/schema/encoder_test.exs | raphaklaus/avrora | c59a45bf84f726d3892cc13a2344965d0e5ef6ad | [
"MIT"
] | 59 | 2019-07-11T15:29:26.000Z | 2022-03-23T19:35:55.000Z | test/avrora/schema/encoder_test.exs | raphaklaus/avrora | c59a45bf84f726d3892cc13a2344965d0e5ef6ad | [
"MIT"
] | 63 | 2019-08-09T17:52:26.000Z | 2022-03-16T22:08:04.000Z | test/avrora/schema/encoder_test.exs | raphaklaus/avrora | c59a45bf84f726d3892cc13a2344965d0e5ef6ad | [
"MIT"
] | 22 | 2019-07-29T10:50:47.000Z | 2021-09-04T13:37:08.000Z | defmodule Avrora.Schema.EncoderTest do
use ExUnit.Case, async: true
doctest Avrora.Schema.Encoder
import Support.Config
alias Avrora.Schema
setup :support_config
# Parsing JSON Avro schemas into schema structs. The tuples destructured
# below are erlavro's internal AST: record/enum/fixed type tuples whose
# positional fields include the type tag, fields/symbols list and full name.
describe "from_json/2" do
  test "when payload is a valid Record json schema" do
    {:ok, schema} = Schema.Encoder.from_json(payment_json())
    {:ok, {type, _, _, _, _, fields, full_name, _}} = Schema.Encoder.to_erlavro(schema)

    assert type == :avro_record_type
    assert full_name == "io.confluent.Payment"
    assert length(fields) == 2

    assert schema.full_name == "io.confluent.Payment"
    # The struct keeps the original JSON verbatim.
    assert schema.json == payment_json()
  end

  test "when payload is a valid Enum schema" do
    {:ok, schema} = Schema.Encoder.from_json(card_type_json())
    {:ok, {type, _, _, _, _, fields, full_name, _}} = Schema.Encoder.to_erlavro(schema)

    assert type == :avro_enum_type
    assert full_name == "io.confluent.CardType"
    # For enums the sixth tuple element is the symbols list.
    assert length(fields) == 3

    assert schema.full_name == "io.confluent.CardType"
    assert schema.json == card_type_json()
  end

  test "when payload is a valid Fixed schema" do
    {:ok, schema} = Schema.Encoder.from_json(crc32_json())
    {:ok, {type, _, _, _, value, full_name, _}} = Schema.Encoder.to_erlavro(schema)

    assert type == :avro_fixed_type
    assert full_name == "io.confluent.CRC32"
    # For fixed types the fifth tuple element is the byte size.
    assert value == 8

    assert schema.full_name == "io.confluent.CRC32"
    assert schema.json == crc32_json()
  end

  test "when payload is a valid json schema with external reference and callback returns valid schema" do
    # The resolver callback is invoked once per referenced type name and must
    # return the JSON definition for that name.
    {:ok, schema} =
      Schema.Encoder.from_json(message_with_reference_json(), fn name ->
        case name do
          "io.confluent.Attachment" -> {:ok, attachment_json()}
          "io.confluent.Signature" -> {:ok, signature_json()}
          _ -> raise "unknown reference name!"
        end
      end)

    {:ok, {type, _, _, _, _, fields, full_name, _}} = Schema.Encoder.to_erlavro(schema)

    assert type == :avro_record_type
    assert full_name == "io.confluent.Message"
    assert length(fields) == 2

    assert schema.full_name == "io.confluent.Message"
    # References must be inlined: the stored JSON is the fully expanded form.
    assert schema.json == message_json()

    {:avro_record_field, _, _, body_type, _, _, _} = List.first(fields)
    assert body_type == {:avro_primitive_type, "string", []}

    # Drill into the attachments array item type (the resolved Attachment record).
    {:avro_record_field, _, _, attachments_type, _, _, _} = List.last(fields)
    {:avro_array_type, {type, _, _, _, _, fields, full_name, _}, []} = attachments_type

    assert type == :avro_record_type
    assert full_name == "io.confluent.Attachment"
    assert length(fields) == 2

    # And one level deeper: Attachment's signature field is the resolved
    # Signature record.
    {:avro_record_field, _, _, signature_type, _, _, _} = List.last(fields)
    {type, _, _, _, _, fields, full_name, _} = signature_type

    assert type == :avro_record_type
    assert full_name == "io.confluent.Signature"
    assert length(fields) == 1
  end

  test "when payload is a valid json schema with external reference and callback returns invalid schema" do
    result =
      Schema.Encoder.from_json(message_with_reference_json(), fn name ->
        assert name == "io.confluent.Attachment"
        # An empty JSON object has no "type" key, so resolution must fail.
        {:ok, ~s({})}
      end)

    assert {:error, {:not_found, "type"}} == result
  end

  test "when payload is a valid json schema with external reference and callback returns error" do
    result =
      Schema.Encoder.from_json(message_with_reference_json(), fn name ->
        assert name == "io.confluent.Attachment"
        {:error, :bad_thing_happen}
      end)

    # Callback errors propagate unchanged to the caller.
    assert {:error, :bad_thing_happen} == result
  end

  test "when payload is a valid json schema with external reference and no callback is given" do
    assert {:error, {:not_found, "type"}} ==
             Schema.Encoder.from_json(message_with_reference_json())
  end

  test "when payload is not a named type schema" do
    assert Schema.Encoder.from_json(unnamed_json()) == {:error, :unnamed_type}
  end

  test "when payload is an invalid json schema" do
    assert Schema.Encoder.from_json("a:b") == {:error, "argument error"}
    assert Schema.Encoder.from_json("{}") == {:error, {:not_found, "type"}}
  end
end
# Converting a parsed schema struct back into erlavro's AST representation.
describe "to_erlavro/1" do
  test "when payload is a valid json schema" do
    {:ok, schema} = Schema.Encoder.from_json(payment_json())
    {:ok, {type, _, _, _, _, fields, full_name, _}} = Schema.Encoder.to_erlavro(schema)

    assert type == :avro_record_type
    assert full_name == "io.confluent.Payment"
    assert length(fields) == 2
  end
end
# Building a schema struct from an erlavro AST, optionally overriding the
# generated JSON via the :json attribute.
describe "from_erlavro/2" do
  test "when payload is valid and no attributes are given" do
    {:ok, schema} = Schema.Encoder.from_erlavro(payment_erlavro())

    # Registry-related fields stay unset when built from a raw AST.
    assert is_nil(schema.id)
    assert is_nil(schema.version)
    assert schema.full_name == "io.confluent.Payment"
    # Without an override, JSON is regenerated from the AST.
    assert schema.json == payment_json()
  end

  test "when payload is valid and JSON attribute is given" do
    {:ok, schema} = Schema.Encoder.from_erlavro(payment_erlavro(), json: "{}")

    assert is_nil(schema.id)
    assert is_nil(schema.version)
    assert schema.full_name == "io.confluent.Payment"
    # The :json option is stored verbatim instead of the generated JSON.
    assert schema.json == "{}"
  end

  test "when payload is not a named type schema" do
    assert Schema.Encoder.from_erlavro(unnamed_erlavro()) == {:error, :unnamed_type}
  end
end
# Hand-built erlavro record AST equivalent to payment_json/0:
# an io.confluent.Payment record with string `id` and double `amount` fields.
defp payment_erlavro do
  {:avro_record_type, "Payment", "io.confluent", "", [],
   [
     {:avro_record_field, "id", "", {:avro_primitive_type, "string", []}, :undefined,
      :ascending, []},
     {:avro_record_field, "amount", "", {:avro_primitive_type, "double", []}, :undefined,
      :ascending, []}
   ], "io.confluent.Payment", []}
end
defp unnamed_erlavro, do: {:avro_array_type, {:avro_primitive_type, "string", []}, []}
defp unnamed_json, do: ~s({"type":"array","items":"string","default":[]})
defp crc32_json, do: ~s({"namespace":"io.confluent","name":"CRC32","type":"fixed","size":8})
# Record fixture referenced by attachment_json/0; contains a 1 MiB fixed checksum.
defp signature_json do
  ~s({"namespace":"io.confluent","name":"Signature","type":"record","fields":[{"name":"checksum","type":{"name":"SignatureChecksum","type":"fixed","size":1048576}}]})
end
# Record fixture that itself references io.confluent.Signature by name,
# so resolving it requires a second callback round-trip.
defp attachment_json do
  ~s({"namespace":"io.confluent","name":"Attachment","type":"record","fields":[{"name":"name","type":"string"},{"name":"signature","type":"io.confluent.Signature"}]})
end
# Self-contained record fixture (no external references).
defp payment_json do
  ~s({"namespace":"io.confluent","name":"Payment","type":"record","fields":[{"name":"id","type":"string"},{"name":"amount","type":"double"}]})
end
# Enum schema fixture with three symbols.
defp card_type_json do
  ~s({"namespace":"io.confluent","name":"CardType","type":"enum","symbols":["MASTERCARD","VISA","AMERICANEXPRESS"]})
end
# Record fixture whose attachments array items are a bare type-name reference
# ("io.confluent.Attachment") that must be resolved via the callback.
defp message_with_reference_json do
  ~s({"namespace":"io.confluent","name":"Message","type":"record","fields":[{"name":"body","type":"string"},{"name":"attachments","type":{"type":"array","items":"io.confluent.Attachment"}}]})
end
# Expected fully-resolved form of message_with_reference_json/0, with the
# Attachment and Signature definitions inlined where they were referenced.
defp message_json do
  ~s({"namespace":"io.confluent","name":"Message","type":"record","fields":[{"name":"body","type":"string"},{"name":"attachments","type":{"type":"array","items":{"name":"Attachment","type":"record","fields":[{"name":"name","type":"string"},{"name":"signature","type":{"name":"Signature","type":"record","fields":[{"name":"checksum","type":{"name":"SignatureChecksum","type":"fixed","size":1048576}}]}}]}}}]})
end
end
| 38.474227 | 410 | 0.640541 |
799c7163e8bf3de80d485a313fade15dd2cc9756 | 2,300 | ex | Elixir | lib/paddington/transducers/launchpad_transducer.ex | lucidstack/paddington | 24e7709e33d096d7f0d9c5a6e58401b8c6806bcc | [
"Apache-2.0"
] | 6 | 2016-04-09T00:13:20.000Z | 2019-04-15T11:47:59.000Z | lib/paddington/transducers/launchpad_transducer.ex | lucidstack/paddington | 24e7709e33d096d7f0d9c5a6e58401b8c6806bcc | [
"Apache-2.0"
] | 1 | 2018-03-19T00:08:52.000Z | 2018-03-19T00:08:52.000Z | lib/paddington/transducers/launchpad_transducer.ex | lucidstack/paddington | 24e7709e33d096d7f0d9c5a6e58401b8c6806bcc | [
"Apache-2.0"
] | 1 | 2018-03-18T23:56:29.000Z | 2018-03-18T23:56:29.000Z | defmodule Paddington.Transducers.LaunchpadTransducer do
@behaviour Paddington.Transducer
import Logger, only: [warn: 1]
defmodule OutOfBoundsCoordsError, do: defexception [:message]
# Guard-compatible macro: true when `coord` lies within the 8x8 grid (0..7).
defmacro in_bounds(coord), do:
  quote do: unquote(coord) >= 0 and unquote(coord) <= 7
# MIDI status bytes: 144 (0x90) is a note-on message, 176 (0xB0) is a
# control-change message.
@grid_status 144
@right_status 144
@top_status 176
@reset_status 176

# The top row's controller numbers start at 104.
@top_base_note 104
# Note numbers of the right-hand column buttons, one per grid row (row * 16 + 8).
@right_notes [8, 24, 40, 56, 72, 88, 104, 120]

# Base velocity added to every color encoding (see velocity/1).
@base_velocity 12
# Velocities the device sends for button press / release events.
@press_velocity 127
@release_velocity 0
@reset_velocity 0
# Device names this transducer handles (Paddington.Transducer callback).
def devices do
  ["Launchpad", "Launchpad Mini"]
end
# MIDI => Paddington
####################
# MIDI event -> Paddington coordinate.
#
# Events arrive as `{status, note, velocity}` triples. Status 176 with notes
# offset from @top_base_note is the top row; status 144 with a note in
# @right_notes is the right column; any other status-144 note is the 8x8 grid.

# Top row
def to_coord({@top_status, note, @press_velocity}), do:
  {:top, note - @top_base_note, :pressed}
def to_coord({@top_status, note, @release_velocity}), do:
  {:top, note - @top_base_note, :released}

# Right column. Use integer division (div/2) so the returned row index is an
# integer 0..7, consistent with the integer indices produced by the top-row
# and grid clauses — the previous `(note - 8) / 16` yielded floats (0.0..7.0).
def to_coord({@right_status, note, @press_velocity}) when note in @right_notes, do:
  {:right, div(note - 8, 16), :pressed}
def to_coord({@right_status, note, @release_velocity}) when note in @right_notes, do:
  {:right, div(note - 8, 16), :released}

# Grid: note = row * 16 + column.
def to_coord({@grid_status, note, @press_velocity}), do:
  {:grid, rem(note, 16), trunc(note/16), :pressed}
def to_coord({@grid_status, note, @release_velocity}), do:
  {:grid, rem(note, 16), trunc(note/16), :released}

# Fallback: log and ignore unknown events (also fixes the "tranducer" typo
# in the log message).
def to_coord(input), do:
  warn("Can't find a transducer for MIDI event " <> inspect(input))
# Paddington => MIDI
####################
# Paddington coordinate -> MIDI message.

# Grid pad: coordinates are validated by the in_bounds/1 macro guard (0..7);
# velocity encodes the requested LED colors (see velocity/1).
def to_midi(:grid, pos: {x, y}, colors: colors) when in_bounds(x) and in_bounds(y), do:
  {@grid_status, to_note(x, y), velocity(colors)}

# Out-of-range coordinates are a caller error and raise explicitly.
def to_midi(:grid, pos: {_x, _y}, colors: _), do:
  raise OutOfBoundsCoordsError, "x and y must be between 0 and 7"

# Device reset message (status @reset_status, note 0, velocity @reset_velocity).
def to_midi(:reset), do:
  {@reset_status, 0, @reset_velocity}
# Private implementation
########################
defp to_note(x, y), do: y * 16 + x
import Keyword, only: [get: 3]
# Encodes the pad color as a MIDI velocity: red brightness occupies the low
# two bits, green brightness is shifted into bits 4-5 (x16), and
# @base_velocity is always added on top.
defp velocity(colors) do
  red_level = colors |> get(:red, :off) |> brightness()
  green_level = colors |> get(:green, :off) |> brightness()

  red_level + green_level * 16 + @base_velocity
end
# Maps a symbolic brightness level to its numeric 2-bit value (0..3).
defp brightness(level) do
  case level do
    :off -> 0
    :low -> 1
    :medium -> 2
    :high -> 3
  end
end
end
| 28.75 | 89 | 0.635652 |
799cbb84ff4e9710b9730d62f7d96ceb2a72b802 | 356 | ex | Elixir | lib/veil.ex | hassan/veil | 345b5d3539ccaba7e51b4e83583e9d15d8379fd4 | [
"MIT"
] | null | null | null | lib/veil.ex | hassan/veil | 345b5d3539ccaba7e51b4e83583e9d15d8379fd4 | [
"MIT"
] | null | null | null | lib/veil.ex | hassan/veil | 345b5d3539ccaba7e51b4e83583e9d15d8379fd4 | [
"MIT"
] | null | null | null | defmodule Veil do
@moduledoc """
OTP application entry point for Veil.

Starts a supervision tree containing the `Veil.Scheduler` and `Veil.Secret`
workers (see `start/2`).
"""

use Application
# Application callback: builds and starts the supervision tree.
#
# NOTE(review): `Supervisor.Spec` (worker/2) is deprecated since Elixir 1.5;
# consider migrating to plain child specs (`[Veil.Scheduler, Veil.Secret]`)
# once the minimum Elixir version allows — confirm both modules expose
# child_spec/1 before changing.
def start(_type, _args) do
  import Supervisor.Spec, warn: false

  # Each worker is restarted independently (:one_for_one).
  children = [
    worker(Veil.Scheduler, []),
    worker(Veil.Secret, [])
  ]

  opts = [strategy: :one_for_one, name: Veil.Supervisor]
  Supervisor.start_link(children, opts)
end
end
| 17.8 | 58 | 0.643258 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.