hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e09e49e44c5313520557c45d6f3e2030039bbf8 | 1,019 | ex | Elixir | installer/templates/phx_web/views/error_view.ex | faheempatel/phoenix | a83318f2a2284b7ab29b0b86cdd9d2e1f4d0a7c9 | [
"MIT"
] | 18,092 | 2015-01-01T01:51:04.000Z | 2022-03-31T19:37:14.000Z | installer/templates/phx_web/views/error_view.ex | faheempatel/phoenix | a83318f2a2284b7ab29b0b86cdd9d2e1f4d0a7c9 | [
"MIT"
] | 3,905 | 2015-01-01T00:22:47.000Z | 2022-03-31T17:06:21.000Z | installer/templates/phx_web/views/error_view.ex | faheempatel/phoenix | a83318f2a2284b7ab29b0b86cdd9d2e1f4d0a7c9 | [
"MIT"
] | 3,205 | 2015-01-03T10:58:22.000Z | 2022-03-30T14:55:57.000Z | defmodule <%= @web_namespace %>.ErrorView do
use <%= @web_namespace %>, :view
<%= if @html do %># If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end<% else %># If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.json", _assigns) do
# %{errors: %{detail: "Internal Server Error"}}
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.json" becomes
# "Not Found".
def template_not_found(template, _assigns) do
%{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
end<% end %>
end
| 36.392857 | 83 | 0.703631 |
9e09ea559786b670945cb86f6639b2086fcdcfbd | 1,229 | ex | Elixir | lib/game/command/mistake.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | lib/game/command/mistake.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | lib/game/command/mistake.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | defmodule Game.Command.Mistake do
@moduledoc """
Module to capture common mistakes.
"""
use Game.Command
commands(["kill", "attack"], parse: false)
@impl Game.Command
def help(:topic), do: "Mistakes"
def help(:short), do: "Common command mistakes"
def help(:full) do
"""
#{help(:short)}. This command catches common mistakes and directs you
to more information about the subject.
"""
end
@impl true
def parse(command, _context), do: parse(command)
@impl Game.Command
@doc """
Parse out extra information
iex> Game.Command.Mistake.parse("kill")
{:auto_combat}
iex> Game.Command.Mistake.parse("attack")
{:auto_combat}
iex> Game.Command.Mistake.parse("unknown")
{:error, :bad_parse, "unknown"}
"""
@spec parse(command :: String.t()) :: {atom}
def parse(command)
def parse("attack" <> _), do: {:auto_combat}
def parse("kill" <> _), do: {:auto_combat}
@impl Game.Command
def run(command, state)
def run({:auto_combat}, %{socket: socket}) do
message =
gettext(
"There is no auto combat. Please read {command}help combat{/command} for more information."
)
socket |> @socket.echo(message)
end
end
| 22.759259 | 99 | 0.63629 |
9e0a1148181f43428ce0fc5be42009425994fe6f | 583 | exs | Elixir | test/kindler_web/views/error_view_test.exs | hbogaeus/kindler | 0745ca9eed91f378cf605f808645b301f78f8676 | [
"MIT"
] | null | null | null | test/kindler_web/views/error_view_test.exs | hbogaeus/kindler | 0745ca9eed91f378cf605f808645b301f78f8676 | [
"MIT"
] | null | null | null | test/kindler_web/views/error_view_test.exs | hbogaeus/kindler | 0745ca9eed91f378cf605f808645b301f78f8676 | [
"MIT"
] | null | null | null | defmodule KindlerWeb.ErrorViewTest do
use KindlerWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(KindlerWeb.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(KindlerWeb.ErrorView, "500.html", []) ==
"Internal server error"
end
test "render any other" do
assert render_to_string(KindlerWeb.ErrorView, "505.html", []) ==
"Internal server error"
end
end
| 26.5 | 68 | 0.684391 |
9e0a297a05ba00beab81363301cc2ce73a8ce652 | 2,190 | ex | Elixir | lib/mix/lib/mix/tasks/compile.leex.ex | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/mix/lib/mix/tasks/compile.leex.ex | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 1 | 2015-06-09T15:52:43.000Z | 2015-06-09T15:52:43.000Z | lib/mix/lib/mix/tasks/compile.leex.ex | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 1 | 2018-01-09T20:10:59.000Z | 2018-01-09T20:10:59.000Z | defmodule Mix.Tasks.Compile.Leex do
use Mix.Task.Compiler
alias Mix.Compilers.Erlang
@recursive true
@manifest "compile.leex"
@switches [force: :boolean, verbose: :boolean, all_warnings: :boolean]
# These options can't be controlled with :leex_options.
@forced_opts [report: true, return: true]
@moduledoc """
Compiles Leex source files.
When this task runs, it will check the modification time of every file, and
if it has changed, the file will be compiled. Files will be
compiled in the same source directory with a .erl extension.
You can force compilation regardless of modification times by passing
the `--force` option.
## Command line options
* `--force` - forces compilation regardless of modification times
* `--all-warnings` - prints warnings even from files that do not need to be
recompiled
## Configuration
* `:erlc_paths` - directories to find source files. Defaults to `["src"]`.
* `:leex_options` - compilation options that apply
to Leex's compiler.
For a complete list of options, see `:leex.file/2`.
Note that the `:report`, `:return_errors`, and `:return_warnings` options
are overridden by this compiler, thus setting them has no effect.
"""
@impl true
def run(args) do
{opts, _, _} = OptionParser.parse(args, switches: @switches)
project = Mix.Project.config()
source_paths = project[:erlc_paths]
Mix.Compilers.Erlang.assert_valid_erlc_paths(source_paths)
mappings = Enum.zip(source_paths, source_paths)
options = project[:leex_options] || []
unless is_list(options) do
Mix.raise(":leex_options should be a list of options, got: #{inspect(options)}")
end
Erlang.compile(manifest(), mappings, :xrl, :erl, opts, fn input, output ->
Erlang.ensure_application!(:parsetools, input)
options = options ++ @forced_opts ++ [scannerfile: Erlang.to_erl_file(output)]
:leex.file(Erlang.to_erl_file(input), options)
end)
end
@impl true
def manifests, do: [manifest()]
defp manifest, do: Path.join(Mix.Project.manifest_path(), @manifest)
@impl true
def clean do
Erlang.clean(manifest())
end
end
| 29.594595 | 86 | 0.696347 |
9e0a2cf3f7431e1ce28d475ba2b9e9e3b97c7e31 | 255 | exs | Elixir | config/test.exs | knathan2/dot | 2e73d1339592e8d317d1b7c5acf777f86aeebe9d | [
"Apache-2.0"
] | null | null | null | config/test.exs | knathan2/dot | 2e73d1339592e8d317d1b7c5acf777f86aeebe9d | [
"Apache-2.0"
] | null | null | null | config/test.exs | knathan2/dot | 2e73d1339592e8d317d1b7c5acf777f86aeebe9d | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :dot, Dot.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 23.181818 | 56 | 0.72549 |
9e0a7e527ec64b34874ec4fc9760416e5160d68d | 3,516 | exs | Elixir | apps/ewallet/test/ewallet/web/config_test.exs | enyan94/ewallet | e938e686319867d133b21cd0eb5496e213ae7620 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/test/ewallet/web/config_test.exs | enyan94/ewallet | e938e686319867d133b21cd0eb5496e213ae7620 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/test/ewallet/web/config_test.exs | enyan94/ewallet | e938e686319867d133b21cd0eb5496e213ae7620 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.Web.ConfigTest do
# `async: false` since the tests require `Application.put_env/3`.
use ExUnit.Case, async: false
alias EWallet.Web.Config
# Take note of the original config value, then delete it.
defp set_system_env(key, nil) do
original = System.get_env(key)
{System.delete_env(key), original}
end
# Take note of the original config value, then update it.
defp set_system_env(key, value) when not is_binary(value) do
set_system_env(key, to_string(value))
end
defp set_system_env(key, value) do
original = System.get_env(key)
{System.put_env(key, value), original}
end
# Take note of the original config value, then delete it.
defp delete_config(app, key) do
original = Application.get_env(app, key)
{Application.delete_env(app, key), original}
end
describe "configure_cors_plug/0" do
test "sets CORS_MAX_AGE to :max_age" do
new_env = 1234
{:ok, original_env} = set_system_env("CORS_MAX_AGE", new_env)
{:ok, original_config} = delete_config(:cors_plug, :max_age)
# Invoke & assert
res = Config.configure_cors_plug()
assert res == :ok
assert Application.get_env(:cors_plug, :max_age) == new_env
# Revert the env var and app config to their original values.
:ok = Application.put_env(:cors_plug, :max_age, original_config)
{:ok, _} = set_system_env("CORS_MAX_AGE", original_env)
end
test "sets the :headers to a list" do
{:ok, original_config} = delete_config(:cors_plug, :headers)
# Invoke & assert
res = Config.configure_cors_plug()
assert res == :ok
assert is_list(Application.get_env(:cors_plug, :headers))
# Revert the app config to its original value.
:ok = Application.put_env(:cors_plug, :headers, original_config)
end
test "sets the :methods to [\"POST\"]" do
{:ok, original_config} = delete_config(:cors_plug, :methods)
# Invoke & assert
res = Config.configure_cors_plug()
assert res == :ok
assert is_list(Application.get_env(:cors_plug, :headers))
# Revert the app config to its original value.
:ok = Application.put_env(:cors_plug, :headers, original_config)
end
test "sets CORS_ORIGIN to :origin" do
new_env = "https://example.com, https://second.example.com"
new_parsed_env = ["https://example.com", "https://second.example.com"]
{:ok, original_env} = set_system_env("CORS_ORIGIN", new_env)
{:ok, original_config} = delete_config(:cors_plug, :origin)
# Invoke & assert
res = Config.configure_cors_plug()
assert res == :ok
assert Application.get_env(:cors_plug, :origin) == new_parsed_env
# Revert the env var and app config to their original values.
:ok = Application.put_env(:cors_plug, :origin, original_config)
{:ok, _} = set_system_env("CORS_ORIGIN", original_env)
end
end
end
| 35.16 | 76 | 0.692548 |
9e0ae124c95577d3f51c2ba230440da2aea26743 | 2,040 | ex | Elixir | clients/ad_sense/lib/google_api/ad_sense/v2/model/alert.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_sense/lib/google_api/ad_sense/v2/model/alert.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_sense/lib/google_api/ad_sense/v2/model/alert.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdSense.V2.Model.Alert do
@moduledoc """
Representation of an alert.
## Attributes
* `message` (*type:* `String.t`, *default:* `nil`) - Output only. The localized alert message. This may contain HTML markup, such as phrase elements or links.
* `name` (*type:* `String.t`, *default:* `nil`) - Resource name of the alert. Format: accounts/{account}/alerts/{alert}
* `severity` (*type:* `String.t`, *default:* `nil`) - Output only. Severity of this alert.
* `type` (*type:* `String.t`, *default:* `nil`) - Output only. Type of alert. This identifies the broad type of this alert, and provides a stable machine-readable identifier that will not be translated. For example, "payment-hold".
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:message => String.t() | nil,
:name => String.t() | nil,
:severity => String.t() | nil,
:type => String.t() | nil
}
field(:message)
field(:name)
field(:severity)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.AdSense.V2.Model.Alert do
def decode(value, options) do
GoogleApi.AdSense.V2.Model.Alert.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdSense.V2.Model.Alert do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.428571 | 235 | 0.695588 |
9e0ae1b5764f4e250967850076858c26a8e616b1 | 9,913 | exs | Elixir | lib/elixir/test/elixir/kernel/raise_test.exs | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | 1 | 2021-05-20T13:08:37.000Z | 2021-05-20T13:08:37.000Z | lib/elixir/test/elixir/kernel/raise_test.exs | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/raise_test.exs | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | 8 | 2018-02-20T18:30:53.000Z | 2019-06-18T14:23:31.000Z | Code.require_file("../test_helper.exs", __DIR__)
defmodule Kernel.RaiseTest do
use ExUnit.Case, async: true
# Silence warnings
defp atom, do: RuntimeError
defp binary, do: "message"
defp opts, do: [message: "message"]
defp struct, do: %RuntimeError{message: "message"}
@trace [{:foo, :bar, 0, []}]
test "raise message" do
assert_raise RuntimeError, "message", fn ->
raise "message"
end
assert_raise RuntimeError, "message", fn ->
var = binary()
raise var
end
end
test "raise with no arguments" do
assert_raise RuntimeError, fn ->
raise RuntimeError
end
assert_raise RuntimeError, fn ->
var = atom()
raise var
end
end
test "raise with arguments" do
assert_raise RuntimeError, "message", fn ->
raise RuntimeError, message: "message"
end
assert_raise RuntimeError, "message", fn ->
atom = atom()
opts = opts()
raise atom, opts
end
end
test "raise existing exception" do
assert_raise RuntimeError, "message", fn ->
raise %RuntimeError{message: "message"}
end
assert_raise RuntimeError, "message", fn ->
var = struct()
raise var
end
end
test "reraise message" do
try do
reraise "message", @trace
flunk("should not reach")
rescue
RuntimeError ->
assert @trace == :erlang.get_stacktrace()
end
try do
var = binary()
reraise var, @trace
flunk("should not reach")
rescue
RuntimeError ->
assert @trace == :erlang.get_stacktrace()
end
end
test "reraise with no arguments" do
try do
reraise RuntimeError, @trace
flunk("should not reach")
rescue
RuntimeError ->
assert @trace == :erlang.get_stacktrace()
end
try do
var = atom()
reraise var, @trace
flunk("should not reach")
rescue
RuntimeError ->
assert @trace == :erlang.get_stacktrace()
end
end
test "reraise with arguments" do
try do
reraise RuntimeError, [message: "message"], @trace
flunk("should not reach")
rescue
RuntimeError ->
assert @trace == :erlang.get_stacktrace()
end
try do
atom = atom()
opts = opts()
reraise atom, opts, @trace
flunk("should not reach")
rescue
RuntimeError ->
assert @trace == :erlang.get_stacktrace()
end
end
test "reraise existing exception" do
try do
reraise %RuntimeError{message: "message"}, @trace
flunk("should not reach")
rescue
RuntimeError ->
assert @trace == :erlang.get_stacktrace()
end
try do
var = struct()
reraise var, @trace
flunk("should not reach")
rescue
RuntimeError ->
assert @trace == :erlang.get_stacktrace()
end
end
describe "rescue" do
test "runtime error" do
result =
try do
raise "an exception"
rescue
RuntimeError -> true
catch
:error, _ -> false
end
assert result
result =
try do
raise "an exception"
rescue
AnotherError -> true
catch
:error, _ -> false
end
refute result
end
test "named runtime error" do
result =
try do
raise "an exception"
rescue
x in [RuntimeError] -> Exception.message(x)
catch
:error, _ -> false
end
assert result == "an exception"
end
test "with higher precedence than catch" do
result =
try do
raise "an exception"
rescue
_ -> true
catch
_, _ -> false
end
assert result
end
test "argument error from erlang" do
result =
try do
:erlang.error(:badarg)
rescue
ArgumentError -> true
end
assert result
end
test "argument error from elixir" do
result =
try do
raise ArgumentError, ""
rescue
ArgumentError -> true
end
assert result
end
test "catch-all variable" do
result =
try do
raise "an exception"
rescue
x -> Exception.message(x)
end
assert result == "an exception"
end
test "catch-all underscore" do
result =
try do
raise "an exception"
rescue
_ -> true
end
assert result
end
test "catch-all unused variable" do
result =
try do
raise "an exception"
rescue
_any -> true
end
assert result
end
test "catch-all with \"x in _\" syntax" do
result =
try do
raise "an exception"
rescue
exception in _ ->
Exception.message(exception)
end
assert result == "an exception"
end
end
describe "normalize" do
test "wrap custom Erlang error" do
result =
try do
:erlang.error(:sample)
rescue
x in [ErlangError] -> Exception.message(x)
end
assert result == "Erlang error: :sample"
end
test "undefined function error" do
result =
try do
DoNotExist.for_sure()
rescue
x in [UndefinedFunctionError] -> Exception.message(x)
end
assert result ==
"function DoNotExist.for_sure/0 is undefined (module DoNotExist is not available)"
end
test "function clause error" do
result =
try do
zero(1)
rescue
x in [FunctionClauseError] -> Exception.message(x)
end
assert result == "no function clause matching in Kernel.RaiseTest.zero/1"
end
test "badarg error" do
result =
try do
:erlang.error(:badarg)
rescue
x in [ArgumentError] -> Exception.message(x)
end
assert result == "argument error"
end
test "tuple badarg error" do
result =
try do
:erlang.error({:badarg, [1, 2, 3]})
rescue
x in [ArgumentError] -> Exception.message(x)
end
assert result == "argument error: [1, 2, 3]"
end
test "badarith error" do
result =
try do
:erlang.error(:badarith)
rescue
x in [ArithmeticError] -> Exception.message(x)
end
assert result == "bad argument in arithmetic expression"
end
test "badarity error" do
fun = fn x -> x end
string = "#{inspect(fun)} with arity 1 called with 2 arguments (1, 2)"
result =
try do
fun.(1, 2)
rescue
x in [BadArityError] -> Exception.message(x)
end
assert result == string
end
test "badfun error" do
# Avoid "invalid function call" warning in >= OTP 19
x = fn -> :example end
result =
try do
x.().(2)
rescue
x in [BadFunctionError] -> Exception.message(x)
end
assert result == "expected a function, got: :example"
end
test "badmatch error" do
x = :example
result =
try do
^x = zero(0)
rescue
x in [MatchError] -> Exception.message(x)
end
assert result == "no match of right hand side value: 0"
end
test "bad key error" do
result =
try do
%{%{} | foo: :bar}
rescue
x in [KeyError] -> Exception.message(x)
end
assert result == "key :foo not found"
result =
try do
%{}.foo
rescue
x in [KeyError] -> Exception.message(x)
end
assert result == "key :foo not found in: %{}"
end
test "bad map error" do
result =
try do
%{zero(0) | foo: :bar}
rescue
x in [BadMapError] -> Exception.message(x)
end
assert result == "expected a map, got: 0"
end
test "bad boolean error" do
result =
try do
1 and true
rescue
x in [BadBooleanError] -> Exception.message(x)
end
assert result == "expected a boolean on left-side of \"and\", got: 1"
end
test "case clause error" do
x = :example
result =
try do
case zero(0) do
^x -> nil
end
rescue
x in [CaseClauseError] -> Exception.message(x)
end
assert result == "no case clause matching: 0"
end
test "cond clause error" do
result =
try do
cond do
!zero(0) -> :ok
end
rescue
x in [CondClauseError] -> Exception.message(x)
end
assert result == "no cond clause evaluated to a true value"
end
test "try clause error" do
f = fn -> :example end
result =
try do
try do
f.()
else
:other ->
:ok
end
rescue
x in [TryClauseError] -> Exception.message(x)
end
assert result == "no try clause matching: :example"
end
test "undefined function error as Erlang error" do
result =
try do
DoNotExist.for_sure()
rescue
x in [ErlangError] -> Exception.message(x)
end
assert result ==
"function DoNotExist.for_sure/0 is undefined (module DoNotExist is not available)"
end
end
defmacrop exceptions do
[ErlangError]
end
test "with macros" do
result =
try do
DoNotExist.for_sure()
rescue
x in exceptions() -> Exception.message(x)
end
assert result ==
"function DoNotExist.for_sure/0 is undefined (module DoNotExist is not available)"
end
defp zero(0), do: 0
end
| 20.52381 | 97 | 0.537173 |
9e0b130fe9742080eb8e4e03f7d6b30c09583310 | 1,178 | ex | Elixir | lib/litelist_web/channels/user_socket.ex | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T15:46:49.000Z | 2021-01-13T15:46:49.000Z | lib/litelist_web/channels/user_socket.ex | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T16:42:24.000Z | 2021-01-13T16:42:24.000Z | lib/litelist_web/channels/user_socket.ex | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | null | null | null | defmodule LitelistWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", LitelistWeb.RoomChannel
## Transports
# transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# LitelistWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31 | 83 | 0.702886 |
9e0b36fc51adea2b2534dbcc9c49587bd7c07b18 | 1,774 | exs | Elixir | test/bsv/crypto/ecies_test.exs | afomi/bsv-ex | a31db1e9d223aa4ac9cc00e86b1e6344a0037805 | [
"Apache-2.0"
] | null | null | null | test/bsv/crypto/ecies_test.exs | afomi/bsv-ex | a31db1e9d223aa4ac9cc00e86b1e6344a0037805 | [
"Apache-2.0"
] | null | null | null | test/bsv/crypto/ecies_test.exs | afomi/bsv-ex | a31db1e9d223aa4ac9cc00e86b1e6344a0037805 | [
"Apache-2.0"
] | null | null | null | defmodule BSV.Crypto.ECIESTest do
use ExUnit.Case
doctest BSV.Crypto.ECIES
setup_all do
keys = BSV.KeyPair.generate
%{
pub_key: keys.public_key,
priv_key: keys.private_key
}
end
describe "BSV.Crypto.ECIES.encrypt/3 and BSV.Crypto.ECIES.decrypt/3" do
test "encryption with public key and decryption with private key", ctx do
result = "hello world"
|> BSV.Crypto.ECIES.encrypt(ctx.pub_key)
|> BSV.Crypto.ECIES.decrypt(ctx.priv_key)
assert result == "hello world"
end
test "must encrypt and return a binary", ctx do
enc_data = BSV.Crypto.ECIES.encrypt("hello world", ctx.pub_key)
assert enc_data != "hello world"
assert byte_size(enc_data) >= 85
end
test "must return specifified encoding", ctx do
enc_data = BSV.Crypto.ECIES.encrypt("hello world", ctx.pub_key, encoding: :hex)
assert String.match?(enc_data, ~r/^[a-f0-9]+$/i)
end
end
describe "External messages" do
test "decrypt message from bsv.js" do
keys = BSV.Test.bsv_keys |> BSV.KeyPair.from_ecdsa_key
data = "QklFMQMtEGxuc+iWInmjAwv6TXBZeH9qSGAygd86Cl3uM8xR7HDRahwebjAI05NEaSsXdGU7uwDZB01idKa9V1kaAkavijnrlUXIkaaIZ1jxn+LzUy0PxUCx7MlNO24XHlHUoRA="
msg = BSV.Crypto.ECIES.decrypt(data, keys.private_key, encoding: :base64)
assert msg == "Yes, today is FRIDAY!"
end
test "decrypt message from Electrum" do
keys = BSV.Test.bsv_keys |> BSV.KeyPair.from_ecdsa_key
data = "QklFMQMtfEIACPib3IMLXziejcfFhP6ljTbudAzTs1fnsc8QDU2fIenGbSH0XXUBfERf4DgYnrh7gmH98GymM2oHUkXoaVXpOWnwd5h+VtydSUDM0r4HO5RwwfIOUmfsLmNQ+t0="
msg = BSV.Crypto.ECIES.decrypt(data, keys.private_key, encoding: :base64)
assert msg == "It's friday today!"
end
end
end
| 34.115385 | 151 | 0.708005 |
9e0b3f65fd88de4f92f75329b86d071cd1789990 | 54 | ex | Elixir | lib/tnd/characters/gear.ex | tndrpg/tnd | a9a348ed7ce2f3d8f55046559f9551e2607f3236 | [
"0BSD"
] | null | null | null | lib/tnd/characters/gear.ex | tndrpg/tnd | a9a348ed7ce2f3d8f55046559f9551e2607f3236 | [
"0BSD"
] | 1 | 2021-05-11T14:31:58.000Z | 2021-05-11T14:31:58.000Z | lib/tnd/characters/gear.ex | tndrpg/tnd | a9a348ed7ce2f3d8f55046559f9551e2607f3236 | [
"0BSD"
] | null | null | null | defmodule Tnd.Characters.Gear do
use Tnd.Schema
end
| 13.5 | 32 | 0.796296 |
9e0b4706bacc125ed42ecc0d3c73044a214d3f7f | 1,966 | ex | Elixir | lib/people_sorter/cli.ex | mstang/people_sorter | e712622c071748c79b26e977a8b029e1ba1877c5 | [
"MIT"
] | null | null | null | lib/people_sorter/cli.ex | mstang/people_sorter | e712622c071748c79b26e977a8b029e1ba1877c5 | [
"MIT"
] | null | null | null | lib/people_sorter/cli.ex | mstang/people_sorter | e712622c071748c79b26e977a8b029e1ba1877c5 | [
"MIT"
] | null | null | null | defmodule PeopleSorter.CLI do
@moduledoc """
This is the command line interface to People Sorter
"""
@doc """
Main entry point for the cli process
"""
def main(args \\ []) do
IO.puts("Welcome to the People Sorter Program")
with {sort_by, filenames} <- parse_args(args),
:ok <- validate_sort_by(sort_by),
:ok <- validate_filenames(filenames) do
load_files(filenames)
print_response(sort_by)
else
:sort_by_error ->
IO.puts("--sort-by is required and must be either color, dob or last_name")
:missing_filename_error ->
IO.puts("At least one filename is required")
end
end
@doc """
parse the command-line arguments
"""
def parse_args(command_line_args) do
{parsed, filenames, _invalid} =
OptionParser.parse(command_line_args, strict: [sort_by: :string])
sort_by = parsed[:sort_by]
{sort_by, filenames}
end
@doc """
Validate that a correct sort_by was provided
"""
def validate_sort_by(sort_by) when sort_by in ["dob", "color", "last_name"] do
:ok
end
def validate_sort_by(_) do
:sort_by_error
end
@doc """
Validate that we have at least one filename was provided
"""
def validate_filenames([]) do
:missing_filename_error
end
def validate_filenames(_) do
:ok
end
@doc """
load_files - load each individual file
"""
def load_files(filenames) do
for filename <- filenames do
PeopleSorter.FileLoader.load_file(filename)
end
end
@doc """
Print out the list of the file(s) that were processed
"""
def print_response(sort_by) do
sorted_list =
case sort_by do
"dob" -> PeopleSorter.get_list_sorted_by_dob()
"last_name" -> PeopleSorter.get_list_sorted_by_last_name()
"color" -> PeopleSorter.get_list_sorted_by_color_last_name()
end
for person <- sorted_list do
person
|> to_string
|> IO.puts()
end
end
end
| 23.129412 | 83 | 0.65412 |
9e0b7ad74afbba8e327e0f5a6a44f2a4e9c776c0 | 66,346 | ex | Elixir | lib/elixir/lib/module.ex | Javyre/elixir | 4da31098a84268d040e569590515744c02efb9cc | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module.ex | Javyre/elixir | 4da31098a84268d040e569590515744c02efb9cc | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module.ex | Javyre/elixir | 4da31098a84268d040e569590515744c02efb9cc | [
"Apache-2.0"
] | null | null | null | defmodule Module do
@moduledoc ~S'''
Provides functions to deal with modules during compilation time.
It allows a developer to dynamically add, delete and register
attributes, attach documentation and so forth.
After a module is compiled, using many of the functions in
this module will raise errors, since it is out of their scope
to inspect runtime data. Most of the runtime data can be inspected
via the [`__info__/1`](`c:Module.__info__/1`) function attached to
each compiled module.
## Module attributes
Each module can be decorated with one or more attributes. The following ones
are currently defined by Elixir:
### `@after_compile`
A hook that will be invoked right after the current module is compiled.
Accepts a module or a `{module, function_name}`. See the "Compile callbacks"
section below.
### `@before_compile`
A hook that will be invoked before the module is compiled.
Accepts a module or a `{module, function_or_macro_name}` tuple.
See the "Compile callbacks" section below.
### `@behaviour`
Note the British spelling!
Behaviours can be referenced by modules to ensure they implement
required specific function signatures defined by `@callback`.
For example, you could specify a `URI.Parser` behaviour as follows:
defmodule URI.Parser do
@doc "Defines a default port"
@callback default_port() :: integer
@doc "Parses the given URL"
@callback parse(uri_info :: URI.t()) :: URI.t()
end
And then a module may use it as:
defmodule URI.HTTP do
@behaviour URI.Parser
def default_port(), do: 80
def parse(info), do: info
end
If the behaviour changes or `URI.HTTP` does not implement
one of the callbacks, a warning will be raised.
### `@impl`
To aid in the correct implementation of behaviours, you may optionally declare
`@impl` for implemented callbacks of a behaviour. This makes callbacks
explicit and can help you to catch errors in your code. The compiler will warn
in these cases:
* if you mark a function with `@impl` when that function is not a callback.
* if you don't mark a function with `@impl` when other functions are marked
with `@impl`. If you mark one function with `@impl`, you must mark all
other callbacks for that behaviour as `@impl`.
`@impl` works on a per-context basis. If you generate a function through a macro
and mark it with `@impl`, that won't affect the module where that function is
generated in.
`@impl` also helps with maintainability by making it clear to other developers
that the function is implementing a callback.
Using `@impl`, the example above can be rewritten as:
defmodule URI.HTTP do
@behaviour URI.Parser
@impl true
def default_port(), do: 80
@impl true
def parse(info), do: info
end
You may pass either `false`, `true`, or a specific behaviour to `@impl`.
defmodule Foo do
@behaviour Bar
@behaviour Baz
# Will warn if neither Bar nor Baz specify a callback named bar/0.
@impl true
def bar(), do: :ok
# Will warn if Baz does not specify a callback named baz/0.
@impl Baz
def baz(), do: :ok
end
The code is now more readable, as it is now clear which functions are
part of your API and which ones are callback implementations. To reinforce this
idea, `@impl true` automatically marks the function as `@doc false`, disabling
documentation unless `@doc` is explicitly set.
### `@compile`
Defines options for module compilation. This is used to configure
both Elixir and Erlang compilers, as any other compilation pass
added by external tools. For example:
defmodule MyModule do
@compile {:inline, my_fun: 1}
def my_fun(arg) do
to_string(arg)
end
end
Multiple uses of `@compile` will accumulate instead of overriding
previous ones. See the "Compile options" section below.
### `@deprecated`
Provides the deprecation reason for a function. For example:
defmodule Keyword do
@deprecated "Use Kernel.length/1 instead"
def size(keyword) do
length(keyword)
end
end
The Mix compiler automatically looks for calls to deprecated modules
and emits warnings during compilation, computed via `mix xref warnings`.
Using the `@deprecated` attribute will also be reflected in the
documentation of the given function and macro. You can choose between
the `@deprecated` attribute and the documentation metadata to provide
hard-deprecations (with warnings) and soft-deprecations (without warnings):
This is a soft-deprecation as it simply annotates the documentation
as deprecated:
@doc deprecated: "Use Kernel.length/1 instead"
def size(keyword)
This is a hard-deprecation as it emits warnings and annotates the
documentation as deprecated:
@deprecated "Use Kernel.length/1 instead"
def size(keyword)
Currently `@deprecated` only supports functions and macros. However
you can use the `:deprecated` key in the annotation metadata to
annotate the docs of modules, types and callbacks too.
We recommend using this feature with care, especially library authors.
Deprecating code always pushes the burden towards library users. We
also recommend for deprecated functionality to be maintained for long
periods of time, even after deprecation, giving developers plenty of
time to update (except for cases where keeping the deprecated API is
undesired, such as in the presence of security issues).
### `@doc` and `@typedoc`
Provides documentation for the entity that follows the attribute.
`@doc` is to be used with a function, macro, callback, or
macrocallback, while `@typedoc` with a type (public or opaque).
Accepts a string (often a heredoc) or `false` where `@doc false` will
make the entity invisible to documentation extraction tools like
[`ExDoc`](https://hexdocs.pm/ex_doc/). For example:
defmodule MyModule do
@typedoc "This type"
@typedoc since: "1.1.0"
@type t :: term
@doc "Hello world"
@doc since: "1.1.0"
def hello do
"world"
end
@doc """
Sums `a` to `b`.
"""
def sum(a, b) do
a + b
end
end
As can be seen in the example above, `@doc` and `@typedoc` also accept
a keyword list that serves as a way to provide arbitrary metadata
about the entity. Tools like [`ExDoc`](https://hexdocs.pm/ex_doc/) and
`IEx` may use this information to display annotations. A common use
case is `since` that may be used to annotate in which version the
function was introduced.
As illustrated in the example, it is possible to use these attributes
more than once before an entity. However, the compiler will warn if
used twice with binaries as that replaces the documentation text from
the preceding use. Multiple uses with keyword lists will merge the
lists into one.
Note that since the compiler also defines some additional metadata,
there are a few reserved keys that will be ignored and warned if used.
Currently these are: `:opaque` and `:defaults`.
Once this module is compiled, this information becomes available via
the `Code.fetch_docs/1` function.
### `@dialyzer`
Defines warnings to request or suppress when using a version of
`:dialyzer` that supports module attributes.
Accepts an atom, a tuple, or a list of atoms and tuples. For example:
defmodule MyModule do
@dialyzer {:nowarn_function, my_fun: 1}
def my_fun(arg) do
M.not_a_function(arg)
end
end
For the list of supported warnings, see
[`:dialyzer` module](http://www.erlang.org/doc/man/dialyzer.html).
Multiple uses of `@dialyzer` will accumulate instead of overriding
previous ones.
### `@external_resource`
Specifies an external resource for the current module.
Sometimes a module embeds information from an external file. This
attribute allows the module to annotate which external resources
have been used.
Tools like Mix may use this information to ensure the module is
recompiled in case any of the external resources change.
### `@file`
Changes the filename used in stacktraces for the function or macro that
follows the attribute, such as:
defmodule MyModule do
@doc "Hello world"
@file "hello.ex"
def hello do
"world"
end
end
### `@moduledoc`
Provides documentation for the current module.
defmodule MyModule do
@moduledoc """
A very useful module.
"""
@moduledoc authors: ["Alice", "Bob"]
end
Accepts a string (often a heredoc) or `false` where `@moduledoc false`
will make the module invisible to documentation extraction tools like
[`ExDoc`](https://hexdocs.pm/ex_doc/).
Similarly to `@doc` also accepts a keyword list to provide metadata
about the module. For more details, see the documentation of `@doc`
above.
Once this module is compiled, this information becomes available via
the `Code.fetch_docs/1` function.
### `@on_definition`
A hook that will be invoked when each function or macro in the current
module is defined. Useful when annotating functions.
Accepts a module or a `{module, function_name}` tuple. See the
"Compile callbacks" section below.
### `@on_load`
A hook that will be invoked whenever the module is loaded.
Accepts the function name (as an atom) of a function in the current module or
`{function_name, 0}` tuple where `function_name` is the name of a function in
the current module. The function must be public and have an arity of 0 (no
arguments). If the function does not return `:ok`, the loading of the module
will be aborted. For example:
defmodule MyModule do
@on_load :load_check
def load_check do
if some_condition() do
:ok
else
:abort
end
end
def some_condition do
false
end
end
Modules compiled with HiPE would not call this hook.
### `@vsn`
Specify the module version. Accepts any valid Elixir value, for example:
defmodule MyModule do
@vsn "1.0"
end
### Typespec attributes
The following attributes are part of typespecs and are also built-in in
Elixir:
* `@type` - defines a type to be used in `@spec`
* `@typep` - defines a private type to be used in `@spec`
* `@opaque` - defines an opaque type to be used in `@spec`
* `@spec` - provides a specification for a function
* `@callback` - provides a specification for a behaviour callback
* `@macrocallback` - provides a specification for a macro behaviour callback
* `@optional_callbacks` - specifies which behaviour callbacks and macro
behaviour callbacks are optional
* `@impl` - declares an implementation of a callback function or macro
### Custom attributes
In addition to the built-in attributes outlined above, custom attributes may
also be added. Custom attributes are expressed using the `@/1` operator followed
by a valid variable name. The value given to the custom attribute must be a valid
Elixir value:
defmodule MyModule do
@custom_attr [some: "stuff"]
end
For more advanced options available when defining custom attributes, see
`register_attribute/3`.
## Compile callbacks
There are three callbacks that are invoked when functions are defined,
as well as before and immediately after the module bytecode is generated.
### `@after_compile`
A hook that will be invoked right after the current module is compiled.
Accepts a module or a `{module, function_name}` tuple. The function
must take two arguments: the module environment and its bytecode.
When just a module is provided, the function is assumed to be
`__after_compile__/2`.
Callbacks registered first will run last.
#### Example
defmodule MyModule do
@after_compile __MODULE__
def __after_compile__(env, _bytecode) do
IO.inspect(env)
end
end
### `@before_compile`
A hook that will be invoked before the module is compiled.
Accepts a module or a `{module, function_or_macro_name}` tuple. The
function/macro must take one argument: the module environment. If
it's a macro, its returned value will be injected at the end of the
module definition before the compilation starts.
When just a module is provided, the function/macro is assumed to be
`__before_compile__/1`.
Callbacks registered first will run last. Any overridable definition
will be made concrete before the first callback runs. A definition may
be made overridable again in another before compile callback and it
will be made concrete one last time after all callbacks run.
*Note*: unlike `@after_compile`, the callback function/macro must
be placed in a separate module (because when the callback is invoked,
the current module does not yet exist).
#### Example
defmodule A do
defmacro __before_compile__(_env) do
quote do
def hello, do: "world"
end
end
end
defmodule B do
@before_compile A
end
B.hello()
#=> "world"
### `@on_definition`
A hook that will be invoked when each function or macro in the current
module is defined. Useful when annotating functions.
Accepts a module or a `{module, function_name}` tuple. The function
must take 6 arguments:
* the module environment
* the kind of the function/macro: `:def`, `:defp`, `:defmacro`, or `:defmacrop`
* the function/macro name
* the list of quoted arguments
* the list of quoted guards
* the quoted function body
Note the hook receives the quoted arguments and it is invoked before
the function is stored in the module. So `Module.defines?/2` will return
`false` for the first clause of every function.
If the function/macro being defined has multiple clauses, the hook will
be called for each clause.
Unlike other hooks, `@on_definition` will only invoke functions and
never macros. This is to avoid `@on_definition` callbacks from
redefining functions that have just been defined in favor of more
explicit approaches.
When just a module is provided, the function is assumed to be
`__on_definition__/6`.
#### Example
defmodule Hooks do
def on_def(_env, kind, name, args, guards, body) do
IO.puts("Defining #{kind} named #{name} with args:")
IO.inspect(args)
IO.puts("and guards")
IO.inspect(guards)
IO.puts("and body")
IO.puts(Macro.to_string(body))
end
end
defmodule MyModule do
@on_definition {Hooks, :on_def}
def hello(arg) when is_binary(arg) or is_list(arg) do
"Hello" <> to_string(arg)
end
def hello(_) do
:ok
end
end
## Compile options
The `@compile` attribute accepts different options that are used by both
Elixir and Erlang compilers. Some of the common use cases are documented
below:
* `@compile :debug_info` - includes `:debug_info` regardless of the
corresponding setting in `Code.compiler_options/1`
* `@compile {:debug_info, false}` - disables `:debug_info` regardless
of the corresponding setting in `Code.compiler_options/1`
* `@compile {:inline, some_fun: 2, other_fun: 3}` - inlines the given
name/arity pairs. Inlining is applied locally, calls from another
module are not affected by this option
* `@compile {:autoload, false}` - disables automatic loading of
modules after compilation. Instead, the module will be loaded after
it is dispatched to
* `@compile {:no_warn_undefined, Mod}` or
`@compile {:no_warn_undefined, {Mod, fun, arity}}` - does not warn if
the given module or the given `Mod.fun/arity` are not defined
You can see a handful more options used by the Erlang compiler in
the documentation for the [`:compile` module](http://www.erlang.org/doc/man/compile.html).
'''
# A definition is identified by its name and arity.
@typep definition :: {atom, arity}
# The kinds of definitions a module may hold.
@typep def_kind :: :def | :defp | :defmacro | :defmacrop

# Shared error-message suffixes used when these functions are invoked
# on modules that have already been compiled.
@extra_error_msg_defines? "Use Kernel.function_exported?/3 and Kernel.macro_exported?/3 " <>
                            "to check for public functions and macros instead"
@extra_error_msg_definitions_in "Use the Module.__info__/1 callback to get public functions and macros instead"
# __info__/1 is injected into every module by the compiler; the callbacks
# below document its contract.
@doc """
Provides runtime information about functions, macros, and other information
defined by the module.

Each module gets an `__info__/1` function when it's compiled. The function
takes one of the following items:

  * `:attributes` - a keyword list with all persisted attributes

  * `:compile` - a list with compiler metadata

  * `:functions` - a keyword list of public functions and their arities

  * `:macros` - a keyword list of public macros and their arities

  * `:md5` - the MD5 of the module

  * `:module` - the module atom name

"""
@callback __info__(:attributes) :: keyword()
@callback __info__(:compile) :: [term()]
@callback __info__(:functions) :: keyword()
@callback __info__(:macros) :: keyword()
@callback __info__(:md5) :: binary()
@callback __info__(:module) :: module()
@doc """
Returns `true` if the given module is currently open.

A module is "open" while it is being defined, which is when its
attributes and functions can still be modified.
"""
@spec open?(module) :: boolean
def open?(module) when is_atom(module), do: :elixir_module.is_open(module)
@doc """
Evaluates the quoted contents in the given module's context.

A list of environment options can also be given as argument.
See `Code.eval_string/3` for more information.

Raises an error if the module was already compiled.

## Examples

    defmodule Foo do
      contents =
        quote do
          def sum(a, b), do: a + b
        end

      Module.eval_quoted(__MODULE__, contents)
    end

    Foo.sum(1, 2)
    #=> 3

For convenience, you can pass any `Macro.Env` struct, such
as `__ENV__/0`, as the first argument or as options. Both
the module and all options will be automatically extracted
from the environment:

    defmodule Foo do
      contents =
        quote do
          def sum(a, b), do: a + b
        end

      Module.eval_quoted(__ENV__, contents)
    end

    Foo.sum(1, 2)
    #=> 3

Note that if you pass a `Macro.Env` struct as first argument
while also passing `opts`, they will be merged with `opts`
having precedence.
"""
@spec eval_quoted(module | Macro.Env.t(), Macro.t(), list, keyword | Macro.Env.t()) :: term
def eval_quoted(module_or_env, quoted, binding \\ [], opts \\ [])

# Macro.Env in the first position: extract the module and use the
# environment itself as base options (explicit opts take precedence).
def eval_quoted(%Macro.Env{} = env, quoted, binding, opts)
    when is_list(binding) and is_list(opts) do
  eval_quoted(env.module, quoted, binding, Keyword.merge(Map.to_list(env), opts))
end

# Macro.Env in the options position: convert it to a plain keyword list.
def eval_quoted(module, quoted, binding, %Macro.Env{} = env)
    when is_atom(module) and is_list(binding) do
  eval_quoted(module, quoted, binding, Map.to_list(env))
end

def eval_quoted(module, quoted, binding, opts)
    when is_atom(module) and is_list(binding) and is_list(opts) do
  # Evaluation is only possible while the module is still open.
  assert_not_compiled!(__ENV__.function, module)
  # Reset the "last defined function" tracker before evaluating new code.
  :elixir_def.reset_last(module)

  {value, binding, _env, _scope} =
    :elixir.eval_quoted(quoted, binding, Keyword.put(opts, :module, module))

  # Only the evaluation result and the resulting binding are exposed.
  {value, binding}
end
@doc """
Creates a module with the given name and defined by
the given quoted expressions.

The line where the module is defined and its file **must**
be passed as options.

It returns a tuple of shape `{:module, module, binary, term}`
where `module` is the module name, `binary` is the module
byte code and `term` is the result of the last expression in
`quoted`.

Similar to `Kernel.defmodule/2`, the binary will only be
written to disk as a `.beam` file if `Module.create/3` is
invoked in a file that is currently being compiled.

## Examples

    contents =
      quote do
        def world, do: true
      end

    Module.create(Hello, contents, Macro.Env.location(__ENV__))

    Hello.world()
    #=> true

## Differences from `defmodule`

`Module.create/3` works similarly to `Kernel.defmodule/2`
and returns the same results. While one could also use
`defmodule` to define modules dynamically, this function
is preferred when the module body is given by a quoted
expression.

Another important distinction is that `Module.create/3`
allows you to control the environment variables used
when defining the module, while `Kernel.defmodule/2`
automatically uses the environment it is invoked at.
"""
@spec create(module, Macro.t(), Macro.Env.t() | keyword) :: {:module, module, binary, term}
def create(module, quoted, opts)

# A Macro.Env is accepted and converted into the keyword options.
def create(module, quoted, %Macro.Env{} = env) when is_atom(module) do
  create(module, quoted, Map.to_list(env))
end

def create(module, quoted, opts) when is_atom(module) and is_list(opts) do
  # The :file option is mandatory; :line defaults to 0 below.
  unless Keyword.has_key?(opts, :file) do
    raise ArgumentError, "expected :file to be given as option"
  end

  # Fetch a fresh context counter and tag the quoted code with line
  # information before handing it to the compiler.
  next = :elixir_module.next_counter(nil)
  line = Keyword.get(opts, :line, 0)
  quoted = :elixir_quote.linify_with_context_counter(line, {module, next}, quoted)
  :elixir_module.compile(module, quoted, [], :elixir.env_for_eval(opts))
end
@doc """
Builds a single alias out of a list of aliases.

Each element of the list may be an atom or a binary.

## Examples

    iex> Module.concat([Foo, Bar])
    Foo.Bar

    iex> Module.concat([Foo, "Bar"])
    Foo.Bar

"""
@spec concat([binary | atom]) :: atom
def concat(list) when is_list(list), do: :elixir_aliases.concat(list)
@doc """
Joins two aliases into a single alias.

Both sides may be atoms or binaries.

## Examples

    iex> Module.concat(Foo, Bar)
    Foo.Bar

    iex> Module.concat(Foo, "Bar")
    Foo.Bar

"""
@spec concat(binary | atom, binary | atom) :: atom
def concat(left, right)
    when (is_binary(left) or is_atom(left)) and (is_binary(right) or is_atom(right)),
    do: :elixir_aliases.concat([left, right])
@doc """
Concatenates a list of aliases, returning the new alias only if it
was already referenced somewhere (and therefore already exists as
an atom).

Fails with `ArgumentError` if the resulting alias was never referenced.
It handles charlists, binaries and atoms.

## Examples

    iex> Module.safe_concat([Module, Unknown])
    ** (ArgumentError) argument error

    iex> Module.safe_concat([List, Chars])
    List.Chars

"""
@spec safe_concat([binary | atom]) :: atom
def safe_concat(list) when is_list(list), do: :elixir_aliases.safe_concat(list)
@doc """
Joins two aliases, returning the new alias only if it was already
referenced somewhere (and therefore already exists as an atom).

Fails with `ArgumentError` if the resulting alias was never referenced.
It handles charlists, binaries and atoms.

## Examples

    iex> Module.safe_concat(Module, Unknown)
    ** (ArgumentError) argument error

    iex> Module.safe_concat(List, Chars)
    List.Chars

"""
@spec safe_concat(binary | atom, binary | atom) :: atom
def safe_concat(left, right)
    when (is_binary(left) or is_atom(left)) and (is_binary(right) or is_atom(right)),
    do: :elixir_aliases.safe_concat([left, right])
# Builds the argument signature stored alongside the docs: every argument
# is first reduced to a simple name (tracking repeats), then expanded back
# into quoted variables.
defp build_signature(args, env) do
  {reversed, counts} = simplify_args(args, %{}, [], env)
  expand_keys(reversed, counts, [])
end
# Simplifies each argument in turn, threading the per-name counters
# through. The simplified arguments come back in reverse order.
defp simplify_args([head | tail], counts, simplified, env) do
  {simple, counts} = simplify_arg(head, counts, env)
  simplify_args(tail, counts, [simple | simplified], env)
end

defp simplify_args([], counts, simplified, _env) do
  {simplified, counts}
end
# A default argument: simplify the pattern and expand any module
# attribute referenced in the default value expression.
defp simplify_arg({:\\, _, [left, right]}, counters, env) do
  {left, counters} = simplify_arg(left, counters, env)

  right =
    Macro.prewalk(right, fn
      {:@, _, _} = attr -> Macro.expand_once(attr, env)
      other -> other
    end)

  {{:\\, [], [left, right]}, counters}
end

# If the variable is being used explicitly for naming,
# we always give it a higher priority (nil) even if it
# starts with underscore.
defp simplify_arg({:=, _, [{var, _, atom}, _]}, counters, _env) when is_atom(atom) do
  {simplify_var(var, nil), counters}
end

defp simplify_arg({:=, _, [_, {var, _, atom}]}, counters, _env) when is_atom(atom) do
  {simplify_var(var, nil), counters}
end

# If we have only the variable as argument, it also gets
# higher priority. However, if the variable starts with an
# underscore, we give it a secondary context (Elixir) with
# lower priority.
defp simplify_arg({var, _, atom}, counters, _env) when is_atom(atom) do
  {simplify_var(var, Elixir), counters}
end

# A struct pattern is named after its module when the module can be
# resolved at this point, otherwise it falls back to :struct.
defp simplify_arg({:%, _, [left, _]}, counters, env) do
  case Macro.expand_once(left, env) do
    module when is_atom(module) -> autogenerated_key(counters, simplify_module_name(module))
    _ -> autogenerated_key(counters, :struct)
  end
end

defp simplify_arg({:%{}, _, _}, counters, _env) do
  autogenerated_key(counters, :map)
end

# A module attribute in argument position: expand it, then simplify
# whatever it expanded to.
defp simplify_arg({:@, _, _} = attr, counters, env) do
  simplify_arg(Macro.expand_once(attr, env), counters, env)
end

# Literal arguments are named after their type.
defp simplify_arg(other, counters, _env) when is_integer(other),
  do: autogenerated_key(counters, :int)

defp simplify_arg(other, counters, _env) when is_boolean(other),
  do: autogenerated_key(counters, :bool)

defp simplify_arg(other, counters, _env) when is_atom(other),
  do: autogenerated_key(counters, :atom)

defp simplify_arg(other, counters, _env) when is_list(other),
  do: autogenerated_key(counters, :list)

defp simplify_arg(other, counters, _env) when is_float(other),
  do: autogenerated_key(counters, :float)

defp simplify_arg(other, counters, _env) when is_binary(other),
  do: autogenerated_key(counters, :binary)

# Anything else becomes a generic :arg.
defp simplify_arg(_, counters, _env), do: autogenerated_key(counters, :arg)
# Converts a variable name into a quoted variable. Underscored names are
# stripped of the underscore and given the caller's guess priority as
# context; other names keep the highest priority (nil context).
defp simplify_var(var, guess_priority) do
  name = Atom.to_string(var)

  cond do
    name == "_" ->
      {:_, [], guess_priority}

    String.starts_with?(name, "_") ->
      trimmed = binary_part(name, 1, byte_size(name) - 1)
      {String.to_atom(trimmed), [], guess_priority}

    true ->
      {var, [], nil}
  end
end
# Derives a short, underscored name from a module (for instance,
# MyApp.SomeStruct becomes :some_struct). Falls back to the module
# itself when it cannot be split (e.g. non-Elixir modules).
defp simplify_module_name(module) when is_atom(module) do
  try do
    # Only split/1 is guarded by the rescue; errors raised while
    # underscoring in the else-branch propagate as usual.
    split(module)
  rescue
    ArgumentError -> module
  else
    module_name -> String.to_atom(Macro.underscore(List.last(module_name)))
  end
end
# Registers an occurrence of the autogenerated name `key`: the first
# occurrence is stored as :once, a second occurrence becomes the integer
# 2 and later occurrences keep incrementing the count.
defp autogenerated_key(counters, key) do
  next =
    case counters do
      %{^key => :once} -> 2
      %{^key => count} -> count + 1
      %{} -> :once
    end

  {key, Map.put(counters, key, next)}
end
# Expands simplified keys back into quoted variables, numbering repeated
# names via the counters built by simplify_args/4. Receives the arguments
# in reverse order and returns them in their original order.
defp expand_keys([{:\\, meta, [key, default]} | keys], counters, acc) when is_atom(key) do
  {var, counters} = expand_key(key, counters)
  expand_keys(keys, counters, [{:\\, meta, [var, default]} | acc])
end

defp expand_keys([key | keys], counters, acc) when is_atom(key) do
  {var, counters} = expand_key(key, counters)
  expand_keys(keys, counters, [var | acc])
end

# Anything that is not an atom key is already a quoted variable - keep it.
defp expand_keys([arg | args], counters, acc) do
  expand_keys(args, counters, [arg | acc])
end

defp expand_keys([], _counters, acc) do
  acc
end
# Turns a simplified key into a quoted variable. Keys that occurred more
# than once are suffixed with their count (walking down as the reversed
# list is consumed); single occurrences keep their bare name.
defp expand_key(key, counters) do
  case Map.fetch(counters, key) do
    {:ok, count} when is_integer(count) and count >= 1 ->
      {{:"#{key}#{count}", [], Elixir}, Map.put(counters, key, count - 1)}

    _ ->
      {{key, [], Elixir}, counters}
  end
end
# Merges a previously stored signature with the signature of a newly
# defined clause, argument by argument.
defp merge_signatures([h1 | t1], [h2 | t2], i) do
  [merge_signature(h1, h2, i) | merge_signatures(t1, t2, i + 1)]
end

defp merge_signatures([], [], _) do
  []
end

# Defaults are kept from the older signature; only the pattern is merged.
defp merge_signature({:\\, meta, [left, right]}, newer, i) do
  {:\\, meta, [merge_signature(left, newer, i), right]}
end

defp merge_signature(older, {:\\, _, [left, _]}, i) do
  merge_signature(older, left, i)
end

# The older signature, when given, always has higher precedence
defp merge_signature({_, _, nil} = older, _newer, _), do: older
defp merge_signature(_older, {_, _, nil} = newer, _), do: newer

# Both are a guess, so check if they are the same guess
defp merge_signature({var, _, _} = older, {var, _, _}, _), do: older

# Otherwise, returns a generic guess
defp merge_signature({_, meta, _}, _newer, i), do: {:"arg#{i}", meta, Elixir}
@doc """
Checks if the module defines the given function or macro.

Use `defines?/3` to assert for a specific type.

This function can only be used on modules that have not yet been compiled.
Use `Kernel.function_exported?/3` and `Kernel.macro_exported?/3` to check for
public functions and macros respectively in compiled modules.

Note that `defines?` returns false for functions and macros that have
been defined but then marked as overridable and no other implementation
has been provided. You can check the overridable status by calling
`overridable?/2`.

## Examples

    defmodule Example do
      Module.defines?(__MODULE__, {:version, 0}) #=> false
      def version, do: 1
      Module.defines?(__MODULE__, {:version, 0}) #=> true
    end

"""
@spec defines?(module, definition) :: boolean
def defines?(module, {name, arity} = tuple)
    when is_atom(module) and is_atom(name) and is_integer(arity) and arity >= 0 and arity <= 255 do
  assert_not_compiled!(__ENV__.function, module, @extra_error_msg_defines?)
  # Definitions live in the module's ETS set table, keyed by {:def, {name, arity}}.
  {set, _bag} = data_tables_for(module)
  :ets.member(set, {:def, tuple})
end
@doc """
Checks if the module defines a function or macro of the
given `kind`.

`kind` can be any of `:def`, `:defp`, `:defmacro`, or `:defmacrop`.

This function can only be used on modules that have not yet been compiled.
Use `Kernel.function_exported?/3` and `Kernel.macro_exported?/3` to check for
public functions and macros respectively in compiled modules.

## Examples

    defmodule Example do
      Module.defines?(__MODULE__, {:version, 0}, :def) #=> false
      def version, do: 1
      Module.defines?(__MODULE__, {:version, 0}, :def) #=> true
    end

"""
@spec defines?(module, definition, def_kind) :: boolean
def defines?(module, {name, arity} = tuple, def_kind)
    when is_atom(module) and is_atom(name) and is_integer(arity) and arity >= 0 and arity <= 255 and
           def_kind in [:def, :defp, :defmacro, :defmacrop] do
  assert_not_compiled!(__ENV__.function, module, @extra_error_msg_defines?)
  {set, _bag} = data_tables_for(module)

  # The second element of a definition entry in the set table is its kind.
  case :ets.lookup(set, {:def, tuple}) do
    [{_, ^def_kind, _, _, _, _}] -> true
    _ -> false
  end
end
@doc """
Checks whether the current module defines the given type
(private, opaque or not).

This function is only available for modules being compiled.
"""
@doc since: "1.7.0"
@spec defines_type?(module, definition) :: boolean
def defines_type?(module, definition), do: Kernel.Typespec.defines_type?(module, definition)
@doc """
Copies the given spec as a callback.

Returns `true` when such a spec exists and was copied as a callback.
If the function associated with the spec has documentation defined
before this call, the docs are copied too.
"""
@doc since: "1.7.0"
@spec spec_to_callback(module, definition) :: boolean
def spec_to_callback(module, definition),
  do: Kernel.Typespec.spec_to_callback(module, definition)
@doc """
Returns all functions and macros defined in `module`.

It returns a list with all defined functions and macros, public and private,
in the shape of `[{name, arity}, ...]`.

This function can only be used on modules that have not yet been compiled.
Use the `c:Module.__info__/1` callback to get the public functions and macros in
compiled modules.

## Examples

    defmodule Example do
      def version, do: 1
      defmacrop test(arg), do: arg
      Module.definitions_in(__MODULE__) #=> [{:version, 0}, {:test, 1}]
    end

"""
@spec definitions_in(module) :: [definition]
def definitions_in(module) when is_atom(module) do
  assert_not_compiled!(__ENV__.function, module, @extra_error_msg_definitions_in)
  # All {name, arity} pairs are accumulated under :defs in the bag table.
  {_, bag} = data_tables_for(module)
  bag_lookup_element(bag, :defs, 2)
end
@doc """
Returns all functions defined in `module`, according
to its kind.

This function can only be used on modules that have not yet been compiled.
Use the `c:Module.__info__/1` callback to get the public functions and macros in
compiled modules.

## Examples

    defmodule Example do
      def version, do: 1
      Module.definitions_in(__MODULE__, :def)  #=> [{:version, 0}]
      Module.definitions_in(__MODULE__, :defp) #=> []
    end

"""
@spec definitions_in(module, def_kind) :: [definition]
def definitions_in(module, def_kind)
    when is_atom(module) and def_kind in [:def, :defp, :defmacro, :defmacrop] do
  assert_not_compiled!(__ENV__.function, module, @extra_error_msg_definitions_in)
  {set, _} = data_tables_for(module)
  # Match {{:def, {name, arity}}, kind, ...} rows and extract the pairs.
  :lists.concat(:ets.match(set, {{:def, :"$1"}, def_kind, :_, :_, :_, :_}))
end
@doc """
Makes the given functions in `module` overridable.

An overridable function is lazily defined, allowing a
developer to customize it. See `Kernel.defoverridable/1` for
more information and documentation.

Once a function or a macro is marked as overridable, it will
no longer be listed under `definitions_in/1` or return true
when given to `defines?/2` until another implementation is
given.
"""
@spec make_overridable(module, [definition]) :: :ok
def make_overridable(module, tuples) when is_atom(module) and is_list(tuples) do
  assert_not_compiled!(__ENV__.function, module)

  func = fn
    {function_name, arity} = tuple
    when is_atom(function_name) and is_integer(arity) and arity >= 0 and arity <= 255 ->
      # Remove the current definition; it is stored aside so it can be
      # restored lazily if no override is provided.
      case :elixir_def.take_definition(module, tuple) do
        false ->
          raise ArgumentError,
                "cannot make function #{function_name}/#{arity} " <>
                  "overridable because it was not defined"

        clause ->
          # Detach the definition from the local-calls tracker, keeping
          # its neighbour information alongside the stored clause.
          neighbours = :elixir_locals.yank(tuple, module)
          :elixir_overridable.record_overridable(module, tuple, clause, neighbours)
      end

    other ->
      raise ArgumentError,
            "each element in tuple list has to be a " <>
              "{function_name :: atom, arity :: 0..255} tuple, got: #{inspect(other)}"
  end

  :lists.foreach(func, tuples)
end

@spec make_overridable(module, module) :: :ok
def make_overridable(module, behaviour) when is_atom(module) and is_atom(behaviour) do
  # Validate the behaviour module before deriving the overridable list.
  case check_module_for_overridable(module, behaviour) do
    :ok ->
      :ok

    {:error, error_explanation} ->
      raise ArgumentError,
            "cannot pass module #{inspect(behaviour)} as argument " <>
              "to defoverridable/1 because #{error_explanation}"
  end

  # All callbacks of the behaviour, with macro callbacks normalized to
  # their user-facing name/arity.
  behaviour_callbacks =
    for callback <- behaviour_info(behaviour, :callbacks) do
      {pair, _kind} = normalize_macro_or_function_callback(callback)
      pair
    end

  # Only the definitions that implement one of those callbacks are
  # made overridable.
  tuples =
    for definition <- definitions_in(module),
        definition in behaviour_callbacks,
        do: definition

  make_overridable(module, tuples)
end
# Validates that `behaviour` may be passed to defoverridable/1 in `module`:
# it must be compiled, export behaviour_info/1, and have been declared via
# @behaviour in the current module. Returns :ok or {:error, explanation}.
defp check_module_for_overridable(module, behaviour) do
  {_, bag} = data_tables_for(module)
  # @behaviour is an accumulated attribute stored in the bag table.
  behaviour_definitions = bag_lookup_element(bag, {:accumulate, :behaviour}, 2)

  cond do
    not Code.ensure_compiled?(behaviour) ->
      {:error, "it was not defined"}

    not function_exported?(behaviour, :behaviour_info, 1) ->
      {:error, "it does not define any callbacks"}

    behaviour not in behaviour_definitions ->
      error_message =
        "its corresponding behaviour is missing. Did you forget to " <>
          "add @behaviour #{inspect(behaviour)}?"

      {:error, error_message}

    true ->
      :ok
  end
end
# Normalizes an entry from behaviour_info(:callbacks) into a
# {{name, arity}, kind} pair. Macro callbacks are exposed by the compiler
# as "MACRO-"-prefixed functions that take one extra argument.
defp normalize_macro_or_function_callback({function_name, arity}) do
  case Atom.to_string(function_name) do
    "MACRO-" <> name ->
      {{String.to_atom(name), arity - 1}, :defmacro}

    _ ->
      {{function_name, arity}, :def}
  end
end
# Fetches the requested behaviour information from the module, mapping
# an :undefined result to an empty list.
defp behaviour_info(module, key) do
  case module.behaviour_info(key) do
    :undefined -> []
    entries when is_list(entries) -> entries
  end
end
@doc """
Returns `true` if `tuple` in `module` is marked as overridable.
"""
@spec overridable?(module, definition) :: boolean
def overridable?(module, {function_name, arity} = tuple)
    when is_atom(function_name) and is_integer(arity) and arity >= 0 and arity <= 255 do
  case :elixir_overridable.overridable_for(module, tuple) do
    :not_overridable -> false
    _ -> true
  end
end
@doc """
Puts a module attribute with `key` and `value` in the given `module`.

## Examples

    defmodule MyModule do
      Module.put_attribute(__MODULE__, :custom_threshold_for_lib, 10)
    end

"""
@spec put_attribute(module, atom, term) :: :ok
def put_attribute(module, key, value) when is_atom(module) and is_atom(key),
  do: __put_attribute__(module, key, value, nil)
@doc """
Gets the given attribute from a module.

If the attribute was marked with `accumulate` with
`Module.register_attribute/3`, a list is always returned.
`nil` is returned if the attribute has not been marked with
`accumulate` and has not been set to any value.

The `@` macro compiles to a call to this function. For example,
the following code:

    @foo

Expands to something akin to:

    Module.get_attribute(__MODULE__, :foo)

This function can only be used on modules that have not yet been compiled.
Use the `c:Module.__info__/1` callback to get all persisted attributes, or
`Code.fetch_docs/1` to retrieve all documentation related attributes in
compiled modules.

## Examples

    defmodule Foo do
      Module.put_attribute(__MODULE__, :value, 1)
      Module.get_attribute(__MODULE__, :value) #=> 1

      Module.get_attribute(__MODULE__, :value, :default) #=> 1
      Module.get_attribute(__MODULE__, :not_found, :default) #=> :default

      Module.register_attribute(__MODULE__, :value, accumulate: true)
      Module.put_attribute(__MODULE__, :value, 1)
      Module.get_attribute(__MODULE__, :value) #=> [1]
    end

"""
@spec get_attribute(module, atom, term) :: term
def get_attribute(module, key, default \\ nil) when is_atom(module) and is_atom(key) do
  # The internal lookup yields nil for unset attributes; substitute the
  # caller-provided default in that case.
  value = __get_attribute__(module, key, nil)
  if is_nil(value), do: default, else: value
end
  @doc """
  Deletes the module attribute that matches the given key.

  It returns the deleted attribute value (or `nil` if nothing was set).

  ## Examples

      defmodule MyModule do
        Module.put_attribute(__MODULE__, :custom_threshold_for_lib, 10)
        Module.delete_attribute(__MODULE__, :custom_threshold_for_lib)
      end

  """
  @spec delete_attribute(module, atom) :: term
  def delete_attribute(module, key) when is_atom(module) and is_atom(key) do
    assert_not_compiled!(__ENV__.function, module)
    {set, bag} = data_tables_for(module)

    case :ets.lookup(set, key) do
      [{_, _, :accumulate}] ->
        # Accumulated values live in the bag table under {:accumulate, key};
        # :ets.take/2 removes and returns them all, reversed here so the
        # newest value comes first (matching __get_attribute__/3's order).
        reverse_values(:ets.take(bag, {:accumulate, key}), [])

      [{_, value, _}] ->
        # Plain attribute: remove the entry and hand back its stored value.
        :ets.delete(set, key)
        value

      [] ->
        nil
    end
  end
defp reverse_values([{_, value} | tail], acc), do: reverse_values(tail, [value | acc])
defp reverse_values([], acc), do: acc
  @doc """
  Registers an attribute.

  By registering an attribute, a developer is able to customize
  how Elixir will store and accumulate the attribute values.

  ## Options

  When registering an attribute, two options can be given:

    * `:accumulate` - several calls to the same attribute will
      accumulate instead of overriding the previous one. New attributes
      are always added to the top of the accumulated list.

    * `:persist` - the attribute will be persisted in the Erlang
      Abstract Format. Useful when interfacing with Erlang libraries.

  By default, both options are `false`.

  ## Examples

      defmodule MyModule do
        Module.register_attribute(__MODULE__, :custom_threshold_for_lib, accumulate: true)

        @custom_threshold_for_lib 10
        @custom_threshold_for_lib 20
        @custom_threshold_for_lib #=> [20, 10]
      end

  """
  @spec register_attribute(module, atom, [{:accumulate, boolean}, {:persist, boolean}]) :: :ok
  def register_attribute(module, attribute, options)
      when is_atom(module) and is_atom(attribute) and is_list(options) do
    assert_not_compiled!(__ENV__.function, module)
    {set, bag} = data_tables_for(module)

    if Keyword.get(options, :persist) do
      # Persisted attributes are collected from the bag at compilation time.
      :ets.insert(bag, {:persisted_attributes, attribute})
    end

    if Keyword.get(options, :accumulate) do
      # Mark the attribute as accumulating in the set table: create the entry
      # if absent, otherwise flip its third element to :accumulate.
      :ets.insert_new(set, {attribute, [], :accumulate}) ||
        :ets.update_element(set, attribute, {3, :accumulate})
    end

    :ok
  end
@doc """
Splits the given module name into binary parts.
`module` has to be an Elixir module, as `split/1` won't work with Erlang-style
modules (for example, `split(:lists)` raises an error).
`split/1` also supports splitting the string representation of Elixir modules
(that is, the result of calling `Atom.to_string/1` with the module name).
## Examples
iex> Module.split(Very.Long.Module.Name.And.Even.Longer)
["Very", "Long", "Module", "Name", "And", "Even", "Longer"]
iex> Module.split("Elixir.String.Chars")
["String", "Chars"]
"""
@spec split(module | String.t()) :: [String.t(), ...]
def split(module)
def split(module) when is_atom(module) do
split(Atom.to_string(module), _original = module)
end
def split(module) when is_binary(module) do
split(module, _original = module)
end
defp split("Elixir." <> name, _original) do
String.split(name, ".")
end
defp split(_module, original) do
raise ArgumentError, "expected an Elixir module, got: #{inspect(original)}"
end
  @doc false
  @deprecated "Use @doc instead"
  # Legacy entry point for attaching docs to a definition directly; kept only
  # for backwards compatibility with tools that called it.
  def add_doc(module, line, kind, {name, arity}, signature \\ [], doc) do
    assert_not_compiled!(__ENV__.function, module)

    if kind in [:defp, :defmacrop, :typep] do
      # Private definitions cannot carry docs; a non-nil doc is an error.
      if doc, do: {:error, :private_doc}, else: :ok
    else
      {set, _bag} = data_tables_for(module)
      compile_doc(set, line, kind, name, arity, signature, nil, doc, %{}, __ENV__, false)
      :ok
    end
  end
  @doc false
  # Used internally to compile documentation.
  # This function is private and must be used only internally.
  #
  # Invoked for every definition: consumes any pending @impl, @deprecated and
  # @doc data from the module's ETS tables and attaches it to this definition.
  def compile_definition_attributes(env, kind, name, args, _guards, body) do
    %{module: module} = env
    {set, bag} = data_tables_for(module)
    {arity, defaults} = args_count(args, 0, 0)

    # The @impl value feeds compile_doc below: an @impl with no explicit @doc
    # hides the definition from the docs (doc becomes false).
    impl = compile_impl(set, bag, name, env, kind, arity, defaults)
    doc_meta = compile_doc_meta(set, bag, name, arity, defaults)

    {line, doc} = get_doc_info(set, env)
    compile_doc(set, line, kind, name, arity, args, body, doc, doc_meta, env, impl)

    :ok
  end
  # Private definitions never get docs: warn if the user supplied @doc anyway.
  defp compile_doc(_table, line, kind, name, arity, _args, _body, doc, _doc_meta, env, _impl)
       when kind in [:defp, :defmacrop] do
    if doc do
      message =
        "#{kind} #{name}/#{arity} is private, " <>
          "@doc attribute is always discarded for private functions/macros/types"

      IO.warn(message, Macro.Env.stacktrace(%{env | line: line}))
    end
  end

  # Stores (or merges) the documentation entry for a public definition in the
  # set table, keyed by {:function | :macro, name, arity}.
  defp compile_doc(table, line, kind, name, arity, args, _body, doc, doc_meta, env, impl) do
    key = {doc_key(kind), name, arity}
    signature = build_signature(args, env)

    case :ets.lookup(table, key) do
      [] ->
        # First clause seen: an @impl without @doc hides the definition.
        doc = if is_nil(doc) && impl, do: false, else: doc
        :ets.insert(table, {key, line, signature, doc, doc_meta})

      [{_, current_line, current_sign, current_doc, current_doc_meta}] ->
        # Subsequent clause of the same name/arity: merge signatures, keep the
        # first line, and only override the doc if this clause supplies one.
        signature = merge_signatures(current_sign, signature, 1)
        doc = if is_nil(doc), do: current_doc, else: doc
        doc = if is_nil(doc) && impl, do: false, else: doc
        doc_meta = Map.merge(current_doc_meta, doc_meta)
        :ets.insert(table, {key, current_line, signature, doc, doc_meta})
    end
  end
defp doc_key(:def), do: :function
defp doc_key(:defmacro), do: :macro
defp compile_doc_meta(set, bag, name, arity, defaults) do
doc_meta = compile_deprecated(%{}, set, bag, name, arity, defaults)
doc_meta = get_doc_meta(doc_meta, set)
add_defaults_count(doc_meta, defaults)
end
defp get_doc_meta(existing_meta, set) do
case :ets.take(set, {:doc, :meta}) do
[{{:doc, :meta}, metadata, _}] -> Map.merge(existing_meta, metadata)
[] -> existing_meta
end
end
  # Consumes a pending `@deprecated` entry (if any): records one deprecation
  # per default-argument arity in the bag table and mirrors the reason into
  # the doc metadata map.
  defp compile_deprecated(doc_meta, set, bag, name, arity, defaults) do
    case :ets.take(set, :deprecated) do
      [{:deprecated, reason, _}] when is_binary(reason) ->
        :ets.insert(bag, deprecated_reasons(defaults, name, arity, reason))
        Map.put(doc_meta, :deprecated, reason)

      _ ->
        # No pending deprecation (or a malformed one): leave metadata alone.
        doc_meta
    end
  end
defp add_defaults_count(doc_meta, 0), do: doc_meta
defp add_defaults_count(doc_meta, n), do: Map.put(doc_meta, :defaults, n)
defp deprecated_reasons(0, name, arity, reason) do
[deprecated_reason(name, arity, reason)]
end
defp deprecated_reasons(defaults, name, arity, reason) do
[
deprecated_reason(name, arity - defaults, reason)
| deprecated_reasons(defaults - 1, name, arity, reason)
]
end
defp deprecated_reason(name, arity, reason),
do: {:deprecated, {{name, arity}, reason}}
  # Consumes a pending @impl entry (if any), recording it in the bag table for
  # later verification against the declared behaviours. Returns the @impl
  # value, or false when the definition carried no @impl.
  defp compile_impl(set, bag, name, env, kind, arity, defaults) do
    %{line: line, file: file} = env

    case :ets.take(set, :impl) do
      [{:impl, value, _}] ->
        pair = {name, arity}
        # Look up the definition's metadata to remember its :context (set for
        # macro-generated definitions), consulted when warning later.
        meta = :ets.lookup_element(set, {:def, pair}, 3)
        impl = {pair, Keyword.get(meta, :context), defaults, kind, line, file, value}
        :ets.insert(bag, {:impls, impl})
        value

      [] ->
        false
    end
  end
defp args_count([{:\\, _, _} | tail], total, defaults) do
args_count(tail, total + 1, defaults + 1)
end
defp args_count([_head | tail], total, defaults) do
args_count(tail, total + 1, defaults)
end
defp args_count([], total, defaults), do: {total, defaults}
  @doc false
  # Compilation hook: verifies @behaviour declarations against the module's
  # definitions and @impl annotations, emitting warnings for missing or
  # mismatched callbacks.
  def check_behaviours_and_impls(env, _set, bag, all_definitions) do
    behaviours = bag_lookup_element(bag, {:accumulate, :behaviour}, 2)
    impls = bag_lookup_element(bag, :impls, 2)
    callbacks = check_behaviours(env, behaviours)

    pending_callbacks =
      if impls != [] do
        # Callbacks satisfied by an @impl-annotated definition are removed
        # from the pending map; the remainder still need plain definitions.
        {non_implemented_callbacks, contexts} = check_impls(env, behaviours, callbacks, impls)
        warn_missing_impls(env, non_implemented_callbacks, contexts, all_definitions)
        non_implemented_callbacks
      else
        callbacks
      end

    check_callbacks(env, pending_callbacks, all_definitions)
    :ok
  end
  # Validates each @behaviour entry (must be an atom naming a compiled module
  # exporting behaviour_info/1) and accumulates its callbacks into a map of
  # callback => {kind, behaviour, optional?}. Invalid entries only warn.
  defp check_behaviours(%{lexical_tracker: pid} = env, behaviours) do
    Enum.reduce(behaviours, %{}, fn behaviour, acc ->
      cond do
        not is_atom(behaviour) ->
          message =
            "@behaviour #{inspect(behaviour)} must be an atom (in module #{inspect(env.module)})"

          IO.warn(message, Macro.Env.stacktrace(env))
          acc

        not Code.ensure_compiled?(behaviour) ->
          message =
            "@behaviour #{inspect(behaviour)} does not exist (in module #{inspect(env.module)})"

          IO.warn(message, Macro.Env.stacktrace(env))
          acc

        not function_exported?(behaviour, :behaviour_info, 1) ->
          message =
            "module #{inspect(behaviour)} is not a behaviour (in module #{inspect(env.module)})"

          IO.warn(message, Macro.Env.stacktrace(env))
          acc

        true ->
          # Record the compile-time dependency on the behaviour module.
          :elixir_lexical.record_remote(behaviour, nil, pid)
          optional_callbacks = behaviour_info(behaviour, :optional_callbacks)
          callbacks = behaviour_info(behaviour, :callbacks)
          Enum.reduce(callbacks, acc, &add_callback(&1, behaviour, env, optional_callbacks, &2))
      end
    end)
  end
  # Adds one behaviour callback to the accumulator, warning when two
  # behaviours (or the same one declared twice) require the same callback.
  defp add_callback(original, behaviour, env, optional_callbacks, acc) do
    {callback, kind} = normalize_macro_or_function_callback(original)

    case acc do
      %{^callback => {_kind, conflict, _optional?}} ->
        message =
          if conflict == behaviour do
            "the behavior #{inspect(conflict)} has been declared twice " <>
              "(conflict in #{format_definition(kind, callback)} in module #{inspect(env.module)})"
          else
            "conflicting behaviours found. #{format_definition(kind, callback)} is required by " <>
              "#{inspect(conflict)} and #{inspect(behaviour)} (in module #{inspect(env.module)})"
          end

        IO.warn(message, Macro.Env.stacktrace(env))

      %{} ->
        :ok
    end

    # Either way, the behaviour seen last wins the callback's slot.
    Map.put(acc, callback, {kind, behaviour, original in optional_callbacks})
  end
  # Warns for every required callback that is missing altogether, or that was
  # implemented with the wrong kind (function vs macro).
  defp check_callbacks(env, callbacks, all_definitions) do
    for {callback, {kind, behaviour, optional?}} <- callbacks do
      case :lists.keyfind(callback, 1, all_definitions) do
        false when not optional? ->
          message =
            format_callback(callback, kind, behaviour) <>
              " is not implemented (in module #{inspect(env.module)})"

          IO.warn(message, Macro.Env.stacktrace(env))

        {_, wrong_kind, _, _} when kind != wrong_kind ->
          message =
            format_callback(callback, kind, behaviour) <>
              " was implemented as \"#{wrong_kind}\" but should have been \"#{kind}\" " <>
              "(in module #{inspect(env.module)})"

          IO.warn(message, Macro.Env.stacktrace(env))

        _ ->
          :ok
      end
    end

    :ok
  end
defp format_callback(callback, kind, module) do
protocol_or_behaviour = if protocol?(module), do: "protocol ", else: "behaviour "
format_definition(kind, callback) <>
" required by " <> protocol_or_behaviour <> inspect(module)
end
defp protocol?(module) do
Code.ensure_loaded?(module) and function_exported?(module, :__protocol__, 1) and
module.__protocol__(:module) == module
end
  # Walks every @impl-annotated definition, removing satisfied callbacks from
  # the pending map and recording which contexts implemented each behaviour.
  # Returns {remaining_callbacks, %{behaviour => [context]}}.
  defp check_impls(env, behaviours, callbacks, impls) do
    acc = {callbacks, %{}}

    Enum.reduce(impls, acc, fn {fa, context, defaults, kind, line, file, value}, acc ->
      case impl_behaviours(fa, defaults, kind, value, behaviours, callbacks) do
        {:ok, impl_behaviours} ->
          Enum.reduce(impl_behaviours, acc, fn {fa, behaviour}, {callbacks, contexts} ->
            callbacks = Map.delete(callbacks, fa)
            contexts = Map.update(contexts, behaviour, [context], &[context | &1])
            {callbacks, contexts}
          end)

        {:error, message} ->
          # Point the warning at the definition's own file/line.
          formatted = format_impl_warning(fa, kind, message)
          IO.warn(formatted, Macro.Env.stacktrace(%{env | line: line, file: file}))
          acc
      end
    end)
  end
  # Expands a definition into one {name, arity} per default-argument clause
  # before dispatching on the @impl value below.
  defp impl_behaviours({function, arity}, defaults, kind, value, behaviours, callbacks) do
    impls = for n <- arity..(arity - defaults), do: {function, n}
    impl_behaviours(impls, kind, value, behaviours, callbacks)
  end

  # @impl on a private definition is always an error.
  defp impl_behaviours(_, kind, _, _, _) when kind in [:defp, :defmacrop] do
    {:error, :private_function}
  end

  # @impl given but no @behaviour was declared at all.
  defp impl_behaviours(_, _, value, [], _) do
    {:error, {:no_behaviours, value}}
  end

  # @impl false: only valid when none of the arities is actually a callback.
  defp impl_behaviours(impls, _, false, _, callbacks) do
    case callbacks_for_impls(impls, callbacks) do
      [] -> {:ok, []}
      [impl | _] -> {:error, {:impl_not_defined, impl}}
    end
  end

  # @impl true: valid when at least one arity is a known callback.
  defp impl_behaviours(impls, _, true, _, callbacks) do
    case callbacks_for_impls(impls, callbacks) do
      [] -> {:error, {:impl_defined, callbacks}}
      impls -> {:ok, impls}
    end
  end

  # @impl SomeBehaviour: the callback must belong to that declared behaviour.
  defp impl_behaviours(impls, _, behaviour, behaviours, callbacks) do
    filtered = behaviour_callbacks_for_impls(impls, behaviour, callbacks)

    cond do
      filtered != [] ->
        {:ok, filtered}

      behaviour not in behaviours ->
        {:error, {:behaviour_not_declared, behaviour}}

      true ->
        {:error, {:behaviour_not_defined, behaviour, callbacks}}
    end
  end
defp behaviour_callbacks_for_impls([], _behaviour, _callbacks) do
[]
end
defp behaviour_callbacks_for_impls([fa | tail], behaviour, callbacks) do
case callbacks[fa] do
{_, ^behaviour, _} ->
[{fa, behaviour} | behaviour_callbacks_for_impls(tail, behaviour, callbacks)]
_ ->
behaviour_callbacks_for_impls(tail, behaviour, callbacks)
end
end
defp callbacks_for_impls([], _) do
[]
end
defp callbacks_for_impls([fa | tail], callbacks) do
case callbacks[fa] do
{_, behaviour, _} -> [{fa, behaviour} | callbacks_for_impls(tail, callbacks)]
nil -> callbacks_for_impls(tail, callbacks)
end
end
  # Renders each impl_behaviours/5 error as a human-readable warning; the
  # message text is part of the compiler's user-facing output.
  defp format_impl_warning(fa, kind, :private_function) do
    "#{format_definition(kind, fa)} is private, @impl attribute is always discarded for private functions/macros"
  end

  defp format_impl_warning(fa, kind, {:no_behaviours, value}) do
    "got \"@impl #{inspect(value)}\" for #{format_definition(kind, fa)} but no behaviour was declared"
  end

  # Note: this clause deliberately takes the {fa, behaviour} pair from the
  # error tuple rather than the first argument.
  defp format_impl_warning(_, kind, {:impl_not_defined, {fa, behaviour}}) do
    "got \"@impl false\" for #{format_definition(kind, fa)} " <>
      "but it is a callback specified in #{inspect(behaviour)}"
  end

  defp format_impl_warning(fa, kind, {:impl_defined, callbacks}) do
    "got \"@impl true\" for #{format_definition(kind, fa)} " <>
      "but no behaviour specifies such callback#{known_callbacks(callbacks)}"
  end

  defp format_impl_warning(fa, kind, {:behaviour_not_declared, behaviour}) do
    "got \"@impl #{inspect(behaviour)}\" for #{format_definition(kind, fa)} " <>
      "but this behaviour was not declared with @behaviour"
  end

  defp format_impl_warning(fa, kind, {:behaviour_not_defined, behaviour, callbacks}) do
    "got \"@impl #{inspect(behaviour)}\" for #{format_definition(kind, fa)} " <>
      "but this behaviour does not specify such callback#{known_callbacks(callbacks)}"
  end
  # No callbacks pending: nothing to scan.
  defp warn_missing_impls(_env, callbacks, _contexts, _defs) when map_size(callbacks) == 0 do
    :ok
  end

  # Once a module uses @impl anywhere, public definitions matching a still
  # unimplemented callback without the annotation are warned about.
  defp warn_missing_impls(env, non_implemented_callbacks, contexts, defs) do
    for {pair, kind, meta, _clauses} <- defs,
        kind in [:def, :defmacro] do
      with {:ok, {_, behaviour, _}} <- Map.fetch(non_implemented_callbacks, pair),
           true <- missing_impl_in_context?(meta, behaviour, contexts) do
        message =
          "module attribute @impl was not set for #{format_definition(kind, pair)} " <>
            "callback (specified in #{inspect(behaviour)}). " <>
            "This either means you forgot to add the \"@impl true\" annotation before the " <>
            "definition or that you are accidentally overriding this callback"

        IO.warn(message, Macro.Env.stacktrace(%{env | line: :elixir_utils.get_line(meta)}))
      end
    end

    :ok
  end
defp missing_impl_in_context?(meta, behaviour, contexts) do
case contexts do
%{^behaviour => known} -> Keyword.get(meta, :context) in known
%{} -> not Keyword.has_key?(meta, :context)
end
end
defp format_definition(kind, {name, arity}) do
format_definition(kind) <> " #{name}/#{arity}"
end
defp format_definition(:defmacro), do: "macro"
defp format_definition(:defmacrop), do: "macro"
defp format_definition(:def), do: "function"
defp format_definition(:defp), do: "function"
  # Suffix for @impl warnings listing the callbacks the module could have
  # been implementing (or explaining that there are none).
  defp known_callbacks(callbacks) when map_size(callbacks) == 0 do
    ". There are no known callbacks, please specify the proper @behaviour " <>
      "and make sure it defines callbacks"
  end

  defp known_callbacks(callbacks) do
    formatted_callbacks =
      for {{name, arity}, {kind, module, _}} <- callbacks do
        "\n * " <> Exception.format_mfa(module, name, arity) <> " (#{format_definition(kind)})"
      end

    # Interpolating the list of binaries concatenates them into one string.
    ". The known callbacks are:\n#{formatted_callbacks}\n"
  end
  @doc false
  # Used internally by Kernel's @.
  # This function is private and must be used only internally.
  #
  # `line` is the usage line for warning purposes; passing a non-integer (as
  # Module.get_attribute/3 does) silences the undefined-attribute warning.
  def __get_attribute__(module, key, line) when is_atom(key) do
    # The public name/arity {:get_attribute, 2} is reported on purpose: this
    # private entry point is reached through the public API and the @ macro.
    assert_not_compiled!(
      {:get_attribute, 2},
      module,
      "Use the Module.__info__/1 callback or Code.fetch_docs/1 instead"
    )

    {set, bag} = data_tables_for(module)

    case :ets.lookup(set, key) do
      [{_, _, :accumulate}] ->
        # Accumulated values: reversed so the newest value comes first (new
        # attributes go "to the top" per register_attribute/3's docs).
        :lists.reverse(bag_lookup_element(bag, {:accumulate, key}, 2))

      [{_, val, nil}] ->
        val

      [{_, val, _}] ->
        # Clear the stored line (third element) to mark the attribute as
        # read; put_attribute only warns about redefining while unread.
        :ets.update_element(set, key, {3, nil})
        val

      [] when is_integer(line) ->
        # TODO: Consider raising instead of warning on v2.0 as it usually cascades
        error_message =
          "undefined module attribute @#{key}, " <>
            "please remove access to @#{key} or explicitly set it before access"

        IO.warn(error_message, attribute_stack(module, line))
        nil

      [] ->
        nil
    end
  end
  @doc false
  # Used internally by Kernel's @.
  # This function is private and must be used only internally.
  #
  # `line` is the @ usage line (nil when called through the public
  # Module.put_attribute/3, which disables redefinition warnings).
  def __put_attribute__(module, key, value, line) when is_atom(key) do
    assert_not_compiled!(__ENV__.function, module)
    {set, bag} = data_tables_for(module)
    # Validate/normalize built-in attribute values before storing.
    value = preprocess_attribute(key, value)
    put_attribute(module, key, value, line, set, bag)
    :ok
  end
  # If any of the doc attributes are called with a keyword list that
  # will become documentation metadata. Multiple calls will be merged
  # into the same map overriding duplicate keys.
  defp put_attribute(module, key, {_, metadata}, line, set, _bag)
       when key in [:doc, :typedoc, :moduledoc] and is_list(metadata) do
    metadata_map = preprocess_doc_meta(metadata, module, line, %{})

    case :ets.insert_new(set, {{key, :meta}, metadata_map, line}) do
      true ->
        :ok

      false ->
        # Entry already present: merge, with the new metadata winning.
        current_metadata = :ets.lookup_element(set, {key, :meta}, 2)
        :ets.update_element(set, {key, :meta}, {2, Map.merge(current_metadata, metadata_map)})
    end
  end

  # Optimize some attributes by avoiding writing to the attributes key
  # in the bag table since we handle them internally.
  defp put_attribute(module, key, value, line, set, _bag)
       when key in [:doc, :typedoc, :moduledoc, :impl, :deprecated] do
    try do
      :ets.lookup_element(set, key, 3)
    catch
      # :badarg means the key is absent, i.e. first write: nothing to warn.
      :error, :badarg -> :ok
    else
      unread_line when is_integer(line) and is_integer(unread_line) ->
        # A previous value was set and never consumed: warn on the overwrite.
        message = "redefining @#{key} attribute previously set at line #{unread_line}"
        IO.warn(message, attribute_stack(module, line))

      _ ->
        :ok
    end

    :ets.insert(set, {key, value, line})
  end

  # @on_load may only be set once; any second write raises.
  defp put_attribute(_module, :on_load, value, line, set, bag) do
    try do
      :ets.lookup_element(set, :on_load, 3)
    catch
      :error, :badarg ->
        :ets.insert(set, {:on_load, value, line})
        :ets.insert(bag, {:attributes, :on_load})
    else
      _ -> raise ArgumentError, "the @on_load attribute can only be set once per module"
    end
  end

  # General case: a first write also registers the key in the bag's
  # :attributes list; accumulating attributes append to the bag instead of
  # overwriting the set entry.
  defp put_attribute(_module, key, value, line, set, bag) do
    try do
      :ets.lookup_element(set, key, 3)
    catch
      :error, :badarg ->
        :ets.insert(set, {key, value, line})
        :ets.insert(bag, {:attributes, key})
    else
      :accumulate -> :ets.insert(bag, {{:accumulate, key}, value})
      _ -> :ets.insert(set, {key, value, line})
    end
  end
defp attribute_stack(module, line) do
file = String.to_charlist(Path.relative_to_cwd(:elixir_module.file(module)))
[{module, :__MODULE__, 0, file: file, line: line}]
end
## Helpers
  # Validates/normalizes built-in attribute values before storage, raising
  # ArgumentError with a user-facing message on misuse.
  defp preprocess_attribute(key, value) when key in [:moduledoc, :typedoc, :doc] do
    case value do
      {line, doc} when is_integer(line) and (is_binary(doc) or doc == false or is_nil(doc)) ->
        value

      # A keyword list becomes documentation metadata. Note `key` here
      # rebinds the parameter to the first metadata key.
      {line, [{key, _} | _]} when is_integer(line) and is_atom(key) ->
        value

      {line, doc} when is_integer(line) ->
        raise ArgumentError,
              "@#{key} is a built-in module attribute for documentation. It should be either " <>
                "false, nil, a string, or a keyword list, got: #{inspect(doc)}"

      _other ->
        raise ArgumentError,
              "@#{key} is a built-in module attribute for documentation. When set dynamically, " <>
                "it should be {line, doc} (where \"doc\" is either false, nil, a string, or a keyword list), " <>
                "got: #{inspect(value)}"
    end
  end

  # @on_load is normalized to a {function_name, 0} tuple.
  defp preprocess_attribute(:on_load, value) do
    case value do
      _ when is_atom(value) ->
        {value, 0}

      {atom, 0} = tuple when is_atom(atom) ->
        tuple

      _ ->
        raise ArgumentError,
              "@on_load is a built-in module attribute that annotates a function to be invoked " <>
                "when the module is loaded. It should be an atom or a {atom, 0} tuple, " <>
                "got: #{inspect(value)}"
    end
  end

  defp preprocess_attribute(:impl, value) do
    case value do
      _ when is_boolean(value) ->
        value

      module when is_atom(module) and module != nil ->
        # Attempt to compile behaviour but ignore failure (will warn later)
        _ = Code.ensure_compiled(module)
        value

      _ ->
        raise ArgumentError,
              "@impl is a built-in module attribute that marks the next definition " <>
                "as a callback implementation. It should be a module or a boolean, " <>
                "got: #{inspect(value)}"
    end
  end

  # Compilation hooks given as a bare module expand to {module, callback}.
  defp preprocess_attribute(:before_compile, atom) when is_atom(atom),
    do: {atom, :__before_compile__}

  defp preprocess_attribute(:after_compile, atom) when is_atom(atom),
    do: {atom, :__after_compile__}

  defp preprocess_attribute(:on_definition, atom) when is_atom(atom),
    do: {atom, :__on_definition__}

  # Typespec attributes cannot be set through put_attribute at all.
  defp preprocess_attribute(key, _value)
       when key in [:type, :typep, :opaque, :spec, :callback, :macrocallback] do
    raise ArgumentError,
          "attributes type, typep, opaque, spec, callback, and macrocallback " <>
            "must be set directly via the @ notation"
  end

  defp preprocess_attribute(:external_resource, value) when not is_binary(value) do
    raise ArgumentError,
          "@external_resource is a built-in module attribute used for specifying file " <>
            "dependencies. It should be a string the path to a file, got: #{inspect(value)}"
  end

  defp preprocess_attribute(:deprecated, value) when not is_binary(value) do
    raise ArgumentError,
          "@deprecated is a built-in module attribute that annotates a definition as deprecated. " <>
            "It should be a string with the reason for the deprecation, got: #{inspect(value)}"
  end

  defp preprocess_attribute(:file, value) do
    case value do
      _ when is_binary(value) ->
        value

      {file, line} when is_binary(file) and is_integer(line) ->
        value

      _ ->
        raise ArgumentError,
              "@file is a built-in module attribute that annotates the file and line the next " <>
                "definition comes from. It should be a string or {string, line} tuple as value, " <>
                "got: #{inspect(value)}"
    end
  end

  # Everything else is stored as-is.
  defp preprocess_attribute(_key, value) do
    value
  end
defp preprocess_doc_meta([], _module, _line, map), do: map
defp preprocess_doc_meta([{key, _} | tail], module, line, map)
when key in [:opaque, :defaults] do
message = "ignoring reserved documentation metadata key: #{inspect(key)}"
IO.warn(message, attribute_stack(module, line))
preprocess_doc_meta(tail, module, line, map)
end
defp preprocess_doc_meta([{key, value} | tail], module, line, map) when is_atom(key) do
validate_doc_meta(key, value)
preprocess_doc_meta(tail, module, line, Map.put(map, key, value))
end
  # Validates the built-in documentation metadata keys; any other key passes
  # through unvalidated (last clause).
  defp validate_doc_meta(:since, value) when not is_binary(value) do
    raise ArgumentError,
          ":since is a built-in documentation metadata key. It should be a string representing " <>
            "the version in which the documented entity was added, got: #{inspect(value)}"
  end

  defp validate_doc_meta(:deprecated, value) when not is_binary(value) do
    raise ArgumentError,
          ":deprecated is a built-in documentation metadata key. It should be a string " <>
            "representing the replacement for the deprecated entity, got: #{inspect(value)}"
  end

  defp validate_doc_meta(:delegate_to, value) do
    case value do
      {m, f, a} when is_atom(m) and is_atom(f) and is_integer(a) and a >= 0 ->
        :ok

      _ ->
        raise ArgumentError,
              ":delegate_to is a built-in documentation metadata key. It should be a three-element " <>
                "tuple in the form of {module, function, arity}, got: #{inspect(value)}"
    end
  end

  defp validate_doc_meta(_, _), do: :ok
defp get_doc_info(table, env) do
case :ets.take(table, :doc) do
[{:doc, {_, _} = pair, _}] ->
pair
[] ->
{env.line, nil}
end
end
defp data_tables_for(module) do
:elixir_module.data_tables(module)
end
defp bag_lookup_element(table, key, pos) do
:ets.lookup_element(table, key, pos)
catch
:error, :badarg -> []
end
defp assert_not_compiled!(function_name_arity, module, extra_msg \\ "") do
open?(module) ||
raise ArgumentError,
assert_not_compiled_message(function_name_arity, module, extra_msg)
end
defp assert_not_compiled_message({function_name, arity}, module, extra_msg) do
mfa = "Module.#{function_name}/#{arity}"
"could not call #{mfa} because the module #{inspect(module)} is already compiled" <>
case extra_msg do
"" -> ""
_ -> ". " <> extra_msg
end
end
end
| 32.238095 | 113 | 0.663898 |
9e0b84a5240ec353caeb5e44e61eecf705cfb381 | 1,571 | ex | Elixir | farmbot_os/platform/target/configurator/vintage_network_layer.ex | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | 1 | 2019-08-06T11:51:48.000Z | 2019-08-06T11:51:48.000Z | farmbot_os/platform/target/configurator/vintage_network_layer.ex | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | farmbot_os/platform/target/configurator/vintage_network_layer.ex | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | defmodule FarmbotOS.Platform.Target.Configurator.VintageNetworkLayer do
@behaviour FarmbotOS.Configurator.NetworkLayer
@impl FarmbotOS.Configurator.NetworkLayer
def list_interfaces() do
VintageNet.all_interfaces()
|> Kernel.--(["usb0", "lo"])
|> Enum.map(fn ifname ->
[{["interface", ^ifname, "mac_address"], mac_address}] =
VintageNet.get_by_prefix(["interface", ifname, "mac_address"])
{ifname, %{mac_address: mac_address}}
end)
end
@impl FarmbotOS.Configurator.NetworkLayer
def scan(ifname) do
_ = VintageNet.scan(ifname)
[{_, aps}] = VintageNet.get_by_prefix(["interface", "wlan0", "wifi", "access_points"])
Enum.map(aps, fn {_bssid, %{bssid: bssid, ssid: ssid, signal_percent: signal, flags: flags}} ->
%{
ssid: ssid,
bssid: bssid,
level: signal,
security: flags_to_security(flags)
}
end)
|> Enum.uniq_by(fn %{ssid: ssid} -> ssid end)
|> Enum.sort(fn
%{level: level1}, %{level: level2} -> level1 >= level2
end)
|> Enum.filter(fn %{ssid: ssid} ->
String.length(to_string(ssid)) > 0
end)
end
defp flags_to_security([:wpa2_psk_ccmp | _]), do: "WPA-PSK"
defp flags_to_security([:wpa2_psk_ccmp_tkip | _]), do: "WPA-PSK"
defp flags_to_security([:wpa_psk_ccmp | _]), do: "WPA-PSK"
defp flags_to_security([:wpa_psk_ccmp_tkip | _]), do: "WPA-PSK"
defp flags_to_security([:wpa2_eap_ccmp | _]), do: "WPA-EAP"
defp flags_to_security([_ | rest]), do: flags_to_security(rest)
defp flags_to_security([]), do: "NONE"
end
| 34.152174 | 99 | 0.651178 |
9e0b99f4ce05dc5df0a614985b0e35727675bf54 | 4,207 | ex | Elixir | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/google_cloud_orgpolicy_v1_policy.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/google_cloud_orgpolicy_v1_policy.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/google_cloud_orgpolicy_v1_policy.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1Policy do
  @moduledoc """
  Defines a Cloud Organization `Policy` which is used to specify `Constraints` for configurations of Cloud Platform resources.

  ## Attributes

  *   `booleanPolicy` (*type:* `GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1BooleanPolicy.t`, *default:* `nil`) - For boolean `Constraints`, whether to enforce the `Constraint` or not.
  *   `constraint` (*type:* `String.t`, *default:* `nil`) - The name of the `Constraint` the `Policy` is configuring, for example, `constraints/serviceuser.services`. A [list of available constraints](/resource-manager/docs/organization-policy/org-policy-constraints) is available. Immutable after creation.
  *   `etag` (*type:* `String.t`, *default:* `nil`) - An opaque tag indicating the current version of the `Policy`, used for concurrency control. When the `Policy` is returned from either a `GetPolicy` or a `ListOrgPolicy` request, this `etag` indicates the version of the current `Policy` to use when executing a read-modify-write loop. When the `Policy` is returned from a `GetEffectivePolicy` request, the `etag` will be unset. When the `Policy` is used in a `SetOrgPolicy` method, use the `etag` value that was returned from a `GetOrgPolicy` request as part of a read-modify-write loop for concurrency control. Not setting the `etag`in a `SetOrgPolicy` request will result in an unconditional write of the `Policy`.
  *   `listPolicy` (*type:* `GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1ListPolicy.t`, *default:* `nil`) - List of values either allowed or disallowed.
  *   `restoreDefault` (*type:* `GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1RestoreDefault.t`, *default:* `nil`) - Restores the default behavior of the constraint; independent of `Constraint` type.
  *   `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - The time stamp the `Policy` was previously updated. This is set by the server, not specified by the caller, and represents the last time a call to `SetOrgPolicy` was made for that `Policy`. Any value set by the client will be ignored.
  *   `version` (*type:* `integer()`, *default:* `nil`) - Version of the `Policy`. Default version is 0;
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :booleanPolicy => GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1BooleanPolicy.t(),
          :constraint => String.t(),
          :etag => String.t(),
          :listPolicy => GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1ListPolicy.t(),
          :restoreDefault =>
            GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1RestoreDefault.t(),
          :updateTime => DateTime.t(),
          :version => integer()
        }

  # Struct fields are declared via GoogleApi.Gax.ModelBase's `field/2` macro;
  # `as:` points nested JSON objects at their generated model structs during
  # decoding.
  field(:booleanPolicy, as: GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1BooleanPolicy)
  field(:constraint)
  field(:etag)
  field(:listPolicy, as: GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1ListPolicy)
  field(:restoreDefault, as: GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1RestoreDefault)
  field(:updateTime, as: DateTime)
  field(:version)
end
defimpl Poison.Decoder, for: GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1Policy do
  # Delegates to the decode/2 generated by GoogleApi.Gax.ModelBase.
  def decode(value, options),
    do: GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1Policy.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1Policy do
  # All generated models share the generic ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 63.742424 | 719 | 0.745424 |
9e0baa87b8c074aafd637c3fdde76af6171190f4 | 1,500 | ex | Elixir | clients/contact_center_insights/lib/google_api/contact_center_insights/v1/model/google_cloud_contactcenterinsights_v1_deploy_issue_model_response.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/contact_center_insights/lib/google_api/contact_center_insights/v1/model/google_cloud_contactcenterinsights_v1_deploy_issue_model_response.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/contact_center_insights/lib/google_api/contact_center_insights/v1/model/google_cloud_contactcenterinsights_v1_deploy_issue_model_response.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1DeployIssueModelResponse do
  @moduledoc """
  The response to deploy an issue model.

  This response carries no fields: it exists so the deploy operation has a
  typed (if empty) result.

  ## Attributes

  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{}
end
# Poison decode hook: defer to the generated model's own decode/2.
defimpl Poison.Decoder,
  for:
    GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1DeployIssueModelResponse do
  def decode(value, options) do
    GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1DeployIssueModelResponse.decode(
      value,
      options
    )
  end
end
# Poison encode hook: delegate JSON encoding to the shared Gax model encoder.
defimpl Poison.Encoder,
  for:
    GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1DeployIssueModelResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.612245 | 112 | 0.779333 |
9e0bf4836b410ee044d756fd46653b1af28c55b9 | 8,619 | ex | Elixir | lib/format/datetime/formatters/relative.ex | aforward-oss/timex | 1dc6d355c12f06b0250466b62ca001dd1dad8de0 | [
"MIT"
] | null | null | null | lib/format/datetime/formatters/relative.ex | aforward-oss/timex | 1dc6d355c12f06b0250466b62ca001dd1dad8de0 | [
"MIT"
] | null | null | null | lib/format/datetime/formatters/relative.ex | aforward-oss/timex | 1dc6d355c12f06b0250466b62ca001dd1dad8de0 | [
"MIT"
] | null | null | null | defmodule Timex.Format.DateTime.Formatters.Relative do
@moduledoc """
Relative time, based on Moment.js
Uses localized strings.
The format string should contain {relative}, which is where the phrase will be injected.
| Range | Sample Output
---------------------------------------------------------------------
| 0 to 45 seconds | a few seconds ago
| 45 to 90 seconds | a minute ago
| 90 seconds to 45 minutes | 2 minutes ago ... 45 minutes ago
| 45 to 90 minutes | an hour ago
| 90 minutes to 22 hours | 2 hours ago ... 22 hours ago
| 22 to 36 hours | a day ago
| 36 hours to 25 days | 2 days ago ... 25 days ago
| 25 to 45 days | a month ago
| 45 to 345 days | 2 months ago ... 11 months ago
| 345 to 545 days (1.5 years)| a year ago
| 546 days+ | 2 years ago ... 20 years ago
"""
use Timex.Format.DateTime.Formatter
use Combine
alias Timex.DateTime
alias Timex.Format.FormatError
alias Timex.Translator
  # Parses a format string into directives; fails unless it contains at least
  # one non-literal directive (i.e. {relative}).
  @spec tokenize(String.t) :: {:ok, [Directive.t]} | {:error, term}
  def tokenize(format_string) do
    case Combine.parse(format_string, relative_parser) do
      results when is_list(results) ->
        directives = results |> List.flatten |> Enum.filter(fn x -> x !== nil end)
        case Enum.any?(directives, fn %Directive{type: type} -> type != :literal end) do
          false -> {:error, "Invalid format string, must contain at least one directive."}
          true -> {:ok, directives}
        end
      {:error, _} = err -> err
    end
  end
  # Formats using the default locale; format!/2 raises on error via lformat!/3.
  @spec format(DateTime.t, String.t) :: {:ok, String.t} | {:error, term}
  def format(date, format_string), do: lformat(date, format_string, Translator.default_locale)
  @spec format!(DateTime.t, String.t) :: String.t | no_return
  def format!(date, format_string), do: lformat!(date, format_string, Translator.default_locale)
  # Formats `date` relative to the current time in the date's own timezone,
  # using the given locale for the phrase translation.
  @spec lformat(DateTime.t, String.t, String.t) :: {:ok, String.t} | {:error, term}
  def lformat(%DateTime{:timezone => tz} = date, format_string, locale) do
    case tokenize(format_string) do
      {:ok, []} ->
        {:error, "There were no formatting directives in the provided string."}
      {:ok, dirs} when is_list(dirs) ->
        do_format(locale, date, DateTime.now(tz), dirs, <<>>)
      {:error, reason} -> {:error, {:format, reason}}
    end
  end
@spec lformat(DateTime.t, String.t, String.t) :: String.t | no_return
def lformat!(date, format_string, locale) do
case lformat(date, format_string, locale) do
{:ok, result} -> result
{:error, reason} -> raise FormatError, message: reason
end
end
  # Like lformat/3 but formats `date` relative to an explicit reference
  # datetime instead of "now"; the /3 arity uses the default locale.
  def relative_to(date, relative_to, format_string) do
    relative_to(date, relative_to, format_string, Translator.default_locale)
  end
  def relative_to(date, relative_to, format_string, locale) do
    case tokenize(format_string) do
      {:ok, []} ->
        {:error, "There were no formatting directives in the provided string."}
      {:ok, dirs} when is_list(dirs) ->
        do_format(locale, date, relative_to, dirs, <<>>)
      {:error, reason} -> {:error, {:format, reason}}
    end
  end
  # Bucket sizes expressed in seconds (a month is approximated as 30 days).
  @minute 60
  @hour @minute * 60
  @day @hour * 24
  @month @day * 30
  @year @month * 12
  # All directives consumed: formatting finished successfully.
  defp do_format(_locale, _date, _relative, [], result), do: {:ok, result}
  # An error produced earlier short-circuits the remaining directives.
  defp do_format(_locale, _date, _relative, _, {:error, _} = error), do: error
  # Literal characters are appended to the output verbatim.
  defp do_format(locale, date, relative, [%Directive{type: :literal, value: char} | dirs], result) when is_binary(char) do
    do_format(locale, date, relative, dirs, <<result::binary, char::binary>>)
  end
defp do_format(locale, date, relative, [%Directive{type: :relative} | dirs], result) do
diff = Timex.diff(date, relative, :seconds)
diff = case Timex.compare(date, relative, :seconds) do
0 -> diff
1 -> diff
-1 -> diff * -1
end
phrase = cond do
# future
diff >= 0 && diff <= 45 ->
Translator.translate_plural(locale, "relative_time", "in %{count} second", "in %{count} seconds", diff)
diff > 45 && diff < @minute * 2 ->
Translator.translate_plural(locale, "relative_time", "in %{count} minute", "in %{count} minutes", 1)
diff >= (@minute * 2) && diff < @hour ->
Translator.translate_plural(locale, "relative_time", "in %{count} minute", "in %{count} minutes", div(diff, @minute))
diff >= @hour && diff < (@hour * 2) ->
Translator.translate_plural(locale, "relative_time", "in %{count} hour", "in %{count} hours", 1)
diff >= (@hour * 2) && diff < @day ->
Translator.translate_plural(locale, "relative_time", "in %{count} hour", "in %{count} hours", div(diff, @hour))
diff >= @day && diff < (@day * 2) ->
Translator.translate_plural(locale, "relative_time", "in %{count} day", "in %{count} days", 1)
diff >= (@day * 2) && diff < @month ->
Translator.translate_plural(locale, "relative_time", "in %{count} day", "in %{count} days", div(diff, @day))
diff >= @month && diff < (@month * 2) ->
Translator.translate_plural(locale, "relative_time", "in %{count} month", "in %{count} months", 1)
diff >= (@month * 2) && diff < @year ->
Translator.translate_plural(locale, "relative_time", "in %{count} month", "in %{count} months", div(diff, @month))
diff >= @year && diff < (@year * 2) ->
Translator.translate_plural(locale, "relative_time", "in %{count} year", "in %{count} years", 1)
diff >= (@year * 2) ->
Translator.translate_plural(locale, "relative_time", "in %{count} year", "in %{count} years", div(diff, @year))
# past
diff <= 0 && diff >= -45 ->
Translator.translate_plural(locale, "relative_time", "%{count} second ago", "%{count} seconds ago", diff * -1)
diff < -45 && diff > @minute * 2 * -1 ->
Translator.translate_plural(locale, "relative_time", "%{count} minute ago", "%{count} minutes ago", -1)
diff <= (@minute * 2) && diff > @hour * -1 ->
Translator.translate_plural(locale, "relative_time", "%{count} minute ago", "%{count} minutes ago", div(diff * -1, @minute))
diff <= @hour && diff > (@hour * 2 * -1) ->
Translator.translate_plural(locale, "relative_time", "%{count} hour ago", "%{count} hours ago", 1)
diff <= (@hour * 2) && diff > (@day * -1) ->
Translator.translate_plural(locale, "relative_time", "%{count} hour ago", "%{count} hours ago", div(diff * -1, @hour))
diff <= @day && diff > (@day * 2 * -1) ->
Translator.translate_plural(locale, "relative_time", "%{count} day ago", "%{count} days ago", 1)
diff <= (@day * 2) && diff > (@month * -1) ->
Translator.translate_plural(locale, "relative_time", "%{count} day ago", "%{count} days ago", div(diff * -1, @day))
diff <= @month && diff > (@month * 2 * -1) ->
Translator.translate_plural(locale, "relative_time", "%{count} month ago", "%{count} months ago", 1)
diff <= (@month * 2) && diff > (@year * -1) ->
Translator.translate_plural(locale, "relative_time", "%{count} month ago", "%{count} months ago", div(diff * -1, @month))
diff <= @year && diff > (@year * 2 * -1) ->
Translator.translate_plural(locale, "relative_time", "%{count} year ago", "%{count} years ago", 1)
diff <= (@year * 2 * -1) ->
Translator.translate_plural(locale, "relative_time", "%{count} year ago", "%{count} years ago", div(diff * -1, @year))
end
do_format(locale, date, relative, dirs, <<result::binary, phrase::binary>>)
end
  # Any other directive is delegated to the shared token formatter.
  defp do_format(locale, date, relative, [%Directive{type: type, modifiers: mods, flags: flags, width: width} | dirs], result) do
    case format_token(locale, type, date, mods, flags, width) do
      {:error, _} = err -> err
      formatted -> do_format(locale, date, relative, dirs, <<result::binary, formatted::binary>>)
    end
  end
  # Token parser
  # Splits the format string into {relative} directives and literal characters.
  defp relative_parser do
    many1(choice([
      between(char(?{), map(one_of(word, ["relative"]), &map_directive/1), char(?})),
      map(none_of(char, ["{", "}"]), &map_literal/1)
    ]))
  end
  # Gets/builds the Directives for a given token
  defp map_directive("relative"),
    do: %Directive{:type => :relative, :value => "relative"}
  # Generates directives for literal characters
  defp map_literal([]), do: nil
  defp map_literal(literals)
    when is_list(literals), do: Enum.map(literals, &map_literal/1)
  defp map_literal(literal), do: %Directive{type: :literal, value: literal, parser: char(literal)}
end
| 48.971591 | 132 | 0.603202 |
9e0c1f092ab16d264a0844a2053ed4b3ea5a7f3a | 1,766 | exs | Elixir | kv_umbrella/apps/kv/test/registry_test.exs | guthb/fp | 2a754adfcf2eb60e80016a23de81fad83993bafe | [
"MIT"
] | null | null | null | kv_umbrella/apps/kv/test/registry_test.exs | guthb/fp | 2a754adfcf2eb60e80016a23de81fad83993bafe | [
"MIT"
] | null | null | null | kv_umbrella/apps/kv/test/registry_test.exs | guthb/fp | 2a754adfcf2eb60e80016a23de81fad83993bafe | [
"MIT"
] | null | null | null | defmodule KV.RegistryTest do
use ExUnit.Case, async: true
setup context do
_ = start_supervised!({KV.Registry, name: context.test})
%{registry: registry}
end
  # A fresh registry can create and look up named buckets.
  test "spawns buckets", %{registry: registry} do
    assert KV.Registry.lookup(registry, "shopping") == :error
    KV.Registry.create(registry, "shopping")
    assert {:ok, bucket} = KV.Registry.lookup(registry, "shopping")
    KV.Bucket.put(bucket, "milk", 1)
    assert KV.Bucket.get(bucket, "milk") == 1
  end
  # A bucket stopped with a :normal reason is removed from the registry.
  test "removes buckets on exit", %{registry: registry} do
    KV.Registry.create(registry, "shopping")
    {:ok, bucket} = KV.Registry.lookup(registry, "shopping")
    Agent.stop(bucket)
    # assert KV.Registry.lookup(registry, "shopping") == :error
    # end
    # Do a call to ensure the registry processed the DOWN message
    _ = KV.Registry.create(registry, "bogus")
    assert KV.Registry.lookup(registry, "shopping") == :error
  end
  # A bucket that crashes (non-normal exit) is also removed from the registry.
  test "removes bucket on crash", %{registry: registry} do
    KV.Registry.create(registry, "shopping")
    {:ok, bucket} = KV.Registry.lookup(registry, "shopping")
    # Stop the bucket with non-normal reason
    Agent.stop(bucket, :shutdown)
    # Do a call to ensure the registry processed the DOWN message
    _ = KV.Registry.create(registry, "bogus")
    assert KV.Registry.lookup(registry, "shopping") == :error
  end
  # Callers must tolerate buckets dying between lookup and use.
  test "bucket can crash at any time", %{registry: registry} do
    KV.Registry.create(registry, "shopping")
    {:ok, bucket} = KV.Registry.lookup(registry, "shopping")
    # Simulate a bucket crash by explicitly and synchronously shutting it down
    Agent.stop(bucket, :shutdown)
    # Now trying to call the dead process causes a :noproc exit
    catch_exit KV.Bucket.put(bucket, "milk", 3)
  end
end
| 32.109091 | 78 | 0.68573 |
9e0c262fa659b2d66f69c11810da59765beb88e0 | 507 | ex | Elixir | lib/events/users/user.ex | jacrdn/web_dev_hw08 | 1727810bb56a9a30733cb93cd77a8db7f5f747ed | [
"MIT"
] | null | null | null | lib/events/users/user.ex | jacrdn/web_dev_hw08 | 1727810bb56a9a30733cb93cd77a8db7f5f747ed | [
"MIT"
] | null | null | null | lib/events/users/user.ex | jacrdn/web_dev_hw08 | 1727810bb56a9a30733cb93cd77a8db7f5f747ed | [
"MIT"
] | null | null | null | defmodule Events.Users.User do
use Ecto.Schema
import Ecto.Changeset
  # Persistence schema: a user carries a name, email, and avatar photo hash,
  # and owns posts, comments, and responses.
  schema "users" do
    field :name, :string
    field :email, :string
    field :photo_hash, :string
    has_many :posts, Events.Posts.Post
    has_many :comments, Events.Comments.Comment
    has_many :responses, Events.Responses.Response
    timestamps()
  end
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [:name, :email, :photo_hash])
|> validate_required([:name, :email, :photo_hash])
end
end
| 22.043478 | 54 | 0.682446 |
9e0c357d7bbcb08716d8f1d5d93082fff3db918d | 29,159 | ex | Elixir | lib/pulsar_ex/connection.ex | blueshift-labs/pulsar_ex | 8fd5b6b7fa98b172645bce56dd3a46395935e2e0 | [
"MIT"
] | 3 | 2021-04-16T01:54:20.000Z | 2022-01-09T23:26:56.000Z | lib/pulsar_ex/connection.ex | blueshift-labs/pulsar_ex | 8fd5b6b7fa98b172645bce56dd3a46395935e2e0 | [
"MIT"
] | null | null | null | lib/pulsar_ex/connection.ex | blueshift-labs/pulsar_ex | 8fd5b6b7fa98b172645bce56dd3a46395935e2e0 | [
"MIT"
] | null | null | null | defmodule PulsarEx.Connection do
  defmodule State do
    # Per-connection process state.
    # `requests` maps in-flight request keys — {:request_id, id} for control
    # commands, {:sequence_id, producer_id, seq} for sends — to
    # {from, start_ms, request} so replies can be routed and timed.
    # `producers`/`consumers` map ids to {pid, monitor_ref}.
    # `buffer` accumulates partial TCP frames between :tcp messages;
    # `last_server_ts` tracks broker liveness for the ping loop.
    @enforce_keys [
      :broker,
      :broker_name,
      :last_request_id,
      :last_producer_id,
      :last_consumer_id,
      :requests,
      :producers,
      :consumers,
      :buffer,
      :metadata
    ]
    defstruct [
      :broker,
      :broker_name,
      :last_request_id,
      :last_producer_id,
      :last_consumer_id,
      :requests,
      :producers,
      :consumers,
      :buffer,
      :metadata,
      :socket,
      :max_message_size,
      :last_server_ts
    ]
  end
use Connection
import PulsarEx.IO
require Logger
alias PulsarEx.{Broker, ProducerMessage}
alias PulsarEx.Proto.{
CommandConnect,
CommandConnected,
CommandPing,
CommandPong,
CommandProducer,
CommandProducerSuccess,
CommandCloseProducer,
CommandSuccess,
CommandError,
CommandCloseConsumer,
CommandSendReceipt,
CommandSendError,
CommandSubscribe,
CommandFlow,
CommandAck,
CommandAckResponse,
CommandRedeliverUnacknowledgedMessages,
CommandMessage,
CommandActiveConsumerChange,
MessageIdData
}
@client_version "PulsarEx #{Mix.Project.config()[:version]}"
@protocol_version 13
@connection_timeout 5000
@ping_interval 45_000
@request_timeout 60_000
def create_producer(conn, topic, opts \\ []) do
GenServer.call(conn, {:create_producer, topic, opts}, @request_timeout)
end
def subscribe(conn, topic, subscription, sub_type, opts \\ []) do
GenServer.call(conn, {:subscribe, topic, subscription, sub_type, opts}, @request_timeout)
end
def send_message(conn, producer_id, sequence_id, %ProducerMessage{} = message, timeout) do
GenServer.call(conn, {:send, producer_id, sequence_id, message}, timeout)
end
def send_messages(conn, producer_id, sequence_id, messages, timeout) when is_list(messages) do
GenServer.call(conn, {:send, producer_id, sequence_id, messages}, timeout)
end
def flow_permits(conn, consumer_id, permits) do
GenServer.call(conn, {:flow_permits, consumer_id, permits}, @request_timeout)
end
def redeliver(conn, consumer_id, msg_ids) do
GenServer.call(conn, {:redeliver, consumer_id, msg_ids}, @request_timeout)
end
def ack(conn, consumer_id, ack_type, msg_ids) do
GenServer.call(conn, {:ack, consumer_id, ack_type, msg_ids}, @request_timeout)
end
def start_link(%Broker{} = broker) do
Connection.start_link(__MODULE__, broker)
end
@impl true
def init(broker) do
Logger.debug("Starting connection to broker #{Broker.to_name(broker)}")
Process.flag(:trap_exit, true)
state = %State{
broker: broker,
broker_name: Broker.to_name(broker),
last_request_id: -1,
last_producer_id: -1,
last_consumer_id: -1,
requests: %{},
producers: %{},
consumers: %{},
buffer: <<>>,
metadata: %{broker: Broker.to_name(broker)}
}
{:connect, :init, state}
end
  @impl true
  # Connection callback: open the TCP socket and perform the Pulsar handshake;
  # on success switch the socket to active-once delivery and schedule the
  # keepalive ping. Any failure stops the process with the error reason.
  def connect(:init, %{broker: broker} = state) do
    with {:ok, socket} <- do_connect(broker.host, broker.port),
         {:ok, max_message_size} <- do_handshake(socket) do
      :inet.setopts(socket, active: :once)
      Process.send_after(self(), :send_ping, @ping_interval)
      Logger.debug("Connection established to broker #{state.broker_name}")
      :telemetry.execute(
        [:pulsar_ex, :connection, :success],
        %{count: 1},
        state.metadata
      )
      {:ok,
       %{
         state
         | socket: socket,
           last_server_ts: System.monotonic_time(:millisecond),
           max_message_size: max_message_size
       }}
    else
      {:error, _} = err ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :error],
          %{count: 1},
          state.metadata
        )
        {:stop, err, state}
    end
  end
  @impl true
  # Connection callback: close the socket and stop with the disconnect reason.
  def disconnect(err, state) do
    Logger.error("Disconnecting from broker #{state.broker_name}, #{inspect(err)}")
    :gen_tcp.close(state.socket)
    {:stop, err, state}
  end
@impl true
def terminate(reason, state) do
case reason do
:normal ->
Logger.debug("Closing connection from broker #{state.broker_name}}, #{inspect(reason)}")
:shutdown ->
Logger.debug("Closing connection from broker #{state.broker_name}}, #{inspect(reason)}")
{:shutdown, _} ->
Logger.debug("Closing connection from broker #{state.broker_name}}, #{inspect(reason)}")
_ ->
Logger.error("Closing connection from broker #{state.broker_name}}, #{inspect(reason)}")
:telemetry.execute(
[:pulsar_ex, :connection, :exit],
%{count: 1},
state.metadata
)
end
state
end
  # Opens the TCP socket using sanitized app-configured socket options and the
  # configured (or default) connect timeout.
  defp do_connect(host, port) do
    socket_opts = Application.get_env(:pulsar_ex, :socket_opts, []) |> optimize_socket_opts()
    connection_timeout = Application.get_env(:pulsar_ex, :connection_timeout, @connection_timeout)
    :gen_tcp.connect(to_charlist(host), port, socket_opts, connection_timeout)
  end
  # Performs the Pulsar CONNECT/CONNECTED exchange on a fresh socket.
  # Returns {:ok, max_message_size} advertised by the broker, or
  # {:error, :handshake} on any transport/protocol failure.
  defp do_handshake(socket) do
    command =
      CommandConnect.new(
        client_version: @client_version,
        protocol_version: @protocol_version
      )
    with :ok <- :gen_tcp.send(socket, encode_command(command)),
         {:ok, data} <- :gen_tcp.recv(socket, 0),
         {[{%CommandConnected{} = connected, _}], _} <- decode(data) do
      {:ok, connected.max_message_size}
    else
      _ ->
        {:error, :handshake}
    end
  end
defp optimize_socket_opts(socket_opts) do
socket_opts =
socket_opts
|> Enum.reject(fn
:binary -> true
{:nodelay, _} -> true
{:active, _} -> true
{:keepalive, _} -> true
end)
[:binary, nodelay: true, active: false, keepalive: true] ++ socket_opts
end
  # ================== handle_call! =====================
  @impl true
  # Builds a CommandProducer from opts (name, access mode, properties), bumps
  # the request/producer counters, and sends it. The caller is not replied to
  # here: the pending call is parked in `requests` keyed by request_id and
  # answered when the broker's ProducerSuccess/Error arrives.
  def handle_call({:create_producer, topic, opts}, from, state) do
    Logger.debug("Creating producer on broker #{state.broker_name}")
    request =
      CommandProducer.new(
        request_id: state.last_request_id + 1,
        producer_id: state.last_producer_id + 1,
        topic: topic
      )
    state = %{state | last_request_id: request.request_id, last_producer_id: request.producer_id}
    request =
      case Keyword.get(opts, :producer_name) do
        nil ->
          request
        producer_name ->
          %{request | producer_name: producer_name, user_provided_producer_name: true}
      end
    request =
      case Keyword.get(opts, :producer_access_mode) do
        nil ->
          request
        mode ->
          %{request | producer_access_mode: producer_access_mode(mode)}
      end
    request = %{request | metadata: Keyword.get(opts, :properties) |> to_kv()}
    case :gen_tcp.send(state.socket, encode_command(request)) do
      :ok ->
        requests =
          Map.put(
            state.requests,
            {:request_id, request.request_id},
            {from, System.monotonic_time(:millisecond), request}
          )
        {:noreply, %{state | requests: requests}}
      {:error, _} = err ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :create_producer, :error],
          %{count: 1},
          state.metadata
        )
        {:disconnect, err, err, state}
    end
  end
  @impl true
  # Builds a CommandSubscribe from opts and sends it; like :create_producer,
  # the caller is parked in `requests` and replied to when the broker's
  # Success/Error for this request_id arrives.
  def handle_call({:subscribe, topic, subscription, sub_type, opts}, from, state) do
    Logger.debug(
      "Subscribing consumer to topic #{topic} with subscription #{subscription} in #{sub_type} mode, on broker #{
        state.broker_name
      }"
    )
    request =
      CommandSubscribe.new(
        request_id: state.last_request_id + 1,
        consumer_id: state.last_consumer_id + 1,
        topic: topic,
        subscription: subscription,
        subType: subscription_type(sub_type)
      )
    state = %{state | last_request_id: request.request_id, last_consumer_id: request.consumer_id}
    request =
      case Keyword.get(opts, :consumer_name) do
        nil -> request
        consumer_name -> %{request | consumer_name: consumer_name}
      end
    request =
      case Keyword.get(opts, :priority_level) do
        nil -> request
        priority_level -> %{request | priority_level: priority_level}
      end
    request =
      case Keyword.get(opts, :durable) do
        nil -> request
        durable -> %{request | durable: durable}
      end
    request =
      case Keyword.get(opts, :read_compacted) do
        nil -> request
        read_compacted -> %{request | read_compacted: read_compacted}
      end
    request =
      case Keyword.get(opts, :force_topic_creation) do
        nil -> request
        force_topic_creation -> %{request | force_topic_creation: force_topic_creation}
      end
    request =
      case Keyword.get(opts, :initial_position) do
        nil -> request
        init_position -> %{request | initialPosition: initial_position(init_position)}
      end
    request = %{request | metadata: Keyword.get(opts, :properties) |> to_kv()}
    case :gen_tcp.send(state.socket, encode_command(request)) do
      :ok ->
        requests =
          Map.put(
            state.requests,
            {:request_id, request.request_id},
            {from, System.monotonic_time(:millisecond), request}
          )
        {:noreply, %{state | requests: requests}}
      {:error, _} = err ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :subscribe, :error],
          %{count: 1},
          state.metadata
        )
        {:disconnect, err, err, state}
    end
  end
  @impl true
  # Fire-and-forget flow control: grant the broker more message permits for
  # the consumer. Replies :ok as soon as the command hits the socket.
  def handle_call({:flow_permits, consumer_id, permits}, _from, state) do
    Logger.debug(
      "Sending Flow with #{permits} permits to broker #{state.broker_name} for consumer #{
        consumer_id
      }"
    )
    command =
      CommandFlow.new(
        consumer_id: consumer_id,
        messagePermits: permits
      )
    case :gen_tcp.send(state.socket, encode_command(command)) do
      :ok ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :flow_permits, :success],
          %{count: 1},
          state.metadata
        )
        {:reply, :ok, state}
      {:error, _} = err ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :flow_permits, :error],
          %{count: 1},
          state.metadata
        )
        {:disconnect, err, err, state}
    end
  end
  @impl true
  # Asks the broker to redeliver the given unacknowledged messages.
  # `msg_ids` are {ledger_id, entry_id} pairs; replies :ok once sent.
  def handle_call({:redeliver, consumer_id, msg_ids}, _from, state) when is_list(msg_ids) do
    Logger.debug(
      "Sending #{length(msg_ids)} redeliver to broker #{state.broker_name} for consumer #{
        consumer_id
      }"
    )
    message_ids =
      Enum.map(msg_ids, fn {ledgerId, entryId} ->
        MessageIdData.new(ledgerId: ledgerId, entryId: entryId)
      end)
    command =
      CommandRedeliverUnacknowledgedMessages.new(
        consumer_id: consumer_id,
        message_ids: message_ids
      )
    case :gen_tcp.send(state.socket, encode_command(command)) do
      :ok ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :redeliver, :success],
          %{count: 1},
          state.metadata
        )
        {:reply, :ok, state}
      {:error, _} = err ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :redeliver, :error],
          %{count: 1},
          state.metadata
        )
        {:disconnect, err, err, state}
    end
  end
  @impl true
  # Acknowledges messages for a consumer. Replies :ok once the CommandAck is
  # written; the request is still recorded so the eventual AckResponse can be
  # correlated by request_id.
  def handle_call({:ack, consumer_id, ack_type, msg_ids}, _from, state)
      when is_list(msg_ids) do
    Logger.debug(
      "Sending #{length(msg_ids)} acks to broker #{state.broker_name} for consumer #{consumer_id}"
    )
    message_ids =
      Enum.map(msg_ids, fn {ledgerId, entryId} ->
        MessageIdData.new(ledgerId: ledgerId, entryId: entryId)
      end)
    request =
      CommandAck.new(
        request_id: state.last_request_id + 1,
        consumer_id: consumer_id,
        ack_type: ack_type(ack_type),
        message_id: message_ids,
        txnid_least_bits: nil,
        txnid_most_bits: nil
      )
    state = %{state | last_request_id: request.request_id}
    case :gen_tcp.send(state.socket, encode_command(request)) do
      :ok ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :ack, :success],
          %{count: 1},
          state.metadata
        )
        requests =
          Map.put(
            state.requests,
            {:request_id, request.request_id},
            {nil, System.monotonic_time(:millisecond), request}
          )
        {:reply, :ok, %{state | requests: requests}}
      {:error, _} = err ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :ack, :error],
          %{count: 1},
          state.metadata
        )
        {:disconnect, err, err, state}
    end
  end
  @impl true
  # Sends a pre-encoded batch of messages; the caller is parked in `requests`
  # keyed by {:sequence_id, producer_id, sequence_id} and answered when the
  # broker's SendReceipt/SendError arrives.
  def handle_call(
        {:send, producer_id, sequence_id, messages},
        from,
        state
      )
      when is_list(messages) do
    Logger.debug(
      "Producing #{length(messages)} messages in batch to broker #{state.broker_name} for producer #{
        producer_id
      }"
    )
    request = encode_messages(messages)
    case :gen_tcp.send(state.socket, request) do
      :ok ->
        requests =
          Map.put(
            state.requests,
            {:sequence_id, producer_id, sequence_id},
            {from, System.monotonic_time(:millisecond), request}
          )
        {:noreply, %{state | requests: requests}}
      {:error, _} = err ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :send, :error],
          %{count: 1},
          state.metadata
        )
        {:disconnect, err, err, state}
    end
  end
  @impl true
  # Single-message variant of :send; same pending-request bookkeeping as the
  # batch clause above.
  def handle_call(
        {:send, producer_id, sequence_id, message},
        from,
        state
      ) do
    Logger.debug("Producing message to broker #{state.broker_name} for producer #{producer_id}")
    request = encode_message(message)
    case :gen_tcp.send(state.socket, request) do
      :ok ->
        requests =
          Map.put(
            state.requests,
            {:sequence_id, producer_id, sequence_id},
            {from, System.monotonic_time(:millisecond), request}
          )
        {:noreply, %{state | requests: requests}}
      {:error, _} = err ->
        :telemetry.execute(
          [:pulsar_ex, :connection, :send, :error],
          %{count: 1},
          state.metadata
        )
        {:disconnect, err, err, state}
    end
  end
  # ================== handle_info! =====================
  # Socket lifecycle notifications: passive transitions are ignored; a closed
  # or errored socket triggers a disconnect.
  @impl true
  def handle_info({:tcp_passive, _}, state), do: {:noreply, state}
  @impl true
  def handle_info({:tcp_closed, _}, state), do: {:disconnect, {:error, :closed}, state}
  @impl true
  def handle_info({:tcp_error, _, reason}, state), do: {:disconnect, {:error, reason}, state}
  @impl true
  # Incoming bytes (active-once): prepend the leftover buffer, decode whole
  # frames, run protocol commands through handle_command/3, then group
  # CommandMessage payloads per consumer and cast them in one batch. Finally
  # re-arm active-once and refresh the broker liveness timestamp.
  def handle_info({:tcp, socket, data}, state) do
    Logger.debug("Receiving data from broker #{state.broker_name}")
    {messages, buffer} = decode(<<state.buffer::binary, data::binary>>)
    # handle tcp messages other than consumer messages
    state =
      messages
      |> Enum.reduce(state, fn
        {command, payload}, acc -> handle_command(command, payload, acc)
      end)
    # now bundle the consumer messages to consumers
    messages
    |> Enum.filter(&match?({%CommandMessage{}, _}, &1))
    |> Enum.reduce(%{}, fn {command, msgs}, acc ->
      Map.merge(acc, %{command.consumer_id => msgs}, fn _, m1, m2 -> m1 ++ m2 end)
    end)
    |> Enum.each(fn {consumer_id, msgs} ->
      Logger.debug(
        "Received #{length(msgs)} messages from broker #{state.broker_name} for consumer #{
          consumer_id
        }"
      )
      case Map.get(state.consumers, consumer_id) do
        {pid, _} ->
          GenServer.cast(pid, {:messages, msgs})
        nil ->
          Logger.error(
            "Received #{length(msgs)} unexpected messages from broker #{state.broker_name} for consumer #{
              consumer_id
            }"
          )
      end
    end)
    :inet.setopts(socket, active: :once)
    {:noreply, %{state | buffer: buffer, last_server_ts: System.monotonic_time(:millisecond)}}
  end
  @impl true
  # Keepalive: disconnect if the broker has been silent for more than two ping
  # intervals; otherwise send a Ping and reschedule.
  def handle_info(:send_ping, state) do
    Logger.debug("Sending Ping to broker #{state.broker_name}")
    cond do
      System.monotonic_time(:millisecond) - state.last_server_ts > 2 * @ping_interval ->
        {:disconnect, {:error, :closed}, state}
      true ->
        case :gen_tcp.send(state.socket, encode_command(CommandPing.new())) do
          :ok ->
            Process.send_after(self(), :send_ping, @ping_interval)
            {:noreply, state}
          {:error, _} = err ->
            {:disconnect, err, state}
        end
    end
  end
  @impl true
  # Replies to a broker Ping (queued by handle_command(%CommandPing{}, ...)).
  def handle_info(:send_pong, state) do
    Logger.debug("Sending Pong to broker #{state.broker_name}")
    case :gen_tcp.send(state.socket, encode_command(CommandPong.new())) do
      :ok -> {:noreply, state}
      {:error, _} = err -> {:disconnect, err, state}
    end
  end
  @impl true
  # A monitored producer/consumer process died: log, demonitor, drop it from
  # the relevant map, and tell the broker to close the corresponding producer
  # or consumer. The close request is tracked with `nil` as the caller since
  # nobody is waiting on a reply.
  def handle_info({:DOWN, _, _, pid, reason}, state) do
    producer = state.producers |> Enum.find(&match?({_, {^pid, _}}, &1))
    consumer = state.consumers |> Enum.find(&match?({_, {^pid, _}}, &1))
    case {producer, consumer} do
      {{producer_id, {_, ref}}, nil} ->
        if exception?(reason) do
          Logger.error(
            "Closing producer #{producer_id} on broker #{state.broker_name}, #{inspect(reason)}"
          )
        else
          Logger.info(
            "Closing producer #{producer_id} on broker #{state.broker_name}, #{inspect(reason)}"
          )
        end
        Process.demonitor(ref)
        producers = Map.delete(state.producers, producer_id)
        request =
          CommandCloseProducer.new(
            request_id: state.last_request_id + 1,
            producer_id: producer_id
          )
        state = %{state | last_request_id: request.request_id, producers: producers}
        case :gen_tcp.send(state.socket, encode_command(request)) do
          :ok ->
            requests =
              Map.put(
                state.requests,
                {:request_id, request.request_id},
                {nil, System.monotonic_time(:millisecond), request}
              )
            {:noreply, %{state | requests: requests}}
          {:error, _} = err ->
            {:disconnect, err, state}
        end
      {nil, {consumer_id, {_, ref}}} ->
        if exception?(reason) do
          Logger.error(
            "Stopping consumer #{consumer_id} on broker #{state.broker_name}, #{inspect(reason)}"
          )
        else
          Logger.info(
            "Stopping consumer #{consumer_id} on broker #{state.broker_name}, #{inspect(reason)}"
          )
        end
        Process.demonitor(ref)
        consumers = Map.delete(state.consumers, consumer_id)
        request =
          CommandCloseConsumer.new(
            request_id: state.last_request_id + 1,
            consumer_id: consumer_id
          )
        state = %{state | last_request_id: request.request_id, consumers: consumers}
        case :gen_tcp.send(state.socket, encode_command(request)) do
          :ok ->
            requests =
              Map.put(
                state.requests,
                {:request_id, request.request_id},
                {nil, System.monotonic_time(:millisecond), request}
              )
            {:noreply, %{state | requests: requests}}
          {:error, _} = err ->
            {:disconnect, err, state}
        end
      {nil, nil} ->
        Logger.error(
          "Detected unexpected process down on broker #{state.broker_name}, #{inspect(reason)}"
        )
        {:noreply, state}
    end
  end
  # ================== handle_command! =====================
  # Broker Ping: queue a Pong to be written from handle_info.
  defp handle_command(%CommandPing{}, _, state) do
    Logger.debug("Received Ping from broker #{state.broker_name}")
    Process.send(self(), :send_pong, [])
    state
  end
  # Broker Pong: liveness already refreshed by the :tcp handler; nothing to do.
  defp handle_command(%CommandPong{}, _, state) do
    Logger.debug("Received Pong from broker #{state.broker_name}")
    state
  end
  # we don't want to handle consumer message here, we will bundle them to consumers
  defp handle_command(%CommandMessage{}, _, state), do: state
  # Broker-initiated producer close: drop it from the map, demonitor, and ask
  # the producer process to shut itself down.
  defp handle_command(%CommandCloseProducer{producer_id: producer_id}, _, state) do
    Logger.warn(
      "Received CloseProducer from broker #{state.broker_name} for producer #{producer_id}"
    )
    {producer, producers} = Map.pop(state.producers, producer_id)
    case producer do
      {pid, ref} ->
        Process.demonitor(ref)
        GenServer.cast(pid, :close)
      nil ->
        Logger.error("Producer #{producer_id} is already terminated")
    end
    %{state | producers: producers}
  end
  # Broker-initiated consumer close: mirror of the producer case above.
  defp handle_command(%CommandCloseConsumer{consumer_id: consumer_id}, _, state) do
    Logger.warn(
      "Received CloseConsumer from broker #{state.broker_name} for consumer #{consumer_id}"
    )
    {consumer, consumers} = Map.pop(state.consumers, consumer_id)
    case consumer do
      {pid, ref} ->
        Process.demonitor(ref)
        GenServer.cast(pid, :close)
      nil ->
        Logger.error("Consumer #{consumer_id} is already terminated")
    end
    %{state | consumers: consumers}
  end
  # ProducerSuccess with producer_ready: complete the pending :create_producer
  # call — reply with the negotiated producer details, record telemetry, and
  # start monitoring the caller so a crash closes the producer on the broker.
  defp handle_command(%CommandProducerSuccess{producer_ready: true} = response, _, state) do
    {{pid, _} = from, ts, request} = Map.get(state.requests, {:request_id, response.request_id})
    duration = System.monotonic_time(:millisecond) - ts
    Logger.debug(
      "Created producer #{request.producer_id} on broker #{state.broker_name} after #{duration}ms"
    )
    requests = Map.delete(state.requests, {:request_id, response.request_id})
    reply = %{
      producer_id: request.producer_id,
      producer_name: response.producer_name,
      last_sequence_id: response.last_sequence_id,
      max_message_size: state.max_message_size,
      producer_access_mode: request.producer_access_mode,
      properties: from_kv(request.metadata)
    }
    GenServer.reply(from, {:ok, reply})
    :telemetry.execute(
      [:pulsar_ex, :connection, :create_producer, :success],
      %{count: 1, duration: duration},
      state.metadata
    )
    ref = Process.monitor(pid)
    producers = Map.put(state.producers, request.producer_id, {pid, ref})
    %{state | requests: requests, producers: producers}
  end
  # ProducerSuccess without producer_ready: leave the pending request in place
  # (only logs; the caller keeps waiting for a ready/err response).
  defp handle_command(%CommandProducerSuccess{} = response, _, state) do
    {_, ts, request} = Map.get(state.requests, {:request_id, response.request_id})
    duration = System.monotonic_time(:millisecond) - ts
    Logger.warn(
      "Producer #{request.producer_id} not ready on broker #{state.broker_name}, after #{duration}ms"
    )
    state
  end
  # Generic Success: dispatch on the original request that was parked under
  # this request_id — completes a subscribe (reply + monitor the consumer) or
  # confirms a close-producer/close-consumer issued without a waiting caller.
  defp handle_command(%CommandSuccess{} = response, _, state) do
    {request_info, requests} = Map.pop(state.requests, {:request_id, response.request_id})
    state = %{state | requests: requests}
    case request_info do
      {{pid, _} = from, ts, %CommandSubscribe{} = request} ->
        duration = System.monotonic_time(:millisecond) - ts
        Logger.debug(
          "Subscribed consumer #{request.consumer_id} on broker #{state.broker_name}, after #{
            duration
          }ms"
        )
        reply = %{
          consumer_id: request.consumer_id,
          consumer_name: request.consumer_name,
          subscription_type: request.subType,
          priority_level: request.priority_level,
          read_compacted: request.read_compacted,
          initial_position: request.initialPosition,
          properties: from_kv(request.metadata)
        }
        GenServer.reply(from, {:ok, reply})
        :telemetry.execute(
          [:pulsar_ex, :connection, :subscribe, :success],
          %{count: 1, duration: duration},
          state.metadata
        )
        ref = Process.monitor(pid)
        consumers = Map.put(state.consumers, request.consumer_id, {pid, ref})
        %{state | consumers: consumers}
      {nil, ts, %CommandCloseProducer{producer_id: producer_id}} ->
        duration = System.monotonic_time(:millisecond) - ts
        Logger.debug(
          "Stopped producer #{producer_id} from broker #{state.broker_name}, after #{duration}ms"
        )
        state
      {nil, ts, %CommandCloseConsumer{consumer_id: consumer_id}} ->
        duration = System.monotonic_time(:millisecond) - ts
        Logger.debug(
          "Stopped consumer #{consumer_id} from broker #{state.broker_name}, after #{duration}ms"
        )
        state
    end
  end
# Handles a broker CommandError for a pending request (keyed by request_id).
# Producer/subscribe requests have a waiting caller that receives
# {:error, err}; close requests carry `nil` instead of a caller, so their
# failure is only logged.
defp handle_command(%CommandError{error: err} = response, _, state) do
  {request_info, requests} = Map.pop(state.requests, {:request_id, response.request_id})
  state = %{state | requests: requests}

  case request_info do
    # Producer registration failed.
    {from, ts, %CommandProducer{} = request} ->
      duration = System.monotonic_time(:millisecond) - ts

      Logger.error(
        "Error connecting producer #{request.producer_id} on broker #{state.broker_name}, after #{duration}ms, #{inspect(err)}"
      )

      GenServer.reply(from, {:error, err})

      :telemetry.execute(
        [:pulsar_ex, :connection, :create_producer, :error],
        %{count: 1},
        state.metadata
      )

      state

    # Consumer subscription failed.
    {from, ts, %CommandSubscribe{} = request} ->
      duration = System.monotonic_time(:millisecond) - ts

      Logger.error(
        "Error subscribing to topic #{request.topic} for consumer #{request.consumer_id} on broker #{state.broker_name}, after #{duration}ms, #{inspect(err)}"
      )

      GenServer.reply(from, {:error, err})

      :telemetry.execute(
        [:pulsar_ex, :connection, :subscribe, :error],
        %{count: 1},
        state.metadata
      )

      state

    # Closing a producer failed; nothing waits on the reply.
    {nil, ts, %CommandCloseProducer{producer_id: producer_id}} ->
      duration = System.monotonic_time(:millisecond) - ts

      Logger.error(
        "Error stopping producer #{producer_id} from broker #{state.broker_name}, after #{duration}ms, #{inspect(err)}"
      )

      state

    # Closing a consumer failed; nothing waits on the reply.
    {nil, ts, %CommandCloseConsumer{consumer_id: consumer_id}} ->
      duration = System.monotonic_time(:millisecond) - ts

      Logger.error(
        "Error stopping consumer #{consumer_id} from broker #{state.broker_name}, after #{duration}ms, #{inspect(err)}"
      )

      state
  end
end
# Handles a send receipt: the broker confirmed a published message. The
# pending request is keyed by {producer_id, sequence_id}; the waiting caller
# receives the broker-assigned message id and a success metric is emitted.
defp handle_command(%CommandSendReceipt{} = response, _, state) do
  {{from, ts, _}, requests} =
    Map.pop(state.requests, {:sequence_id, response.producer_id, response.sequence_id})

  state = %{state | requests: requests}
  duration = System.monotonic_time(:millisecond) - ts

  Logger.debug(
    "Received Send Receipt from broker #{state.broker_name} for producer #{response.producer_id}, after #{duration}ms"
  )

  GenServer.reply(from, {:ok, response.message_id})

  :telemetry.execute(
    [:pulsar_ex, :connection, :send, :success],
    %{count: 1, duration: duration},
    state.metadata
  )

  state
end
# Handles a send error: the broker rejected a published message. The waiting
# caller (keyed by {producer_id, sequence_id}) gets {:error, err} and an
# error metric is emitted.
defp handle_command(%CommandSendError{error: err} = response, _, state) do
  {{from, ts, _}, requests} =
    Map.pop(state.requests, {:sequence_id, response.producer_id, response.sequence_id})

  state = %{state | requests: requests}
  duration = System.monotonic_time(:millisecond) - ts

  Logger.error(
    "Received Send Error from broker #{state.broker_name} for producer #{response.producer_id}, #{inspect(err)}, after #{duration}ms"
  )

  GenServer.reply(from, {:error, err})

  :telemetry.execute(
    [:pulsar_ex, :connection, :send, :error],
    %{count: 1},
    state.metadata
  )

  state
end
# Handles a successful ack response (error: nil). Acks are fire-and-forget —
# the pending entry carries `nil` instead of a caller — so this only logs and
# drops the pending request.
defp handle_command(
       %CommandAckResponse{request_id: request_id, error: nil} = response,
       _,
       state
     ) do
  {{nil, ts, request}, requests} = Map.pop(state.requests, {:request_id, request_id})
  state = %{state | requests: requests}
  duration = System.monotonic_time(:millisecond) - ts

  Logger.debug(
    "Received Ack Response from broker #{state.broker_name} for consumer #{request.consumer_id}, #{inspect(duration)}ms, #{inspect(response)}"
  )

  state
end
# Handles a failed ack response (error present). No caller is notified —
# acks are fire-and-forget — so the failure is logged and the pending
# request dropped.
defp handle_command(%CommandAckResponse{request_id: request_id} = response, _, state) do
  {{nil, ts, request}, requests} = Map.pop(state.requests, {:request_id, request_id})
  state = %{state | requests: requests}
  duration = System.monotonic_time(:millisecond) - ts

  Logger.error(
    "Received Ack Error from broker #{state.broker_name} for consumer #{request.consumer_id}, #{inspect(response)}, after #{duration}ms"
  )

  state
end
# The broker reports that the active consumer changed; nothing to track in
# our state, only a debug log.
defp handle_command(%CommandActiveConsumerChange{}, _, state) do
  Logger.debug("Consumer status changed")
  state
end
# Distinguishes abnormal exit reasons from the normal shutdown family
# (:normal, :shutdown, {:shutdown, _}).
defp exception?(reason) do
  case reason do
    :normal -> false
    :shutdown -> false
    {:shutdown, _} -> false
    _ -> true
  end
end
end
| 27.744053 | 113 | 0.60417 |
9e0c372e0110fa2781caa0501a233d0f85300db1 | 414 | ex | Elixir | unix_bridge/lib/unix_bridge/checker.ex | Cleawing/elixir-projects | 6562790191536f317aa0b673c265abb604dd2833 | [
"Apache-2.0"
] | null | null | null | unix_bridge/lib/unix_bridge/checker.ex | Cleawing/elixir-projects | 6562790191536f317aa0b673c265abb604dd2833 | [
"Apache-2.0"
] | null | null | null | unix_bridge/lib/unix_bridge/checker.ex | Cleawing/elixir-projects | 6562790191536f317aa0b673c265abb604dd2833 | [
"Apache-2.0"
] | null | null | null | # defmodule UnixBridge.Checker do
# @spec config(UnixBridge.Config.t) :: {:ok, UnixBridge.Config.t} | {:error, String.t}
# def config(config) do
# end
#
# @spec tcp(String.t) :: {:ok, String.t} | {:error, String.t}
# def tcp(binded_to) do
# end
#
# @spec http(String.t, String.t, boolean) :: {:ok, String.t} | {:error, String.t}
# def http(binded_to, uri \\ "/", secured \\ false) do
# end
# end
| 29.571429 | 88 | 0.603865 |
9e0c608b844ba7f7adcf196a00ab0e453b974ac0 | 266 | exs | Elixir | .formatter.exs | maartenvanvliet/codex | 4026a5bc64f5aafd94ce21ee4ec922ea79af49c9 | [
"MIT"
] | 3 | 2021-05-16T14:13:18.000Z | 2021-05-18T18:51:06.000Z | .formatter.exs | maartenvanvliet/codex | 4026a5bc64f5aafd94ce21ee4ec922ea79af49c9 | [
"MIT"
] | 11 | 2021-09-22T04:15:16.000Z | 2022-03-24T04:14:17.000Z | .formatter.exs | maartenvanvliet/codex | 4026a5bc64f5aafd94ce21ee4ec922ea79af49c9 | [
"MIT"
] | null | null | null | # Used by "mix format"
# Calls the formatter keeps free of parentheses (DSL-style macros).
locals_without_parens = [step: 1, step: 2, params: 1]

[
  # Files that `mix format` will format.
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
  locals_without_parens: locals_without_parens,
  # Exported so dependent projects inherit the same parens-free calls.
  export: [
    locals_without_parens: locals_without_parens
  ]
]
| 24.181818 | 70 | 0.684211 |
9e0c68b268c91ccd24b5dd5ff25fffa6df9f1661 | 26 | exs | Elixir | example/main.exs | tobegit3hub/lambda-docker | 352701b338735af9fa8a516900896e884febe99b | [
"MIT"
] | 343 | 2015-12-19T09:48:37.000Z | 2022-03-30T03:20:52.000Z | example/main.exs | tobegit3hub/lambda-docker | 352701b338735af9fa8a516900896e884febe99b | [
"MIT"
] | 5 | 2015-12-26T07:25:11.000Z | 2017-01-05T02:35:57.000Z | example/main.exs | tobegit3hub/lambda-docker | 352701b338735af9fa8a516900896e884febe99b | [
"MIT"
] | 47 | 2016-01-03T06:28:00.000Z | 2021-01-12T07:44:38.000Z | IO.puts "Run Elixir code"
| 13 | 25 | 0.730769 |
9e0c814f002534b7db8ee0660ab33515718591db | 2,369 | ex | Elixir | clients/ad_mob/lib/google_api/ad_mob/v1/model/date.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_mob/lib/google_api/ad_mob/v1/model/date.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_mob/lib/google_api/ad_mob/v1/model/date.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdMob.V1.Model.Date do
  @moduledoc """
  Represents a whole or partial calendar date, such as a birthday. The time of
  day and time zone are either specified elsewhere or are insignificant. The
  date is relative to the Gregorian Calendar. This can represent one of the
  following:

  * A full date, with non-zero year, month, and day values
  * A month and day value, with a zero year, such as an anniversary
  * A year on its own, with zero month and day values
  * A year and month value, with a zero day, such as a credit card expiration date

  Related types are google.type.TimeOfDay and `google.protobuf.Timestamp`.

  ## Attributes

  * `day` (*type:* `integer()`, *default:* `nil`) - Day of a month. Must be from 1 to 31 and valid for the year and month, or 0 to specify a year by itself or a year and month where the day isn't significant.
  * `month` (*type:* `integer()`, *default:* `nil`) - Month of a year. Must be from 1 to 12, or 0 to specify a year without a month and day.
  * `year` (*type:* `integer()`, *default:* `nil`) - Year of the date. Must be from 1 to 9999, or 0 to specify a date without a year.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :day => integer() | nil,
          :month => integer() | nil,
          :year => integer() | nil
        }

  # field/1 is presumably a macro brought in by `use GoogleApi.Gax.ModelBase`
  # that declares the JSON fields of this model — TODO confirm in ModelBase.
  field(:day)
  field(:month)
  field(:year)
end
# Delegates Poison JSON decoding to the model module's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.AdMob.V1.Model.Date do
  def decode(value, options) do
    GoogleApi.AdMob.V1.Model.Date.decode(value, options)
  end
end
# Delegates Poison JSON encoding to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.AdMob.V1.Model.Date do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 44.698113 | 567 | 0.710426 |
9e0c8e079b8770af15c9b11ca251ccad986ce04a | 1,274 | ex | Elixir | lib/blue_heron/att/responses/find_by_type_value_response.ex | amclain/blue_heron | e1802097ef6a845e28a8be56076f3b81ebb56206 | [
"Apache-2.0"
] | 45 | 2020-10-17T13:34:15.000Z | 2022-03-08T09:40:43.000Z | lib/blue_heron/att/responses/find_by_type_value_response.ex | amclain/blue_heron | e1802097ef6a845e28a8be56076f3b81ebb56206 | [
"Apache-2.0"
] | 20 | 2020-10-15T15:05:54.000Z | 2022-03-27T15:54:36.000Z | lib/blue_heron/att/responses/find_by_type_value_response.ex | amclain/blue_heron | e1802097ef6a845e28a8be56076f3b81ebb56206 | [
"Apache-2.0"
] | 11 | 2020-10-23T17:18:57.000Z | 2022-03-15T20:01:49.000Z | defmodule BlueHeron.ATT.FindByTypeValueResponse do
defstruct [:opcode, :handles_information_list]
defmodule HandlesInformation do
  # One entry of a Find By Type Value response: the handle of a matching
  # attribute and the end handle of its group.
  defstruct [:found_attribute_handle, :group_end_handle]

  # Encodes an entry as two little-endian uint16s (4 bytes total).
  def serialize(%{found_attribute_handle: found, group_end_handle: group_end}) do
    <<found::little-16, group_end::little-16>>
  end

  # Decodes a 4-byte entry back into a struct.
  def deserialize(<<found::little-16, group_end::little-16>>) do
    %__MODULE__{found_attribute_handle: found, group_end_handle: group_end}
  end
end
# Encodes the full response: the 0x07 opcode byte followed by every
# serialized handles-information entry, concatenated in order.
def serialize(%{handles_information_list: handles_information_list}) do
  entries = Enum.map_join(handles_information_list, &HandlesInformation.serialize/1)
  <<0x07, entries::binary>>
end
# Decodes a response binary: the 0x07 opcode followed by 4-byte entries.
def deserialize(<<0x07, payload::binary>>) do
  entries = for <<entry::binary-4 <- payload>>, do: HandlesInformation.deserialize(entry)

  %__MODULE__{opcode: 0x07, handles_information_list: entries}
end
end
| 31.85 | 90 | 0.736264 |
9e0c977d14c1e5f3e19708af05106908c535b6e7 | 811 | exs | Elixir | test/validation/polygon_complex_a_test.exs | otherchris/topo | 228c2c371c5e89cd1297662da7701c52c5d8b167 | [
"MIT"
] | null | null | null | test/validation/polygon_complex_a_test.exs | otherchris/topo | 228c2c371c5e89cd1297662da7701c52c5d8b167 | [
"MIT"
] | null | null | null | test/validation/polygon_complex_a_test.exs | otherchris/topo | 228c2c371c5e89cd1297662da7701c52c5d8b167 | [
"MIT"
] | null | null | null | defmodule Intersect.Validation.PolygonComplexATest do
use ExUnit.Case
@tag :validation
# Validation case 08-001: diamond polygon A overlaps the four square shells
# of multipolygon B at A's vertices; the shapes intersect (in both argument
# orders) but neither contains, is within, nor equals the other.
test "08-001 - AC A-shells overlapping B-shell at A-vertex" do
  a = "POLYGON ((100 60, 140 100, 100 140, 60 100, 100 60))" |> Geo.WKT.decode()

  b =
    "MULTIPOLYGON (((80 40, 120 40, 120 80, 80 80, 80 40)),((120 80, 160 80, 160 120, 120 120, 120 80)),((80 120, 120 120, 120 160, 80 160, 80 120)),((40 80, 80 80, 80 120, 40 120, 40 80)))"
    |> Geo.WKT.decode()

  # Intersection and disjointness are expected to be symmetric.
  assert Topo.intersects?(a, b) === true
  assert Topo.intersects?(b, a) === true
  assert Topo.disjoint?(a, b) === false
  assert Topo.disjoint?(b, a) === false
  assert Topo.contains?(a, b) === false
  assert Topo.within?(a, b) === false
  assert Topo.equals?(a, b) === false
  assert Topo.equals?(b, a) === false
end
end
| 36.863636 | 192 | 0.607891 |
9e0c9f05aa6509a19ad57adc7026a220d258e61a | 563 | ex | Elixir | lib/appsignal/error.ex | MeterSoft/appsignal-elixir | 52e3505b9dc90bce0795c4753a758d40bdf41463 | [
"MIT"
] | 234 | 2016-08-18T20:43:15.000Z | 2022-02-27T11:31:48.000Z | lib/appsignal/error.ex | MeterSoft/appsignal-elixir | 52e3505b9dc90bce0795c4753a758d40bdf41463 | [
"MIT"
] | 563 | 2016-07-25T17:45:14.000Z | 2022-03-21T11:39:29.000Z | lib/appsignal/error.ex | MeterSoft/appsignal-elixir | 52e3505b9dc90bce0795c4753a758d40bdf41463 | [
"MIT"
] | 86 | 2016-09-13T22:53:46.000Z | 2022-02-16T11:03:51.000Z | defmodule Appsignal.Error do
@moduledoc false
# Builds the {name, message, backtrace} triple for an exception struct.
def metadata(%_{__exception__: true} = exception, stack) do
  name = inspect(exception.__struct__)
  message = Exception.format_banner(:error, exception, stack)
  backtrace = Appsignal.Stacktrace.format(stack)
  {name, message, backtrace}
end
# For :error kinds, normalize the raw reason into an exception struct first,
# then reuse the two-argument clause.
def metadata(:error, reason, stack) do
  exception = Exception.normalize(:error, reason, stack)
  metadata(exception, stack)
end
# Fallback for non-:error kinds (e.g. :throw/:exit): format the banner for
# the raw kind/reason pair.
def metadata(kind, reason, stack) do
  kind_name = inspect(kind)
  banner = Exception.format_banner(kind, reason, stack)
  {kind_name, banner, Appsignal.Stacktrace.format(stack)}
end
end
| 22.52 | 61 | 0.666075 |
9e0caf52e3fbc478f38afa36703da04a873eb945 | 343 | exs | Elixir | leap/leap.exs | wobh/xelixir | 63e5beec40d5c441cf6cf87f30c29848f481bb1b | [
"MIT"
] | null | null | null | leap/leap.exs | wobh/xelixir | 63e5beec40d5c441cf6cf87f30c29848f481bb1b | [
"MIT"
] | null | null | null | leap/leap.exs | wobh/xelixir | 63e5beec40d5c441cf6cf87f30c29848f481bb1b | [
"MIT"
] | null | null | null | defmodule Year do
@doc """
Returns whether 'year' is a leap year.

A leap year occurs:

on every year that is evenly divisible by 4
  except every year that is evenly divisible by 100
    except every year that is evenly divisible by 400.
"""
@spec leap_year?(non_neg_integer) :: boolean
def leap_year?(year) do
  # The original stub had an empty body (returned nil). Implement the rule
  # documented above: divisible by 4, unless it is a century year that is
  # not divisible by 400.
  rem(year, 4) == 0 and (rem(year, 100) != 0 or rem(year, 400) == 0)
end
end
| 21.4375 | 56 | 0.693878 |
9e0cdc385267c83143f5cfd4835178cc9b58b0e0 | 647 | ex | Elixir | lib/tarjeta.ex | ngarbezza/unq-sd-sube | 48481dd3e6d9e23b6271c313c4fe1a081e5365ca | [
"MIT"
] | null | null | null | lib/tarjeta.ex | ngarbezza/unq-sd-sube | 48481dd3e6d9e23b6271c313c4fe1a081e5365ca | [
"MIT"
] | null | null | null | lib/tarjeta.ex | ngarbezza/unq-sd-sube | 48481dd3e6d9e23b6271c313c4fe1a081e5365ca | [
"MIT"
] | null | null | null | defmodule Tarjeta do
@moduledoc false
@enforce_keys [:id]
defstruct id: nil, saldo: 0
# Creates a card with the given id and the default (zero) balance.
def nueva_tarjeta(id) do
  %__MODULE__{id: id}
end
# Adds `dinero` to the card's balance and returns the updated card.
def cargar(tarjeta, dinero) do
  %{tarjeta | saldo: tarjeta.saldo + dinero}
end
# Subtracts `dinero` when the resulting balance stays within the allowed
# debt; otherwise returns the insufficient-balance error tuple.
def descontar(tarjeta, dinero) do
  case puede_descontar(tarjeta, dinero) do
    true -> {:ok, %{tarjeta | saldo: tarjeta.saldo - dinero}}
    false -> error_de_saldo_insuficiente()
  end
end
# Standard insufficient-balance error tuple.
def error_de_saldo_insuficiente do
  {:error, "Saldo insuficiente"}
end
# A card may go into debt, but only down to a balance of -20.
defp puede_descontar(%{saldo: saldo}, dinero) do
  saldo - dinero >= -20
end
end
| 21.566667 | 69 | 0.703246 |
9e0d078085654facd5564ddcd20500a87b549e24 | 356 | exs | Elixir | priv/repo/migrations/20180724180328_create_steps.exs | kenkeiras/MonTree | 7ec724634c123ee085921c4cac07800c9a15e4ec | [
"WTFPL"
] | 2 | 2018-10-26T06:06:42.000Z | 2020-02-16T15:01:42.000Z | priv/repo/migrations/20180724180328_create_steps.exs | kenkeiras/TechTree | 26c8ca59855002a88bf15eb0b64a6c788f438ec8 | [
"WTFPL"
] | 108 | 2018-10-25T10:30:33.000Z | 2021-07-28T04:18:03.000Z | priv/repo/migrations/20180724180328_create_steps.exs | kenkeiras/TechTree | 26c8ca59855002a88bf15eb0b64a6c788f438ec8 | [
"WTFPL"
] | null | null | null | defmodule Techtree.Repo.Migrations.CreateSteps do
use Ecto.Migration
# Creates the steps table: title/description columns plus a mandatory
# project association that is deleted together with its project, and an
# index on the foreign key for lookups.
def change do
  create table(:steps) do
    add :title, :string
    add :description, :text
    add :project_id, references(:projects, on_delete: :delete_all),
      null: false

    timestamps()
  end

  create index(:steps, [:project_id])
end
end
| 20.941176 | 69 | 0.632022 |
9e0d137ffd86865976e73c888b538a2c30185d54 | 704 | ex | Elixir | day_7/day_7.ex | cococov/advent-of-code-2021 | 2d398259730b5f9cbe71d35b8f0caecaf6c6e4f6 | [
"MIT"
] | null | null | null | day_7/day_7.ex | cococov/advent-of-code-2021 | 2d398259730b5f9cbe71d35b8f0caecaf6c6e4f6 | [
"MIT"
] | null | null | null | day_7/day_7.ex | cococov/advent-of-code-2021 | 2d398259730b5f9cbe71d35b8f0caecaf6c6e4f6 | [
"MIT"
] | null | null | null | defmodule Day7 do
# Finds the minimum total fuel cost over all candidate alignment positions
# between the minimum and maximum input position.
#
# `input` is the list of crab positions; `callback.(position)` must return a
# one-argument function giving the cost of moving a single crab to `position`.
#
# Improvements over the original: the range is enumerated directly (the
# `Enum.to_list/1` call was a needless materialization) and the per-position
# costs are summed in one pass instead of building an intermediate list of
# cost lists before summing.
def calculate_fuel_consumption(input, callback) do
  {min, max} = Enum.min_max(input)

  min..max
  |> Enum.map(fn position -> input |> Enum.map(callback.(position)) |> Enum.sum() end)
  |> Enum.min()
end
# Part 1: each step of movement costs one unit of fuel (linear cost).
def result_1(input) do
  calculate_fuel_consumption(input, fn target ->
    fn position -> abs(target - position) end
  end)
end
# Part 2: the k-th step costs k units, so moving distance n costs the
# triangular number n*(n+1)/2.
def result_2(input) do
  calculate_fuel_consumption(input, fn target ->
    fn position ->
      distance = abs(target - position)
      # n*(n+1) is always even, so integer division is exact. The original
      # trunc(n * (n + 1) / 2) went through float division and could lose
      # precision for very large distances.
      div(distance * (distance + 1), 2)
    end
  end)
end
# Read the comma-separated crab positions from the `input` file and parse
# them into a list of integers.
input =
  File.read!('input')
  |> String.split(~r/,/)
  |> Enum.map(&String.to_integer/1)

# Part 1 uses linear fuel cost, part 2 uses triangular fuel cost.
IO.puts("Answer 1: #{Day7.result_1(input)}")
IO.puts("Answer 2: #{Day7.result_2(input)}")
| 21.333333 | 63 | 0.590909 |
9e0d140985d31b998fc059b4ddbd75020b37f214 | 1,548 | ex | Elixir | clients/container/lib/google_api/container/v1/model/client_certificate_config.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/client_certificate_config.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/client_certificate_config.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Container.V1.Model.ClientCertificateConfig do
  @moduledoc """
  Configuration for client certificates on the cluster.

  ## Attributes

  - issueClientCertificate (boolean()): Issue a client certificate. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :issueClientCertificate => any()
        }

  # field/1 is presumably a macro brought in by `use GoogleApi.Gax.ModelBase`
  # declaring the model's JSON field — TODO confirm in ModelBase.
  field(:issueClientCertificate)
end
# Delegates Poison JSON decoding to the model module's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.Container.V1.Model.ClientCertificateConfig do
  def decode(value, options) do
    GoogleApi.Container.V1.Model.ClientCertificateConfig.decode(value, options)
  end
end
# Delegates Poison JSON encoding to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Container.V1.Model.ClientCertificateConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.25 | 88 | 0.757752 |
9e0d2c3740b876dba766267d68a12d5df34940d8 | 3,853 | ex | Elixir | lib/plausible_web/controllers/invitation_controller.ex | plausible-insights/plausible | 88173342b9e969894879bfb2e8d203426f6a1b1c | [
"MIT"
] | 984 | 2019-09-02T11:36:41.000Z | 2020-06-08T06:25:48.000Z | lib/plausible_web/controllers/invitation_controller.ex | plausible-insights/plausible | 88173342b9e969894879bfb2e8d203426f6a1b1c | [
"MIT"
] | 24 | 2019-09-10T09:53:17.000Z | 2020-06-08T07:35:26.000Z | lib/plausible_web/controllers/invitation_controller.ex | plausible-insights/plausible | 88173342b9e969894879bfb2e8d203426f6a1b1c | [
"MIT"
] | 51 | 2019-09-03T10:48:10.000Z | 2020-06-07T00:23:34.000Z | defmodule PlausibleWeb.InvitationController do
use PlausibleWeb, :controller
use Plausible.Repo
alias Ecto.Multi
alias Plausible.Auth.Invitation
alias Plausible.Site.Membership
plug PlausibleWeb.RequireAccountPlug
# Accepts an invitation on behalf of the logged-in user: upserts their site
# membership with the invited role and deletes the invitation, all in one
# transaction. For ownership transfers the previous owner is demoted and the
# new owner's trial ended as part of the same multi.
def accept_invitation(conn, %{"invitation_id" => invitation_id}) do
  invitation =
    Repo.get_by!(Invitation, invitation_id: invitation_id)
    |> Repo.preload([:site, :inviter])

  user = conn.assigns[:current_user]
  # The user may already be a member (role change); reuse that row if so.
  existing_membership = Repo.get_by(Membership, user_id: user.id, site_id: invitation.site.id)

  multi =
    if invitation.role == :owner do
      Multi.new()
      |> downgrade_previous_owner(invitation.site)
      |> maybe_end_trial_of_new_owner(user)
    else
      Multi.new()
    end

  membership_changeset =
    Membership.changeset(existing_membership || %Membership{}, %{
      user_id: user.id,
      site_id: invitation.site.id,
      role: invitation.role
    })

  multi =
    multi
    |> Multi.insert_or_update(:membership, membership_changeset)
    |> Multi.delete(:invitation, invitation)

  case Repo.transaction(multi) do
    {:ok, changes} ->
      # The :user change is only present when the trial was ended above.
      updated_user = Map.get(changes, :user, user)
      notify_invitation_accepted(invitation)
      Plausible.Billing.SiteLocker.check_sites_for(updated_user)

      conn
      |> put_flash(:success, "You now have access to #{invitation.site.domain}")
      |> redirect(to: "/#{URI.encode_www_form(invitation.site.domain)}")

    {:error, _} ->
      conn
      |> put_flash(:error, "Something went wrong, please try again")
      |> redirect(to: "/sites")
  end
end
# Demotes the site's current :owner membership(s) to :admin inside the given
# multi, making room for the invited user to become the owner.
defp downgrade_previous_owner(multi, site) do
  prev_owner =
    from(
      sm in Plausible.Site.Membership,
      where: sm.site_id == ^site.id,
      where: sm.role == :owner
    )

  Multi.update_all(multi, :prev_owner, prev_owner, set: [role: :admin])
end
# Ends the trial of the new owner unless running self-hosted (self-hosted
# instances have no billing, so trials are irrelevant there).
#
# Bug fix: the original `if` had no `else` branch, so on self-hosted
# instances this returned `nil` instead of the multi, which then crashed the
# subsequent Multi.insert_or_update/3 call in accept_invitation/2.
defp maybe_end_trial_of_new_owner(multi, new_owner) do
  if Application.get_env(:plausible, :is_selfhost) do
    multi
  else
    end_trial_of_new_owner(multi, new_owner)
  end
end
# Adds a :user step to the multi that ends the new owner's trial. Users who
# are past their trial (and have a trial_expiry_date set) keep their current
# billing state, so the multi is returned unchanged for them.
defp end_trial_of_new_owner(multi, new_owner) do
  if Plausible.Billing.on_trial?(new_owner) || is_nil(new_owner.trial_expiry_date) do
    Ecto.Multi.update(multi, :user, Plausible.Auth.User.end_trial(new_owner))
  else
    multi
  end
end
# Deletes the invitation and notifies the inviter that it was rejected.
def reject_invitation(conn, %{"invitation_id" => invitation_id}) do
  invitation =
    Repo.get_by!(Invitation, invitation_id: invitation_id)
    |> Repo.preload([:site, :inviter])

  Repo.delete!(invitation)
  notify_invitation_rejected(invitation)

  conn
  |> put_flash(:success, "You have rejected the invitation to #{invitation.site.domain}")
  |> redirect(to: "/sites")
end
# Ownership transfers get a dedicated acceptance email.
defp notify_invitation_accepted(%Invitation{role: :owner} = invitation) do
  invitation
  |> PlausibleWeb.Email.ownership_transfer_accepted()
  |> Plausible.Mailer.send_email_safe()
end
# Regular (non-owner) invitations get the standard acceptance email.
defp notify_invitation_accepted(invitation) do
  invitation
  |> PlausibleWeb.Email.invitation_accepted()
  |> Plausible.Mailer.send_email_safe()
end
# Ownership transfers get a dedicated rejection email.
defp notify_invitation_rejected(%Invitation{role: :owner} = invitation) do
  invitation
  |> PlausibleWeb.Email.ownership_transfer_rejected()
  |> Plausible.Mailer.send_email_safe()
end
# Regular (non-owner) invitations get the standard rejection email.
defp notify_invitation_rejected(invitation) do
  invitation
  |> PlausibleWeb.Email.invitation_rejected()
  |> Plausible.Mailer.send_email_safe()
end
# Withdraws a pending invitation before it is accepted. No notification is
# sent to the invitee here.
def remove_invitation(conn, %{"invitation_id" => invitation_id}) do
  invitation =
    Repo.get_by!(Invitation, invitation_id: invitation_id)
    |> Repo.preload(:site)

  Repo.delete!(invitation)

  conn
  |> put_flash(:success, "You have removed the invitation for #{invitation.email}")
  |> redirect(to: Routes.site_path(conn, :settings_general, invitation.site.domain))
end
end
| 30.579365 | 96 | 0.694005 |
9e0d46f2b3417b06ad041c063211f675b2c0e111 | 741 | ex | Elixir | examples/new_relic_sandbox_umbrella/apps/new_relic_sandbox_web/lib/new_relic_sandbox_web/gettext.ex | surgeventures/new_relic_integration | 5417f15f7dd17022ee927e0cdd4fca32529ed278 | [
"MIT"
] | null | null | null | examples/new_relic_sandbox_umbrella/apps/new_relic_sandbox_web/lib/new_relic_sandbox_web/gettext.ex | surgeventures/new_relic_integration | 5417f15f7dd17022ee927e0cdd4fca32529ed278 | [
"MIT"
] | 2 | 2019-09-09T08:26:44.000Z | 2019-11-05T04:31:37.000Z | examples/new_relic_sandbox_umbrella/apps/new_relic_sandbox_web/lib/new_relic_sandbox_web/gettext.ex | surgeventures/new_relic_integration | 5417f15f7dd17022ee927e0cdd4fca32529ed278 | [
"MIT"
] | 1 | 2019-09-06T09:27:27.000Z | 2019-09-06T09:27:27.000Z | defmodule NewRelicSandboxWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import NewRelicSandboxWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :new_relic_sandbox_web
end
| 29.64 | 72 | 0.690958 |
9e0d4c806353caa48b1d843928e315f1efe6bfd5 | 376 | ex | Elixir | lib/cineplex_web.ex | upmaru/cineplex | 7d1d516d3e3d3683b2ad4425b61517a8f556f721 | [
"MIT"
] | null | null | null | lib/cineplex_web.ex | upmaru/cineplex | 7d1d516d3e3d3683b2ad4425b61517a8f556f721 | [
"MIT"
] | null | null | null | lib/cineplex_web.ex | upmaru/cineplex | 7d1d516d3e3d3683b2ad4425b61517a8f556f721 | [
"MIT"
] | null | null | null | defmodule CineplexWeb do
use Plug.Router
# Parse urlencoded and JSON request bodies (JSON decoded with Jason).
plug(
  Plug.Parsers,
  parsers: [:urlencoded, :json],
  pass: ["application/json"],
  json_decoder: Jason
)

# Health-check endpoint plug and Timber request-event logging.
plug(CineplexWeb.Plugs.Health)
plug(Timber.Integrations.EventPlug)

# Plug.Router plumbing: match the route, then dispatch to it.
plug(:match)
plug(:dispatch)

# Delegate everything under /jobs to the jobs sub-router.
forward("/jobs", to: CineplexWeb.Jobs)
# Catch-all for requests no other route matched.
match _ do
  send_resp(conn, 404, "not found")
end
end
| 15.666667 | 40 | 0.667553 |
9e0d4c93f8520805a9f4af9c054400f01948f246 | 257 | ex | Elixir | lib/easiest.ex | GleisonAndrade/easiest_api | d30298974fbd0d2538ba25ab1a43e125ea4cf6bf | [
"Apache-2.0"
] | null | null | null | lib/easiest.ex | GleisonAndrade/easiest_api | d30298974fbd0d2538ba25ab1a43e125ea4cf6bf | [
"Apache-2.0"
] | null | null | null | lib/easiest.ex | GleisonAndrade/easiest_api | d30298974fbd0d2538ba25ab1a43e125ea4cf6bf | [
"Apache-2.0"
] | null | null | null | defmodule EasiestApp do
@moduledoc """
EasiestApp keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 25.7 | 66 | 0.758755 |
9e0d81d56b6048a1091a6936d729d9d81b06a52f | 1,946 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/auto_install_constraint.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/auto_install_constraint.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/auto_install_constraint.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidEnterprise.V1.Model.AutoInstallConstraint do
  @moduledoc """
  The auto-install constraint. Defines a set of restrictions for installation. At least one of the fields must be set.

  ## Attributes

  * `chargingStateConstraint` (*type:* `String.t`, *default:* `nil`) - Charging state constraint.
  * `deviceIdleStateConstraint` (*type:* `String.t`, *default:* `nil`) - Device idle state constraint.
  * `networkTypeConstraint` (*type:* `String.t`, *default:* `nil`) - Network type constraint.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :chargingStateConstraint => String.t(),
          :deviceIdleStateConstraint => String.t(),
          :networkTypeConstraint => String.t()
        }

  # field/1 is presumably a macro brought in by `use GoogleApi.Gax.ModelBase`
  # declaring the model's JSON fields — TODO confirm in ModelBase.
  field(:chargingStateConstraint)
  field(:deviceIdleStateConstraint)
  field(:networkTypeConstraint)
end
# Delegates Poison JSON decoding to the model module's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.AndroidEnterprise.V1.Model.AutoInstallConstraint do
  def decode(value, options) do
    GoogleApi.AndroidEnterprise.V1.Model.AutoInstallConstraint.decode(value, options)
  end
end
# Delegates Poison JSON encoding to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.AndroidEnterprise.V1.Model.AutoInstallConstraint do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.716981 | 118 | 0.741521 |
9e0d9a00968d9806b0e18eac1bb052ed3aac6f67 | 749 | exs | Elixir | apps/idp/priv/repo/migrations/20191031215056_create_migration_history_table.exs | lbrty/idp-backend | 81d5f10ef6177a1e678b994331c5a09abbdca8d6 | [
"Apache-2.0"
] | null | null | null | apps/idp/priv/repo/migrations/20191031215056_create_migration_history_table.exs | lbrty/idp-backend | 81d5f10ef6177a1e678b994331c5a09abbdca8d6 | [
"Apache-2.0"
] | null | null | null | apps/idp/priv/repo/migrations/20191031215056_create_migration_history_table.exs | lbrty/idp-backend | 81d5f10ef6177a1e678b994331c5a09abbdca8d6 | [
"Apache-2.0"
] | null | null | null | defmodule Idp.Repo.Migrations.CreateMigrationHistoryTable do
use Ecto.Migration
# Creates the migration_history table with a UUID primary key and three
# foreign keys: the displaced person (deleted with the person), the
# destination city (nilified when the city is removed) and the destination
# state (deleted with the state). Each foreign key gets its own index.
def change do
  create table(:migration_history, primary_key: false) do
    add :id, :uuid, primary_key: true
    add :notes, :string
    add :migration_date, :utc_datetime

    add :displaced_person_id, references(:displaced_persons, on_delete: :delete_all, type: :binary_id)
    add :to_city_id, references(:cities, on_delete: :nilify_all, type: :binary_id)
    add :to_state_id, references(:states, on_delete: :delete_all, type: :binary_id)

    timestamps()
  end

  create index(:migration_history, [:displaced_person_id])
  create index(:migration_history, [:to_city_id])
  create index(:migration_history, [:to_state_id])
end
end
| 32.565217 | 104 | 0.723632 |
9e0dd0d36fbf56dad9ab846b8ca1ad1d5ade1831 | 102 | ex | Elixir | lib/grpow/repo.ex | Gitardo/grpow | 047a1a7faaa28006e59069d55dd783a048a287cf | [
"MIT"
] | null | null | null | lib/grpow/repo.ex | Gitardo/grpow | 047a1a7faaa28006e59069d55dd783a048a287cf | [
"MIT"
] | 2 | 2021-03-11T05:11:12.000Z | 2021-05-11T22:29:52.000Z | lib/grpow/repo.ex | g2t3s/grpow | 047a1a7faaa28006e59069d55dd783a048a287cf | [
"MIT"
] | null | null | null | defmodule Grpow.Repo do
use Ecto.Repo,
otp_app: :grpow,
adapter: Ecto.Adapters.Postgres
end
| 17 | 35 | 0.715686 |
9e0e02357d3de126e8411a447b89258d77480d00 | 960 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/request_builder.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/request_builder.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/request_builder.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.RequestBuilder do
  @moduledoc """
  Helper functions for building Tesla requests.

  This module is no longer used. Please use GoogleApi.Gax.Request instead.
  """

  # Intentionally empty: kept only for backwards compatibility of generated code.
end
| 36.923077 | 77 | 0.763542 |
9e0e1cfdd901b81dc7430311293988571d6db1bd | 2,993 | ex | Elixir | test/support/adapters/bypass/config/config.ex | stephenmoloney/openstex | e62f548d67e9c71317be670ed80c49e97d9b2dd2 | [
"MIT"
] | 6 | 2016-07-08T05:29:23.000Z | 2019-09-17T14:17:06.000Z | test/support/adapters/bypass/config/config.ex | stephenmoloney/openstex | e62f548d67e9c71317be670ed80c49e97d9b2dd2 | [
"MIT"
] | 16 | 2016-05-21T08:21:45.000Z | 2018-03-04T22:11:49.000Z | test/support/adapters/bypass/config/config.ex | stephenmoloney/openstex | e62f548d67e9c71317be670ed80c49e97d9b2dd2 | [
"MIT"
] | 2 | 2017-08-28T22:24:51.000Z | 2019-06-16T14:47:38.000Z | defmodule Openstex.Adapters.Bypass.Config do
@moduledoc :false
# @default_headers [{"Content-Type", "application/json; charset=utf-8"}] |> Enum.into(%{})
# @default_options [timeout: 10000, recv_timeout: 30000]
# @default_adapter HTTPipe.Adapters.Hackney
use Openstex.Adapter.Config
alias Openstex.Adapters.Bypass.Keystone.Utils
# public
def start_agent(client, opts) do
otp_app = Keyword.get(opts, :otp_app, :false) || raise("Client has not been configured correctly, missing `:otp_app`")
identity = create_identity(client, otp_app)
Agent.start_link(fn -> config(client, otp_app, identity) end, name: agent_name(client))
end
@doc "Gets the bypass related config variables from a supervised Agent"
def bypass_config(client) do
Agent.get(agent_name(client), fn(config) -> config[:bypass] end)
end
@doc :false
def swift_service_name do
"swift"
end
@doc :false
def swift_service_type do
"object-store"
end
# private
defp config(client, otp_app, identity) do
[
bypass: bypass_config(client, otp_app),
keystone: keystone_config(client, otp_app, identity),
swift: swift_config(client, otp_app, identity),
hackney: hackney_config(client, otp_app)
]
end
defp bypass_config(client, otp_app) do
client
|> __MODULE__.get_config_from_env(otp_app)
|> Keyword.fetch!(:bypass)
end
defp keystone_config(client, otp_app, identity) do
keystone_config = get_keystone_config_from_env(client, otp_app)
tenant_id = keystone_config[:tenant_id] ||
identity.token.tenant.id ||
raise("cannot retrieve the tenant_id for keystone")
user_id = keystone_config[:user_id] ||
identity.user.id ||
raise("cannot retrieve the user_id for keystone")
endpoint = keystone_config[:endpoint] ||
"http://localhost:3001/"
[
tenant_id: tenant_id,
user_id: user_id,
endpoint: endpoint
]
end
defp swift_config(client, otp_app, _identity) do
swift_config = get_swift_config_from_env(client, otp_app)
account_temp_url_key1 = swift_config[:account_temp_url_key1] ||
"bypass_temp_url_key1"
account_temp_url_key2 = swift_config[:account_temp_url_key2] ||
"bypass_temp_url_key2"
region = swift_config[:region] ||
"Bypass-Region-1"
[
account_temp_url_key1: account_temp_url_key1,
account_temp_url_key2: account_temp_url_key2,
region: region
]
end
defp hackney_config(client, otp_app) do
hackney_config = get_hackney_config_from_env(client, otp_app)
connect_timeout = hackney_config[:timeout] || 30_000
recv_timeout = hackney_config[:recv_timeout] || 180_000
[
timeout: connect_timeout,
recv_timeout: recv_timeout
]
end
defp create_identity(client, _otp_app) do
# return identity struct
Utils.create_identity(client)
end
end
| 28.504762 | 122 | 0.683261 |
9e0e27d7c1ab246dba2f97ed669b7bb21cb4ec63 | 3,164 | ex | Elixir | programming/elixir/conduit/lib/conduit/blog/aggregates/article.ex | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | null | null | null | programming/elixir/conduit/lib/conduit/blog/aggregates/article.ex | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | null | null | null | programming/elixir/conduit/lib/conduit/blog/aggregates/article.ex | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | null | null | null | defmodule Conduit.Blog.Aggregates.Article do
defstruct [
uuid: nil,
slug: nil,
title: nil,
description: nil,
body: nil,
tag_list: nil,
author_uuid: nil,
favorited_by_authors: MapSet.new(),
favorite_count: 0,
]
alias Conduit.Blog.Aggregates.Article
alias Conduit.Blog.Commands.{
FavoriteArticle,
PublishArticle,
UnfavoriteArticle,
}
alias Conduit.Blog.Events.{
ArticleFavorited,
ArticlePublished,
ArticleUnfavorited,
}
@doc """
Publish an article
"""
def execute(%Article{uuid: nil}, %PublishArticle{} = publish) do
%ArticlePublished{
article_uuid: publish.article_uuid,
slug: publish.slug,
title: publish.title,
description: publish.description,
body: publish.body,
tag_list: publish.tag_list,
author_uuid: publish.author_uuid,
}
end
@doc """
Favorite the article for an author
"""
def execute(%Article{uuid: nil}, %FavoriteArticle{}), do: {:error, :article_not_found}
def execute(
%Article{uuid: uuid, favorite_count: favorite_count} = article,
%FavoriteArticle{favorited_by_author_uuid: author_id})
do
case is_favorited?(article, author_id) do
true -> nil
false ->
%ArticleFavorited{
article_uuid: uuid,
favorited_by_author_uuid: author_id,
favorite_count: favorite_count + 1,
}
end
end
@doc """
Unfavorite the article for the user
"""
def execute(%Article{uuid: nil}, %UnfavoriteArticle{}), do: {:error, :article_not_found}
def execute(
%Article{uuid: uuid, favorite_count: favorite_count} = article,
%UnfavoriteArticle{unfavorited_by_author_uuid: author_id})
do
case is_favorited?(article, author_id) do
true ->
%ArticleUnfavorited{
article_uuid: uuid,
unfavorited_by_author_uuid: author_id,
favorite_count: favorite_count - 1,
}
false -> nil
end
end
# state mutators
def apply(%Article{} = article, %ArticlePublished{} = published) do
%Article{article |
uuid: published.article_uuid,
slug: published.slug,
title: published.title,
description: published.description,
body: published.body,
tag_list: published.tag_list,
author_uuid: published.author_uuid,
}
end
def apply(
%Article{favorited_by_authors: favorited_by} = article,
%ArticleFavorited{favorited_by_author_uuid: author_id, favorite_count: favorite_count})
do
%Article{article |
favorited_by_authors: MapSet.put(favorited_by, author_id),
favorite_count: favorite_count,
}
end
def apply(
%Article{favorited_by_authors: favorited_by} = article,
%ArticleUnfavorited{unfavorited_by_author_uuid: author_id, favorite_count: favorite_count})
do
%Article{article |
favorited_by_authors: MapSet.delete(favorited_by, author_id),
favorite_count: favorite_count,
}
end
# private helpers
# Is the article a favorite of the user?
defp is_favorited?(%Article{favorited_by_authors: favorited_by}, user_uuid) do
MapSet.member?(favorited_by, user_uuid)
end
end
| 25.934426 | 95 | 0.678887 |
9e0e4469e450b0aac381aa7abb0661dc2fd7b81e | 1,304 | exs | Elixir | config/dev.exs | bschmeck/ex_gnarl | 25d6961795f10a2d49efd1a29167a771ef9772f1 | [
"MIT"
] | null | null | null | config/dev.exs | bschmeck/ex_gnarl | 25d6961795f10a2d49efd1a29167a771ef9772f1 | [
"MIT"
] | 1 | 2017-04-21T17:02:56.000Z | 2017-04-21T17:02:56.000Z | config/dev.exs | bschmeck/ex_gnarl | 25d6961795f10a2d49efd1a29167a771ef9772f1 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :ex_gnarl, ExGnarl.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
cd: Path.expand("../", __DIR__)]]
# Watch static and templates for browser reloading.
config :ex_gnarl, ExGnarl.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database
config :ex_gnarl, ExGnarl.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "ex_gnarl_dev",
hostname: "localhost",
pool_size: 10
| 29.636364 | 73 | 0.694785 |
9e0ecfebea7f86caff0c278225197e918e7562e5 | 1,635 | exs | Elixir | config/prod.exs | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | config/prod.exs | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | 1 | 2021-07-24T16:26:03.000Z | 2021-07-24T16:26:03.000Z | config/prod.exs | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | import Config
config :sanbase, SanbaseWeb.Endpoint,
http: [
port: {:system, "PORT"},
protocol_options: [
max_header_name_length: 64,
max_header_value_length: 8192,
max_request_line_length: 16_384,
max_headers: 100
]
],
url: [host: "localhost", port: {:system, "PORT"}],
server: true,
cache_static_manifest: "priv/static/cache_manifest.json",
root: '.',
version: Application.spec(:sanbase, :vsn),
load_from_system_env: true,
secret_key_base: "${SECRET_KEY_BASE}",
live_view: [signing_salt: "${PHOENIX_LIVE_VIEW_SIGNING_SALT}"],
check_origin: ["//*.santiment.net"]
config :sanbase, ecto_repos: [Sanbase.Repo]
# Clickhousex does not support `:system` tuples. The configuration is done
# by defining defining `:url` in the ClickhouseRepo `init` function.
config :sanbase, Sanbase.ClickhouseRepo,
adapter: ClickhouseEcto,
loggers: [Ecto.LogEntry, Sanbase.Prometheus.EctoInstrumenter],
hostname: "clickhouse",
port: 8123,
database: "default",
username: "default",
password: "",
timeout: 60_000,
pool_size: {:system, "CLICKHOUSE_POOL_SIZE", "30"},
pool_overflow: 5
# Do not print debug messages in production
config :logger, level: :info
config :sanbase, Sanbase.ExternalServices.Etherscan.RateLimiter,
scale: 1000,
limit: 5,
time_between_requests: 250
config :sanbase, Sanbase.ExternalServices.Coinmarketcap,
api_url: "https://pro-api.coinmarketcap.com/"
config :sanbase, SanbaseWeb.Plug.SessionPlug,
domain: {:system, "SANTIMENT_ROOT_DOMAIN", ".santiment.net"}
if File.exists?("config/prod.secret.exs") do
import_config "prod.secret.exs"
end
| 29.196429 | 74 | 0.724159 |
9e0ed811b391240f50a666191d5322694b68d072 | 821 | ex | Elixir | testData/org/elixir_lang/parser_definition/bit_string_parsing_test_case/OneWithTrailingComma.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/bit_string_parsing_test_case/OneWithTrailingComma.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/bit_string_parsing_test_case/OneWithTrailingComma.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | << &one, >>
<< one \\ two, >>
<< one :: two, >>
<< one | two, >>
<< one = two, >>
<< one or two, >>
<< one || two, >>
<< one and two, >>
<< one && two, >>
<< one != two, >>
<< one < two, >>
<< one |> two, >>
<< one in two, >>
<< one ++ two, >>
<< one..two, >>
<< one + two, >>
<< one / two, >>
<< one * two, >>
<< one ^^^ two, >>
<< !one, >>
<< not one, >>
<< one.(), >>
<< One.Two[three], >>
<< One.Two, >>
<< One.two[three], >>
<< One.two(three)(four), >>
<< One.two, >>
<< @one[two], >>
<< @one, >>
<< one(two)(three), >>
<< one[two], >>
<< one, >>
<< @1, >>
<< &1, >>
<< !1, >>
<< fn -> end, >>
<< ( -> ), >>
<< ?1, >>
<< 1, >>
<< 0b1, >>
<< 0o1, >>
<< 0x1, >>
<< %{}, >>
<< {}, >>
<< <<>>, >>
<< "one", >>
<< """
one
""", >>
<< 'one', >>
<< '''
one
''', >>
<< ~x{one}, >>
<< false, >>
<< :one, >>
<< One, >>
| 14.155172 | 27 | 0.309379 |
9e0ee03060a9e5276d5cf0d1dddd584f314fe616 | 2,136 | exs | Elixir | example/mix.exs | fazibear/kiosk_system_amdgpu_x86_64 | ab083e14d13d1caa3a050960cdccdb43b47dfe1e | [
"Apache-2.0"
] | null | null | null | example/mix.exs | fazibear/kiosk_system_amdgpu_x86_64 | ab083e14d13d1caa3a050960cdccdb43b47dfe1e | [
"Apache-2.0"
] | null | null | null | example/mix.exs | fazibear/kiosk_system_amdgpu_x86_64 | ab083e14d13d1caa3a050960cdccdb43b47dfe1e | [
"Apache-2.0"
] | null | null | null | defmodule Example.MixProject do
use Mix.Project
System.put_env("MIX_TARGET", "x86_64")
@target System.get_env("MIX_TARGET") || "host"
def project do
[
app: :example,
version: "0.1.0",
elixir: "~> 1.4",
target: @target,
archives: [nerves_bootstrap: "~> 1.0-rc"],
deps_path: "deps/#{@target}",
build_path: "_build/#{@target}",
lockfile: "mix.lock.#{@target}",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
aliases: ["loadconfig": [&bootstrap/1]],
deps: deps(),
compilers: [:elixir_make] ++ Mix.compilers,
make_makefile: "mix.mk",
]
end
# Starting nerves_bootstrap adds the required aliases to Mix.Project.config()
# Aliases are only added if MIX_TARGET is set.
def bootstrap(args) do
Application.start(:nerves_bootstrap)
Mix.Task.run("loadconfig", args)
end
# Run "mix help compile.app" to learn about applications.
def application, do: application(@target)
# Specify target specific application configurations
# It is common that the application start function will start and supervise
# applications which could cause the host to fail. Because of this, we only
# invoke Example.start/2 when running on a target.
def application("host") do
[extra_applications: [:logger]]
end
def application(_target) do
[mod: {Example.Application, []}, extra_applications: [:logger]]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:elixir_make, "~> 0.4", runtime: false},
{:nerves, "~> 1.0-rc", runtime: false},
{:ring_logger, "~> 0.4"}
] ++ deps(@target)
end
# Specify target specific dependencies
defp deps("host"), do: []
defp deps(target) do
[
{:shoehorn, "~> 0.2"},
{:nerves_runtime, "~> 0.4"},
{:nerves_network, "~> 0.3"},
{:nerves_init_gadget, "~> 0.1"},
{:muontrap, "~> 0.2"}
] ++ system(target)
end
defp system("x86_64"), do: [{:kiosk_system_amdgpu_x86_64, path: "../", runtime: false}]
defp system(target), do: Mix.raise "Unknown MIX_TARGET: #{target}"
end
| 28.864865 | 89 | 0.628745 |
9e0ee39e9b3bb4bd6c52bd4abdafb3f8fe907bfc | 2,315 | ex | Elixir | DL-DWS/DL-DWS (f02=232263168,s=0.000302459,m0=1370).ex | Realscrat/decentlab-decoders | 3ca5006cd85e3772a15a1b3fff3922c50979eeb6 | [
"MIT"
] | 13 | 2020-01-18T22:08:44.000Z | 2022-02-06T14:19:57.000Z | DL-DWS/DL-DWS (f02=232263168,s=0.000302459,m0=1370).ex | Realscrat/decentlab-decoders | 3ca5006cd85e3772a15a1b3fff3922c50979eeb6 | [
"MIT"
] | 4 | 2019-05-10T07:17:41.000Z | 2021-10-20T16:24:04.000Z | DL-DWS/DL-DWS (f02=232263168,s=0.000302459,m0=1370).ex | Realscrat/decentlab-decoders | 3ca5006cd85e3772a15a1b3fff3922c50979eeb6 | [
"MIT"
] | 15 | 2019-06-04T06:13:32.000Z | 2022-02-15T07:28:52.000Z |
# https://www.decentlab.com/products/weighing-scale-for-lorawan
defmodule DecentlabDecoder do
@protocol_version 2
# device-specific parameters
@f02 232263168
@s 0.000302459
@m0 1370
defp sensor_defs do
[
%{
length: 3,
values: [
%{
:name => "Frequency",
:convert => fn x -> Enum.at(x, 0) / Enum.at(x, 1) * 32768 end,
:unit => "Hz"
},
%{
:name => "Weight",
:convert => fn x -> (:math.pow(Enum.at(x, 0) / Enum.at(x, 1) * 32768, 2) - @f02) * @s + @m0 end,
:unit => "g"
}
]
},
%{
length: 1,
values: [
%{
:name => "Battery voltage",
:convert => fn x -> Enum.at(x, 0) / 1000 end,
:unit => "V"
}
]
}
]
end
def decode(msg, :hex) do
{:ok, bytes} = Base.decode16(msg, case: :mixed)
decode(bytes)
end
def decode(msg) when is_binary(msg), do: decode_binary(msg)
def decode(msg), do: to_string(msg) |> decode
defp decode_binary(<<@protocol_version, device_id::size(16), flags::binary-size(2), bytes::binary>>) do
bytes
|> bytes_to_words()
|> sensor(flags, sensor_defs())
|> Map.put("Device ID", device_id)
|> Map.put("Protocol version", @protocol_version)
end
defp bytes_to_words(<<>>), do: []
defp bytes_to_words(<<word::size(16), rest::binary>>), do: [word | bytes_to_words(rest)]
defp sensor(words, <<flags::size(15), 1::size(1)>>, [%{length: len, values: value_defs} | rest]) do
{x, rest_words} = Enum.split(words, len)
value(value_defs, x)
|> Map.merge(sensor(rest_words, <<0::size(1), flags::size(15)>>, rest))
end
defp sensor(words, <<flags::size(15), 0::size(1)>>, [_cur | rest]) do
sensor(words, <<0::size(1), flags::size(15)>>, rest)
end
defp sensor([], _flags, []), do: %{}
defp value([], _x), do: %{}
defp value([%{convert: nil} | rest], x), do: value(rest, x)
defp value([%{name: name, unit: unit, convert: convert} | rest], x) do
value(rest, x)
|> Map.put(name, %{"unit" => unit, "value" => convert.(x)})
end
end
IO.inspect(DecentlabDecoder.decode("0203d400033bf67fff3bf60c60", :hex))
IO.inspect(DecentlabDecoder.decode("0203d400020c60", :hex))
| 25.163043 | 108 | 0.544708 |
9e0f01f4d6d429e52a1523eba3aeffcf623dd140 | 2,412 | ex | Elixir | lib/aws/iot/thing_shadow/supervisor.ex | heri16/aws-iot-device-sdk-elixir | 1bdfc539240336a98f57fb3df473cfd66b786442 | [
"Apache-2.0"
] | 27 | 2016-07-09T01:22:17.000Z | 2022-02-22T02:44:16.000Z | lib/aws/iot/thing_shadow/supervisor.ex | heri16/aws-iot-device-sdk-elixir | 1bdfc539240336a98f57fb3df473cfd66b786442 | [
"Apache-2.0"
] | 1 | 2016-08-04T02:56:34.000Z | 2016-08-08T17:18:48.000Z | lib/aws/iot/thing_shadow/supervisor.ex | heri16/aws-iot-device-sdk-elixir | 1bdfc539240336a98f57fb3df473cfd66b786442 | [
"Apache-2.0"
] | 6 | 2016-08-03T15:49:17.000Z | 2018-04-15T12:15:56.000Z | defmodule Aws.Iot.ThingShadow.Supervisor do
@moduledoc ~S"""
Module-based supervisor for `ThingShadow.Client`.
Ensures that event handling and dispatch can survive the lifetime of `ThingShadow.Client`.
You may want to use a module-based supervisor if:
- You need to perform some particular action on supervisor initialization, like setting up an ETS table.
- You want to perform partial hot-code swapping of the tree. For example, if you add or remove children, the module-based supervision will add and remove the new children directly, while dynamic supervision requires the whole tree to be restarted in order to perform such swaps.
"""
use Supervisor
@supervision_strategy :rest_for_one
@doc """
Starts the supervisor
"""
def start_link(client_name \\ Aws.Iot.ThingShadow.Client, opts \\ [])
def start_link(client_name, opts) do
supervisor_name = name_concat(client_name, "Supervisor")
# To start the supervisor, the init/1 callback will be invoked in the given module, with init_args as its argument
Supervisor.start_link(__MODULE__, [client_name, opts], name: supervisor_name)
end
@doc """
Callback invoked to start the supervisor and during hot code upgrades.
"""
def init([client_name, opts]) when is_list(opts) do
mqttc_options_or_app_name = Keyword.get(opts, :mqtt, nil) || Keyword.get(opts, :app_config, :aws_iot)
event_manager_name = name_concat(client_name, "EventManager")
# ThingShadow worker must be able to lookup its GenEvent.manager by name, not by pid.
# This is so that when GenEvent.manager is restarted with a new pid, the reference is still valid.
children = [
worker(GenEvent, [[name: event_manager_name]]),
worker(Aws.Iot.ThingShadow.Client, [event_manager_name, mqttc_options_or_app_name, [name: client_name]])
]
# Init must return a supervisor spec
supervise(children, strategy: @supervision_strategy)
end
# Handles more cases than `Module.concat/2`
defp name_concat(name1, name2) when is_binary(name2) do
case name1 do
{:via, via_module, name} -> {:via, via_module, "#{name}.#{name2}"}
{:global, name} -> {:global, "#{name}.#{name2}"}
name when is_atom(name) -> :"#{name}.#{name2}"
end
end
defp name_concat(name1, name2) when is_atom(name2) do
name2 = tl Module.split(Module.concat(name1, name2))
name_concat(name1, name2)
end
end | 41.586207 | 280 | 0.723466 |
9e0f6cc6cc2f75c83788f19e12c43ebfebf73b46 | 1,414 | exs | Elixir | test/models/collector_log_test.exs | seansu4you87/betazoids | a8aac9074f5efaad4cd88ffdf7cdef53d4beb5cd | [
"MIT"
] | null | null | null | test/models/collector_log_test.exs | seansu4you87/betazoids | a8aac9074f5efaad4cd88ffdf7cdef53d4beb5cd | [
"MIT"
] | null | null | null | test/models/collector_log_test.exs | seansu4you87/betazoids | a8aac9074f5efaad4cd88ffdf7cdef53d4beb5cd | [
"MIT"
] | null | null | null | defmodule Betazoids.CollectorLogTest do
use Betazoids.ModelCase
alias Betazoids.CollectorLog
alias Betazoids.Repo
@valid_attrs %{done: true, fetch_count: 42, message_count: 42, next_url: nil}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = CollectorLog.changeset(%CollectorLog{}, @valid_attrs)
assert changeset.valid?
end
test "transactions when updating existing variable" do
changeset = CollectorLog.changeset(%CollectorLog{}, %{})
{:ok, collector_log} = Repo.insert(changeset)
changeset2 = CollectorLog.changeset(collector_log, %{fetch_count: 2})
# LESSON(yu): WOW okay this is huge. You can change an outerscoped variable
# inside of an anonymous function. So instead, here I'm taking advantage
# of the fact that `Repo.transaction` returns the value of the anonymous
# function with `{:ok, value}`. This was a huge time sink. Remember this!
{:ok, collector_log} = Repo.transaction fn ->
{:ok, updated} = Repo.update(changeset2)
updated
end
[refetched_log] = Repo.all(CollectorLog)
assert collector_log.fetch_count == 2
assert refetched_log.fetch_count == 2
end
test "changeset with invalid attributes" do
changeset = CollectorLog.changeset(%CollectorLog{}, @invalid_attrs)
# DETAIL(yu): You can create a CollectorLog with an empty hash
assert changeset.valid?
end
end
| 32.883721 | 80 | 0.718529 |
9e0f886ba9f9aad7f100649d4f563a73b0b595d7 | 1,063 | exs | Elixir | mix.exs | nitkagoshima-sysken/Eagle | 11597dafff00e82153f9fafe25be756e1361358e | [
"MIT"
] | 1 | 2015-11-15T08:07:48.000Z | 2015-11-15T08:07:48.000Z | mix.exs | nitkagoshima-sysken/Eagle | 11597dafff00e82153f9fafe25be756e1361358e | [
"MIT"
] | null | null | null | mix.exs | nitkagoshima-sysken/Eagle | 11597dafff00e82153f9fafe25be756e1361358e | [
"MIT"
] | null | null | null | defmodule Eagle.Mixfile do
use Mix.Project
def project do
[app: :eagle,
version: "0.0.1",
elixir: "~> 1.0",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[mod: {Eagle, []},
applications: [:phoenix, :phoenix_html, :cowboy, :logger,
:phoenix_ecto, :postgrex]]
end
# Specifies which paths to compile per environment
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies
#
# Type `mix help deps` for examples and options
defp deps do
[{:phoenix, "~> 0.15"},
{:phoenix_ecto, "~> 0.8"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 1.4"},
{:phoenix_live_reload, "~> 0.5", only: :dev},
{:cowboy, "~> 1.0"}]
end
end
| 26.575 | 63 | 0.595484 |
9e0f9088b4fff4a32198313e7cf2ac1826e40482 | 231 | ex | Elixir | test/support/web/coherence/redirects.ex | danschultzer/coherence_assent | 538e7e4aba3146c9bf4ac7798fea4b8a0ff099d5 | [
"Unlicense",
"MIT"
] | 22 | 2017-09-15T17:52:31.000Z | 2018-10-07T02:36:27.000Z | test/support/web/coherence/redirects.ex | danschultzer/coherence_oauth2 | 538e7e4aba3146c9bf4ac7798fea4b8a0ff099d5 | [
"Unlicense",
"MIT"
] | 15 | 2017-11-01T15:39:37.000Z | 2019-03-11T18:02:04.000Z | test/support/web/coherence/redirects.ex | danschultzer/coherence_oauth2 | 538e7e4aba3146c9bf4ac7798fea4b8a0ff099d5 | [
"Unlicense",
"MIT"
] | 9 | 2017-09-18T20:48:06.000Z | 2018-12-05T15:24:24.000Z | defmodule Coherence.Redirects do
@moduledoc false
use Redirects
def session_create(conn, _), do: redirect(conn, to: "/session_created")
def registration_create(conn, _), do: redirect(conn, to: "/registration_created")
end
| 28.875 | 83 | 0.753247 |
9e0f9ffddd3fd1577417f67fa30c7f4f531e58fb | 6,996 | ex | Elixir | lib/hpack/string.ex | kiennt/hpack | 6047e69204796c69b789bb9a96689430c92577d4 | [
"MIT"
] | 1 | 2016-03-13T00:03:17.000Z | 2016-03-13T00:03:17.000Z | lib/hpack/string.ex | kiennt/hpack | 6047e69204796c69b789bb9a96689430c92577d4 | [
"MIT"
] | null | null | null | lib/hpack/string.ex | kiennt/hpack | 6047e69204796c69b789bb9a96689430c92577d4 | [
"MIT"
] | null | null | null | defmodule HPACK.String do
@huffman_table [
{0x1ff8, 13},
{0x7fffd8, 23},
{0xfffffe2, 28},
{0xfffffe3, 28},
{0xfffffe4, 28},
{0xfffffe5, 28},
{0xfffffe6, 28},
{0xfffffe7, 28},
{0xfffffe8, 28},
{0xffffea, 24},
{0x3ffffffc, 30},
{0xfffffe9, 28},
{0xfffffea, 28},
{0x3ffffffd, 30},
{0xfffffeb, 28},
{0xfffffec, 28},
{0xfffffed, 28},
{0xfffffee, 28},
{0xfffffef, 28},
{0xffffff0, 28},
{0xffffff1, 28},
{0xffffff2, 28},
{0x3ffffffe, 30},
{0xffffff3, 28},
{0xffffff4, 28},
{0xffffff5, 28},
{0xffffff6, 28},
{0xffffff7, 28},
{0xffffff8, 28},
{0xffffff9, 28},
{0xffffffa, 28},
{0xffffffb, 28},
{0x14, 6},
{0x3f8, 10},
{0x3f9, 10},
{0xffa, 12},
{0x1ff9, 13},
{0x15, 6},
{0xf8, 8},
{0x7fa, 11},
{0x3fa, 10},
{0x3fb, 10},
{0xf9, 8},
{0x7fb, 11},
{0xfa, 8},
{0x16, 6},
{0x17, 6},
{0x18, 6},
{0x0, 5},
{0x1, 5},
{0x2, 5},
{0x19, 6},
{0x1a, 6},
{0x1b, 6},
{0x1c, 6},
{0x1d, 6},
{0x1e, 6},
{0x1f, 6},
{0x5c, 7},
{0xfb, 8},
{0x7ffc, 15},
{0x20, 6},
{0xffb, 12},
{0x3fc, 10},
{0x1ffa, 13},
{0x21, 6},
{0x5d, 7},
{0x5e, 7},
{0x5f, 7},
{0x60, 7},
{0x61, 7},
{0x62, 7},
{0x63, 7},
{0x64, 7},
{0x65, 7},
{0x66, 7},
{0x67, 7},
{0x68, 7},
{0x69, 7},
{0x6a, 7},
{0x6b, 7},
{0x6c, 7},
{0x6d, 7},
{0x6e, 7},
{0x6f, 7},
{0x70, 7},
{0x71, 7},
{0x72, 7},
{0xfc, 8},
{0x73, 7},
{0xfd, 8},
{0x1ffb, 13},
{0x7fff0, 19},
{0x1ffc, 13},
{0x3ffc, 14},
{0x22, 6},
{0x7ffd, 15},
{0x3, 5},
{0x23, 6},
{0x4, 5},
{0x24, 6},
{0x5, 5},
{0x25, 6},
{0x26, 6},
{0x27, 6},
{0x6, 5},
{0x74, 7},
{0x75, 7},
{0x28, 6},
{0x29, 6},
{0x2a, 6},
{0x7, 5},
{0x2b, 6},
{0x76, 7},
{0x2c, 6},
{0x8, 5},
{0x9, 5},
{0x2d, 6},
{0x77, 7},
{0x78, 7},
{0x79, 7},
{0x7a, 7},
{0x7b, 7},
{0x7ffe, 15},
{0x7fc, 11},
{0x3ffd, 14},
{0x1ffd, 13},
{0xffffffc, 28},
{0xfffe6, 20},
{0x3fffd2, 22},
{0xfffe7, 20},
{0xfffe8, 20},
{0x3fffd3, 22},
{0x3fffd4, 22},
{0x3fffd5, 22},
{0x7fffd9, 23},
{0x3fffd6, 22},
{0x7fffda, 23},
{0x7fffdb, 23},
{0x7fffdc, 23},
{0x7fffdd, 23},
{0x7fffde, 23},
{0xffffeb, 24},
{0x7fffdf, 23},
{0xffffec, 24},
{0xffffed, 24},
{0x3fffd7, 22},
{0x7fffe0, 23},
{0xffffee, 24},
{0x7fffe1, 23},
{0x7fffe2, 23},
{0x7fffe3, 23},
{0x7fffe4, 23},
{0x1fffdc, 21},
{0x3fffd8, 22},
{0x7fffe5, 23},
{0x3fffd9, 22},
{0x7fffe6, 23},
{0x7fffe7, 23},
{0xffffef, 24},
{0x3fffda, 22},
{0x1fffdd, 21},
{0xfffe9, 20},
{0x3fffdb, 22},
{0x3fffdc, 22},
{0x7fffe8, 23},
{0x7fffe9, 23},
{0x1fffde, 21},
{0x7fffea, 23},
{0x3fffdd, 22},
{0x3fffde, 22},
{0xfffff0, 24},
{0x1fffdf, 21},
{0x3fffdf, 22},
{0x7fffeb, 23},
{0x7fffec, 23},
{0x1fffe0, 21},
{0x1fffe1, 21},
{0x3fffe0, 22},
{0x1fffe2, 21},
{0x7fffed, 23},
{0x3fffe1, 22},
{0x7fffee, 23},
{0x7fffef, 23},
{0xfffea, 20},
{0x3fffe2, 22},
{0x3fffe3, 22},
{0x3fffe4, 22},
{0x7ffff0, 23},
{0x3fffe5, 22},
{0x3fffe6, 22},
{0x7ffff1, 23},
{0x3ffffe0, 26},
{0x3ffffe1, 26},
{0xfffeb, 20},
{0x7fff1, 19},
{0x3fffe7, 22},
{0x7ffff2, 23},
{0x3fffe8, 22},
{0x1ffffec, 25},
{0x3ffffe2, 26},
{0x3ffffe3, 26},
{0x3ffffe4, 26},
{0x7ffffde, 27},
{0x7ffffdf, 27},
{0x3ffffe5, 26},
{0xfffff1, 24},
{0x1ffffed, 25},
{0x7fff2, 19},
{0x1fffe3, 21},
{0x3ffffe6, 26},
{0x7ffffe0, 27},
{0x7ffffe1, 27},
{0x3ffffe7, 26},
{0x7ffffe2, 27},
{0xfffff2, 24},
{0x1fffe4, 21},
{0x1fffe5, 21},
{0x3ffffe8, 26},
{0x3ffffe9, 26},
{0xffffffd, 28},
{0x7ffffe3, 27},
{0x7ffffe4, 27},
{0x7ffffe5, 27},
{0xfffec, 20},
{0xfffff3, 24},
{0xfffed, 20},
{0x1fffe6, 21},
{0x3fffe9, 22},
{0x1fffe7, 21},
{0x1fffe8, 21},
{0x7ffff3, 23},
{0x3fffea, 22},
{0x3fffeb, 22},
{0x1ffffee, 25},
{0x1ffffef, 25},
{0xfffff4, 24},
{0xfffff5, 24},
{0x3ffffea, 26},
{0x7ffff4, 23},
{0x3ffffeb, 26},
{0x7ffffe6, 27},
{0x3ffffec, 26},
{0x3ffffed, 26},
{0x7ffffe7, 27},
{0x7ffffe8, 27},
{0x7ffffe9, 27},
{0x7ffffea, 27},
{0x7ffffeb, 27},
{0xffffffe, 28},
{0x7ffffec, 27},
{0x7ffffed, 27},
{0x7ffffee, 27},
{0x7ffffef, 27},
{0x7fffff0, 27},
{0x3ffffee, 26},
{0x3fffffff, 30}]
@doc """
Encode string
http://httpwg.org/specs/rfc7541.html#string.literal.representation
"""
@spec encode(String.t, boolean) :: {binary}
def encode(value, huffman) do
case huffman do
false ->
size_bin = HPACK.Integer.encode(byte_size(value), 7)
data = encode_normal_string(value, <<>>)
<<0::1, size_bin::bitstring, data::bitstring>>
true ->
data = encode_huffman_string(value, <<>>)
size_bin = HPACK.Integer.encode(byte_size(data), 7)
<<1::1, size_bin::bitstring, data::bitstring>>
end
end
def encode_normal_string("", acc),
do: acc
def encode_normal_string(<<first, res::binary>>, acc),
do: encode_normal_string(res, acc <> <<first>>)
@huffman_table
|> Stream.with_index
|> Enum.each(fn({{code, n}, index}) ->
defp encode_huffman_string(<<unquote(index), rest::binary>>, acc) do
encode_huffman_string(rest, <<acc::bitstring, unquote(Macro.escape(code))::size(unquote(n))>>)
end
end)
defp encode_huffman_string(<<>>, acc) do
bit_left = 8 - rem(bit_size(acc), 8)
<<acc::bitstring, 0xfff::size(bit_left)>>
end
@doc """
Decoding string
http://httpwg.org/specs/rfc7541.html#string.literal.representation
"""
@spec decode(binary) :: {String.t, binary}
def decode(<<huffman::1, bin::bitstring>>) do
{string_length, remain} = HPACK.Integer.decode(bin, 7)
<<chunk::binary-size(string_length), left_data::bitstring>> = remain
case huffman do
0 -> {decode_normal_string(chunk, <<>>), left_data}
1 -> {decode_huffman_string(chunk, <<>>), left_data}
end
end
defp decode_normal_string(<<>>, acc),
do: acc
defp decode_normal_string(<<first, bin::binary>>, acc),
do: decode_normal_string(bin, acc <> <<first>>)
@huffman_table
|> Stream.with_index
|> Enum.each(fn({{code, n}, index}) ->
defp decode_huffman_string(<<unquote(Macro.escape(code))::size(unquote(n)), bin::bitstring>>, acc) do
decode_huffman_string(bin, acc <> <<unquote(index)>>)
end
end)
defp decode_huffman_string(_, acc) do
acc
end
end
| 21.460123 | 105 | 0.513579 |
9e0fa21c170b703732f9f06dfbcb507ea6595b9b | 1,097 | ex | Elixir | lib/scenic/primitive/style/stroke.ex | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/style/stroke.ex | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/style/stroke.ex | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on 5/6/17.
# Copyright © 2017 Kry10 Industries. All rights reserved.
#
defmodule Scenic.Primitive.Style.Stroke do
use Scenic.Primitive.Style
alias Scenic.Primitive.Style.Paint
# ============================================================================
# data verification and serialization
# --------------------------------------------------------
def info(data),
do: """
#{IO.ANSI.red()}#{__MODULE__} data must be {width, paint_type}
#{IO.ANSI.yellow()}Received: #{inspect(data)}
This is very similar to the :fill style. with an added width
examples:
{12, :red}
{12, {:color, :red}}
#{IO.ANSI.default_color()}
"""
# --------------------------------------------------------
# named color
def verify(stroke) do
try do
normalize(stroke)
true
rescue
_ -> false
end
end
# --------------------------------------------------------
def normalize({width, paint}) when is_number(width) and width >= 0 do
{width, Paint.normalize(paint)}
end
end
| 24.377778 | 80 | 0.474932 |
9e1015b8d56dd02c8341d9f92f414b75bfd794d4 | 6,548 | ex | Elixir | lib/erlef/academic_papers.ex | pedrosnk/erlef-website | bb8da73d09930056c9d31bcc75a92b8fb3caf6da | [
"Apache-2.0"
] | null | null | null | lib/erlef/academic_papers.ex | pedrosnk/erlef-website | bb8da73d09930056c9d31bcc75a92b8fb3caf6da | [
"Apache-2.0"
] | null | null | null | lib/erlef/academic_papers.ex | pedrosnk/erlef-website | bb8da73d09930056c9d31bcc75a92b8fb3caf6da | [
"Apache-2.0"
] | null | null | null | defmodule Erlef.AcademicPapers do
@moduledoc """
Context responsible for managing Academic Papers
"""
@doc """
Using this stub for now. If we ever want to replace the datastore, we can.
Date convention: If you are only given a year, make the date Jan 1st for that
year.
"""
def all() do
[
%{
link: "http://erlang.org/download/armstrong_thesis_2003.pdf",
name: "Making reliable distributed systems in the presence of software errors",
tags: "Erlang",
date: Date.new(2003, 12, 1)
},
%{
link:
"https://www.researchgate.net/profile/Phil_Trinder/publication/221211369_Comparing_C_and_ERLANG_for_motorola_telecoms_software/links/570fb77408aec95f061589cb/Comparing-C-and-ERLANG-for-motorola-telecoms-software.pdf",
name: "Comparing C++ and ERLANG for motorola telecoms software",
tags: "Erlang",
date: Date.new(2006, 1, 1)
},
%{
link:
"https://kth.diva-portal.org/smash/record.jsf?searchId=2&pid=diva2%3A392243&dswid=-4049",
name: "Characterizing the Scalability of Erlang VM on Many-core Processors",
tags: "BEAM",
date: Date.new(2011, 1, 1)
},
%{
link: "https://jeena.net/t/GGS.pdf",
name: "A Generic Game Server",
tags: "Erlang, BEAM",
date: Date.new(2011, 1, 1)
},
%{
link: "https://pdfs.semanticscholar.org/cd4e/7b338616eb6a87a542aa224d5d1b3e7aee41.pdf",
name: "SFMT pseudo random number generator for Erlang",
tags: "Erlang, PRNG",
date: Date.new(2011, 9, 23)
},
%{
link:
"https://www.researchgate.net/publication/254464022_TinyMT_pseudo_random_number_generator_for_Erlang",
name: "TinyMT pseudo random number generator for Erlang",
tags: "Erlang, PRNG",
date: Date.new(2012, 9, 14)
},
%{
link: "https://pdfs.semanticscholar.org/62c5/81a08fcb58cb89586486a949376d984a3303.pdf",
name: "Multiplayer Game Server for Turn-Based Mobile Games in Erlang",
tags: "Erlang, BEAM",
date: Date.new(2013, 2, 1)
},
%{
link: "https://staff.um.edu.mt/afra1/papers/rv13jour.pdf",
name: "Synthesising correct concurrent runtime monitors",
tags: "Erlang",
date: Date.new(2014, 11, 20)
},
%{
link: "http://ds.cs.ut.ee/courses/course-files/To303nis%20Pool%20.pdf",
name: "Comparison of Erlang Runtime System and Java Virtual Machine",
tags: "BEAM",
date: Date.new(2015, 5, 1)
},
%{
link: "http://www.dcs.gla.ac.uk/research/sd-erlang/sd-erlang-improving-jpdc-16.pdf",
name: "Improving the Network Scalability of Erlang",
tags: "Erlang",
date: Date.new(2015, 8, 26)
},
%{
link: "https://staff.um.edu.mt/afra1/papers/rv2016.pdf",
name: "A Monitoring Tool for a Branching-Time Logic",
tags: "Erlang",
date: Date.new(2016, 5, 1)
},
%{
link: "https://ieeexplore.ieee.org/document/7797392",
name: "Elixir programming language evaluation for IoT (in Japanese)",
tags: "Elixir",
date: Date.new(2016, 9, 28)
},
%{
link: "https://staff.um.edu.mt/afra1/papers/betty-book.pdf",
name: "A Runtime Monitoring Tool for Actor-Based Systems",
tags: "Erlang",
date: Date.new(2017, 1, 1)
},
%{
link: "http://www.dcs.gla.ac.uk/research/sd-erlang/release-summary-arxiv.pdf",
name:
"Scaling Reliably: Improving the Scalability of the Erlang Distributed Actor Platform",
tags: "Erlang, BEAM",
date: Date.new(2017, 4, 25)
},
%{
link:
"https://www.researchgate.net/publication/320360988_Sparrow_a_DSL_for_coordinating_large_groups_of_heterogeneous_actors",
name: "Sparrow: a DSL for coordinating large groups of heterogeneous actors",
tags: "Elixir",
date: Date.new(2017, 9, 1)
},
%{
link: "https://zeam-vm.github.io/papers/callback-thread-2nd-WSA.html",
name:
"Plan to Implementation of Lightweight Callback Thread for Elixir and Improvement of Maximum Concurrent Sessions and Latency of Phoenix (in Japanese)",
tags: "Elixir",
date: Date.new(2018, 5, 12)
},
%{
link:
"https://ipsj.ixsq.nii.ac.jp/ej/index.php?active_action=repository_view_main_item_detail&page_id=13&block_id=8&item_id=190322&item_no=1",
name:
"An Empirical Evaluation to Performance of Elixir for Introducing IoT Systems (in Japanese)",
tags: "Elixir",
date: Date.new(2018, 6, 29)
},
%{
link:
"https://ipsj.ixsq.nii.ac.jp/ej/index.php?active_action=repository_view_main_item_detail&page_id=13&block_id=8&item_id=190626&item_no=1",
name:
"Implementation of Runtime Environments of C++ and Elixir with the Node Programming Model (in Japanese)",
tags: "Elixir",
date: Date.new(2018, 7, 23)
},
%{
link: "https://researchmap.jp/?action=cv_download_main&upload_id=192105",
name:
"A Method Using GPGPU for Super-Parallelization in Elixir Programming (in Japanese)",
tags: "Elixir",
date: Date.new(2018, 8, 1)
},
%{
link: "https://ci.nii.ac.jp/naid/170000150470/",
name:
"Hastega: Parallelization of Linear Regression Using SIMD Instruction for Elixir Programming (in Japanese)",
tags: "Elixir",
date: Date.new(2018, 1, 17)
},
%{
link: "https://ci.nii.ac.jp/naid/170000180471/",
name:
"SumMag: Design and Implementation of an Analyzer an Extension Mechanism by Meta-programming Using Elixir Macros (in Japanese)",
tags: "Elixir",
date: Date.new(2019, 3, 19)
},
%{
link: "https://www.mdpi.com/1424-8220/19/18/4017/htm",
name:
"Elemental: An Open-Source Wireless Hardware and Software Platform for Building Energy and Indoor Environmental Monitoring and Control",
tags: "Elixir, Nerves",
date: Date.new(2019, 9, 18)
},
%{
link: "https://www.info.ucl.ac.be/~pvr/Neirinckx_47311500_Bastin_81031400_2020.pdf",
name: "Sensor fusion at the extreme edge of an internet of things network",
tags: "Elixir, Nerves, Phoenix",
date: Date.new(2020, 8, 16)
}
]
end
end
| 38.745562 | 227 | 0.60843 |
9e10464000bc7f8fdba71fb0752be2dc141bd617 | 2,092 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/creative_specification.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/creative_specification.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/creative_specification.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.CreativeSpecification do
  @moduledoc """
  Represents information for a creative that is associated with a Programmatic Guaranteed/Preferred Deal in Ad Manager.

  ## Attributes

  *   `creativeCompanionSizes` (*type:* `list(GoogleApi.AdExchangeBuyer.V2beta1.Model.AdSize.t)`, *default:* `nil`) - Companion sizes may be filled in only when this is a video creative.
  *   `creativeSize` (*type:* `GoogleApi.AdExchangeBuyer.V2beta1.Model.AdSize.t`, *default:* `nil`) - The size of the creative.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :creativeCompanionSizes =>
            list(GoogleApi.AdExchangeBuyer.V2beta1.Model.AdSize.t()) | nil,
          :creativeSize => GoogleApi.AdExchangeBuyer.V2beta1.Model.AdSize.t() | nil
        }

  # Field declarations consumed by GoogleApi.Gax.ModelBase. This file is
  # auto-generated (see the header): do not hand-edit; regenerate instead.
  field(:creativeCompanionSizes, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.AdSize, type: :list)
  field(:creativeSize, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.AdSize)
end
# Decodes a JSON map into the model struct by delegating to the generated
# decode/2 provided by GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.CreativeSpecification do
  def decode(value, options) do
    GoogleApi.AdExchangeBuyer.V2beta1.Model.CreativeSpecification.decode(value, options)
  end
end
# Encodes the model struct to JSON via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.CreativeSpecification do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.019608 | 186 | 0.756692 |
9e105361f5264e0786210ca2fcd842615dab519c | 150 | exs | Elixir | test/at_sample_app_test.exs | beamkenya/at_sample_app | 9311f0e3381a01f55afb4f2ff8aade74abf43308 | [
"Apache-2.0"
] | 3 | 2020-06-29T19:49:48.000Z | 2020-11-26T14:37:11.000Z | test/at_sample_app_test.exs | beamkenya/at_sample_app | 9311f0e3381a01f55afb4f2ff8aade74abf43308 | [
"Apache-2.0"
] | 5 | 2020-06-16T20:37:47.000Z | 2021-03-03T20:33:43.000Z | test/at_sample_app_test.exs | beamkenya/at_sample_app | 9311f0e3381a01f55afb4f2ff8aade74abf43308 | [
"Apache-2.0"
] | null | null | null | defmodule AtSampleAppTest do
use ExUnit.Case
doctest AtSampleApp
test "greets the world" do
assert AtSampleApp.hello() == :world
end
end
| 16.666667 | 40 | 0.733333 |
9e10568cb890316128cf08b870ec05fc0d7556b7 | 83 | ex | Elixir | lib/chartjs_phoenix_demo_web/views/page_view.ex | jszod/chartjs_phoenix_demo | 648b14d0dcdb5adf4b457adbd29172142f9fc8bf | [
"Apache-2.0"
] | null | null | null | lib/chartjs_phoenix_demo_web/views/page_view.ex | jszod/chartjs_phoenix_demo | 648b14d0dcdb5adf4b457adbd29172142f9fc8bf | [
"Apache-2.0"
] | null | null | null | lib/chartjs_phoenix_demo_web/views/page_view.ex | jszod/chartjs_phoenix_demo | 648b14d0dcdb5adf4b457adbd29172142f9fc8bf | [
"Apache-2.0"
] | null | null | null | defmodule ChartjsPhoenixDemoWeb.PageView do
use ChartjsPhoenixDemoWeb, :view
end
| 20.75 | 43 | 0.855422 |
9e107c83b33c03217ec1178799ccfde26f6edca2 | 2,394 | exs | Elixir | test/memcache_client_transcoder_test.exs | tsharju/memcache_client | de67ac3ce707b111ef9fe3a51c91efe27ae39ad3 | [
"MIT"
] | 15 | 2015-06-01T12:47:33.000Z | 2017-12-15T13:46:42.000Z | test/memcache_client_transcoder_test.exs | tsharju/memcache_client | de67ac3ce707b111ef9fe3a51c91efe27ae39ad3 | [
"MIT"
] | 6 | 2016-03-19T05:57:26.000Z | 2017-05-08T06:33:21.000Z | test/memcache_client_transcoder_test.exs | tsharju/memcache_client | de67ac3ce707b111ef9fe3a51c91efe27ae39ad3 | [
"MIT"
] | 14 | 2015-11-14T14:44:35.000Z | 2017-10-14T19:21:04.000Z | defmodule Memcache.ClientTest.Transcoder do
use ExUnit.Case
test "raw transcoder encode_value" do
assert {:error, {:invalid_value, %{}}} == Memcache.Client.Transcoder.encode_value(%{})
{encoded, data_type} = Memcache.Client.Transcoder.encode_value("test")
assert encoded == "test"
assert data_type == 0x0000
end
test "raw transcoder decode_value" do
assert "test" == Memcache.Client.Transcoder.decode_value("test", 0x0000)
assert {:error, {:invalid_data_type, 2}} == Memcache.Client.Transcoder.decode_value("test", 0x0002)
end
test "json transcoder encode_value" do
Application.put_env(:memcache_client, :transcoder, Memcache.Client.Transcoder.Json)
{encoded, data_type} = Memcache.Client.Transcoder.encode_value(%{})
assert encoded == "{}"
assert data_type == 0x0002
Application.delete_env(:memcache_client, :transcoder)
end
test "json transocder decode_value" do
Application.put_env(:memcache_client, :transcoder, Memcache.Client.Transcoder.Json)
assert %{} == Memcache.Client.Transcoder.decode_value("{}", 0x0002)
assert {:error, {:invalid_data_type, 0}} == Memcache.Client.Transcoder.decode_value("{}", 0x0000)
Application.delete_env(:memcache_client, :transcoder)
end
test "json transocder decode_value with opts" do
Application.put_env(:memcache_client, :transcoder, Memcache.Client.Transcoder.Json)
Application.put_env(:memcache_client, :transcoder_decode_opts, [keys: :atoms])
assert %{test: "test"} == Memcache.Client.Transcoder.decode_value("{\"test\": \"test\"}", 0x0002)
Application.delete_env(:memcache_client, :transcoder)
Application.delete_env(:memcache_client, :transcoder_decode_opts)
end
test "erlang transcoder encode_value" do
Application.put_env(:memcache_client, :transcoder, Memcache.Client.Transcoder.Erlang)
{encoded, data_type} = Memcache.Client.Transcoder.encode_value(%{})
assert encoded == <<131, 116, 0, 0, 0, 0>>
assert data_type == 0x0004
Application.delete_env(:memcache_client, :transcoder)
end
test "erlang transcoder decode_value" do
Application.put_env(:memcache_client, :transcoder, Memcache.Client.Transcoder.Erlang)
assert %{} == Memcache.Client.Transcoder.decode_value(<<131, 116, 0, 0, 0, 0>>, 0x0004)
Application.delete_env(:memcache_client, :transcoder)
end
end
| 35.731343 | 103 | 0.722222 |
9e10b3d5e68ddf2047a432ae88f62797b575ecc3 | 317 | ex | Elixir | brighterx/lib/brighterx/resources/facility.ex | techgaun/dumpster | c2a5394afe759fb99041aea677e9b0bc4bf91aec | [
"Unlicense"
] | 1 | 2019-12-10T22:25:31.000Z | 2019-12-10T22:25:31.000Z | brighterx/lib/brighterx/resources/facility.ex | techgaun/dumpster | c2a5394afe759fb99041aea677e9b0bc4bf91aec | [
"Unlicense"
] | 3 | 2020-10-25T04:40:05.000Z | 2020-10-25T04:48:10.000Z | brighterx/lib/brighterx/resources/facility.ex | techgaun/dumpster | c2a5394afe759fb99041aea677e9b0bc4bf91aec | [
"Unlicense"
] | null | null | null | defmodule Brighterx.Resources.Facility do
@moduledoc """
A facility resource
"""
@derive [Poison.Encoder]
defstruct id: nil,
company_id: nil,
name: nil,
address: nil,
udf: nil,
schedule: nil,
devices: [],
enabled: nil
end
| 19.8125 | 41 | 0.514196 |
9e10dd385401d504d9d0b372bcb32221e659e4e5 | 781 | ex | Elixir | lib/plausible/billing/plans.ex | lizlam/plausible | 886ba62cd814e5ca2d05c51a375bccc753c7c6ff | [
"MIT"
] | null | null | null | lib/plausible/billing/plans.ex | lizlam/plausible | 886ba62cd814e5ca2d05c51a375bccc753c7c6ff | [
"MIT"
] | null | null | null | lib/plausible/billing/plans.ex | lizlam/plausible | 886ba62cd814e5ca2d05c51a375bccc753c7c6ff | [
"MIT"
] | null | null | null | defmodule Plausible.Billing.Plans do
@plans %{
monthly: %{
"10k": %{product_id: "558018", due_now: "$6"},
"100k": %{product_id: "558745", due_now: "$12"},
"1m": %{product_id: "558746", due_now: "$36"},
},
yearly: %{
"10k": %{product_id: "572810", due_now: "$48"},
"100k": %{product_id: "590752", due_now: "$96"},
"1m": %{product_id: "590753", due_now: "$288"}
},
}
def plans do
@plans
end
def allowance(subscription) do
allowed_volume = %{
"free_10k" => 10_000,
"558018" => 10_000,
"572810" => 10_000,
"558745" => 100_000,
"590752" => 100_000,
"558746" => 1_000_000,
"590753" => 1_000_000,
}
allowed_volume[subscription.paddle_plan_id]
end
end
| 23.666667 | 54 | 0.53265 |
9e110e44ea581ad23448f413175309e847772f87 | 367 | ex | Elixir | lib/hl7/2.4/segments/msa.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/msa.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/msa.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_4.Segments.MSA do
@moduledoc false
require Logger
alias HL7.V2_4.{DataTypes}
use HL7.Segment,
fields: [
segment: nil,
acknowledgement_code: nil,
message_control_id: nil,
text_message: nil,
expected_sequence_number: nil,
delayed_acknowledgment_type: nil,
error_condition: DataTypes.Ce
]
end
| 20.388889 | 39 | 0.683924 |
9e1129e668ee87075a8e74bc3168971b7782a8b2 | 583 | exs | Elixir | test/views/error_view_test.exs | mogest/superlific | 375d64e3fce594fa1b2322c1652f2f2e648bfe85 | [
"MIT"
] | 6 | 2016-10-13T02:29:54.000Z | 2017-09-13T18:17:56.000Z | test/views/error_view_test.exs | mogest/superlific | 375d64e3fce594fa1b2322c1652f2f2e648bfe85 | [
"MIT"
] | null | null | null | test/views/error_view_test.exs | mogest/superlific | 375d64e3fce594fa1b2322c1652f2f2e648bfe85 | [
"MIT"
] | null | null | null | defmodule Superlific.ErrorViewTest do
use Superlific.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(Superlific.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(Superlific.ErrorView, "500.html", []) ==
"Internal server error"
end
test "render any other" do
assert render_to_string(Superlific.ErrorView, "505.html", []) ==
"Internal server error"
end
end
| 26.5 | 68 | 0.684391 |
9e114e2d7e55b1f5b0f21300bc2d4f7c803e38af | 202 | exs | Elixir | test/chit_chat_web/controllers/page_controller_test.exs | GalPin-Ark/chit_chat | 8cb492b7c6daca82fdbd536f6dbc185c02991730 | [
"MIT"
] | 1 | 2021-09-10T16:49:36.000Z | 2021-09-10T16:49:36.000Z | test/chit_chat_web/controllers/page_controller_test.exs | areski/ex-chitchat | 0ec14e9af6acba40d6708f924b76fb4fbe592dcf | [
"MIT"
] | 2 | 2020-05-22T18:42:14.000Z | 2021-01-25T16:34:38.000Z | test/chit_chat_web/controllers/page_controller_test.exs | areski/ex-chitchat | 0ec14e9af6acba40d6708f924b76fb4fbe592dcf | [
"MIT"
] | null | null | null | defmodule ChitChatWeb.PageControllerTest do
use ChitChatWeb.ConnCase
test "GET /", %{conn: conn} do
conn = get(conn, "/")
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 22.444444 | 60 | 0.683168 |
9e115bcaefd6ace3aca2741b9161e3ae4652ead3 | 913 | exs | Elixir | mix.exs | thatemilio/robotxt | 36b358be7640c94d82d666e453b9d6574f0d4872 | [
"Apache-2.0"
] | null | null | null | mix.exs | thatemilio/robotxt | 36b358be7640c94d82d666e453b9d6574f0d4872 | [
"Apache-2.0"
] | null | null | null | mix.exs | thatemilio/robotxt | 36b358be7640c94d82d666e453b9d6574f0d4872 | [
"Apache-2.0"
] | null | null | null | defmodule Robotxt.MixProject do
use Mix.Project
@version "0.1.3"
def project do
[
app: :robotxt,
version: @version,
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps(),
description: description(),
package: package(),
name: "Robotxt",
source_url: "https://github.com/thatemilio/robotxt"
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ex_doc, "~> 0.22.1", only: :dev, runtime: false},
]
end
defp description do
"Robots.txt parser."
end
defp package do
[
files: ~w(lib README.md mix.exs .formatter.exs),
licenses: ["Apache-2.0"],
links: %{"GitHub" => "https://github.com/thatemilio/robotxt"}
]
end
end
| 19.425532 | 67 | 0.584885 |
9e11746f4dffa10f9509b9f9c0e2aaddc6e048b2 | 678 | exs | Elixir | test/clickhousex/storage_test.exs | moldmn/clickhousex | b39016714e346490ff8e9c4432006827612fba11 | [
"Apache-2.0"
] | 43 | 2018-04-02T07:00:44.000Z | 2020-08-14T18:47:26.000Z | test/clickhousex/storage_test.exs | moldmn/clickhousex | b39016714e346490ff8e9c4432006827612fba11 | [
"Apache-2.0"
] | 21 | 2020-09-07T08:19:59.000Z | 2021-08-25T03:46:38.000Z | test/clickhousex/storage_test.exs | moldmn/clickhousex | b39016714e346490ff8e9c4432006827612fba11 | [
"Apache-2.0"
] | 22 | 2018-08-08T07:45:44.000Z | 2020-09-01T07:58:54.000Z | defmodule Clickhousex.StorageTest do
use ClickhouseCase, async: true
alias Clickhousex.Result
test "can create and drop database", ctx do
assert {:ok, _, %Result{}} = schema(ctx, "CREATE DATABASE other_db")
assert {:ok, _, %Result{}} = schema(ctx, "DROP DATABASE other_db")
end
test "returns correct error when dropping database that doesn't exist", ctx do
assert {:error, %{code: :database_does_not_exists}} = schema(ctx, "DROP DATABASE random_db ")
end
test "returns correct error when creating a database that already exists", ctx do
assert {:error, %{code: :database_already_exists}} = schema(ctx, "CREATE DATABASE {{database}}")
end
end
| 35.684211 | 100 | 0.712389 |
9e11a9a024cc17feb505f0a84c0077f32e89e969 | 1,804 | ex | Elixir | lib/elixir_jobs_web/views/view_helpers.ex | savekirk/elixir_jobs | d7ec0f088a1365f3ae5cbbd6c07c2b3fdde9a946 | [
"MIT"
] | null | null | null | lib/elixir_jobs_web/views/view_helpers.ex | savekirk/elixir_jobs | d7ec0f088a1365f3ae5cbbd6c07c2b3fdde9a946 | [
"MIT"
] | null | null | null | lib/elixir_jobs_web/views/view_helpers.ex | savekirk/elixir_jobs | d7ec0f088a1365f3ae5cbbd6c07c2b3fdde9a946 | [
"MIT"
] | null | null | null | defmodule ElixirJobsWeb.ViewHelpers do
use PhoenixHtmlSanitizer, :markdown_html
def class_with_error(form, field, base_class) do
if error_on_field?(form, field) do
"#{base_class} error"
else
base_class
end
end
def error_on_field?(form, field) do
form.errors
|> Enum.map(fn({attr, _message}) -> attr end)
|> Enum.member?(field)
end
###
# Markdown related functions
###
def sanitized_markdown(nil), do: ""
def sanitized_markdown(text) do
text
|> Earmark.as_html!
|> sanitize
end
def do_strip_tags(text) do
sanitize(text, :strip_tags)
end
###
# XML related functions
###
def xml_strip_tags(text) do
{:safe, text} = do_strip_tags(text)
text
end
def xml_sanitized_markdown(text) do
{:safe, text} = sanitized_markdown(text)
text
end
@doc "Returns a date formatted for humans."
def human_readable_date(date, use_abbrevs? \\ true) do
if use_abbrevs? && this_year?(date) do
cond do
today?(date) ->
"Today"
yesterday?(date) ->
"Yesterday"
true ->
ElixirJobs.Date.strftime(date, "%e %b")
end
else
ElixirJobs.Date.strftime(date, "%e %b %Y")
end
end
@doc "Returns a date formatted for RSS clients."
def xml_readable_date(date) do
ElixirJobs.Date.strftime(date, "%e %b %Y %T %z")
end
###
# Private functions
###
defp this_year?(date), do: date.year == Ecto.DateTime.utc.year
defp today?(date) do
now = Ecto.DateTime.utc
date.day == now.day && date.month == now.month && date.year == now.year
end
def yesterday?(date) do
now = Ecto.DateTime.utc
difference = ElixirJobs.Date.diff(now, date)
difference < 2 * 24 * 60 * 60 && difference > 1 * 24 * 60 * 60
end
end
| 21.223529 | 75 | 0.622506 |
9e11b137100ee3da716469fcbd78e3d4ae2dc337 | 789 | exs | Elixir | 2021/day3/test/day3_test.exs | SuddenGunter/adventofcode | 702dd927b1d23c4c5c4b2e67898f4b3c914abfcf | [
"MIT"
] | null | null | null | 2021/day3/test/day3_test.exs | SuddenGunter/adventofcode | 702dd927b1d23c4c5c4b2e67898f4b3c914abfcf | [
"MIT"
] | null | null | null | 2021/day3/test/day3_test.exs | SuddenGunter/adventofcode | 702dd927b1d23c4c5c4b2e67898f4b3c914abfcf | [
"MIT"
] | null | null | null | defmodule Day3Test do
use ExUnit.Case
doctest Task1
test "dinary diagnostic (test case)" do
assert Task1.solution([
"00100",
"11110",
"10110",
"10111",
"10101",
"01111",
"00111",
"11100",
"10000",
"11001",
"00010",
"01010"
]) == 198
end
test "dinary diagnostic (test case task 2)" do
assert Task2.solution([
"00100",
"11110",
"10110",
"10111",
"10101",
"01111",
"00111",
"11100",
"10000",
"11001",
"00010",
"01010"
]) == 230
end
end
| 20.230769 | 48 | 0.357414 |
9e11c67c5a88a70ab2899815a303e1ed56325841 | 4,305 | exs | Elixir | test/oli/delivery/sections/enrollments_browse_test.exs | AnkitKadamATS/oli-torus | 3f9d5e8d568684b28d2ed65e17f796ae4c27c072 | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | test/oli/delivery/sections/enrollments_browse_test.exs | AnkitKadamATS/oli-torus | 3f9d5e8d568684b28d2ed65e17f796ae4c27c072 | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | test/oli/delivery/sections/enrollments_browse_test.exs | Simon-Initiative/oli-torus | 7f3eaeaa18ca8837e5afbff3e8899ae13b49de8b | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule Oli.Delivery.Sections.EnrollmentsBrowseTest do
use Oli.DataCase
alias Oli.Delivery.Sections
alias Oli.Repo.{Paging, Sorting}
alias Oli.Delivery.Sections.{EnrollmentBrowseOptions}
alias Lti_1p3.Tool.ContextRoles
import Ecto.Query, warn: false
def make_sections(project, institution, prefix, n, attrs) do
65..(65 + (n - 1))
|> Enum.map(fn value -> List.to_string([value]) end)
|> Enum.map(fn value -> make(project, institution, "#{prefix}-#{value}", attrs) end)
end
def browse(section, offset, field, direction, text_search, is_student, is_instructor) do
Sections.browse_enrollments(
section,
%Paging{offset: offset, limit: 3},
%Sorting{field: field, direction: direction},
%EnrollmentBrowseOptions{
is_student: is_student,
is_instructor: is_instructor,
text_search: text_search
}
)
end
def make(project, institution, title, attrs) do
{:ok, section} =
Sections.create_section(
Map.merge(
%{
title: title,
timezone: "1",
registration_open: true,
context_id: "1",
institution_id:
if is_nil(institution) do
nil
else
institution.id
end,
base_project_id: project.id
},
attrs
)
)
section
end
# Create and enroll 11 users, with 6 being students and 5 being instructors
def enroll(section) do
to_attrs = fn v ->
%{
sub: UUID.uuid4(),
name: "#{v}",
given_name: "#{v}",
family_name: "#{v}",
middle_name: "",
picture: "https://platform.example.edu/jane.jpg",
email: "test#{v}@example.edu",
locale: "en-US"
}
end
Enum.map(1..11, fn v -> to_attrs.(v) |> user_fixture() end)
|> Enum.with_index(fn user, index ->
roles =
case rem(index, 2) do
0 ->
[ContextRoles.get_role(:context_learner)]
_ ->
[ContextRoles.get_role(:context_learner), ContextRoles.get_role(:context_instructor)]
end
# Between the first two enrollments, delay enough that we get distinctly different
# enrollment times
case index do
1 -> :timer.sleep(1500)
_ -> true
end
Sections.enroll(user.id, section.id, roles)
end)
end
describe "basic browsing" do
setup do
map = Seeder.base_project_with_resource2()
section1 = make(map.project, map.institution, "a", %{})
section2 = make(map.project, map.institution, "b", %{})
enroll(section1)
Map.put(map, :section1, section1) |> Map.put(:section2, section2)
end
test "basic sorting", %{section1: section1, section2: section2} do
# Verify that retrieving all users works
results = browse(section1, 0, :name, :asc, nil, false, false)
assert length(results) == 3
assert hd(results).total_count == 11
# Verify that retrieving only instructors works
results = browse(section1, 0, :name, :asc, nil, false, true)
assert length(results) == 3
assert hd(results).total_count == 5
# Verify that retrieving only students works
results = browse(section1, 0, :name, :asc, nil, true, false)
assert length(results) == 3
assert hd(results).total_count == 6
# Verify that retrieving only students works WITH text search
results = browse(section1, 0, :name, :asc, "5", true, false)
assert length(results) == 1
assert hd(results).total_count == 1
# Verify that sorting by enrollment_date works
results = browse(section1, 0, :enrollment_date, :asc, nil, false, false)
assert length(results) == 3
assert hd(results).total_count == 11
assert hd(results).name == "1"
# Verify offset and reverse sort for enrollment_date
results = browse(section1, 10, :enrollment_date, :desc, nil, false, false)
assert length(results) == 1
assert hd(results).total_count == 11
assert hd(results).name == "1"
# Verify that zero enrollments return for section2
results = browse(section2, 00, :enrollment_date, :desc, nil, false, false)
assert length(results) == 0
end
end
end
| 30.531915 | 97 | 0.609524 |
9e11d049b8fe9a5116c7e4d246a3af6bdb1c93e1 | 4,746 | ex | Elixir | lib/elixir/lib/task/supervised.ex | elkinsd/elixir | 810965e193cb57b82363e7c0c97b719743b7964f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/task/supervised.ex | elkinsd/elixir | 810965e193cb57b82363e7c0c97b719743b7964f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/task/supervised.ex | elkinsd/elixir | 810965e193cb57b82363e7c0c97b719743b7964f | [
"Apache-2.0"
] | null | null | null | defmodule Task.Supervised do
@moduledoc false
@ref_timeout 5_000
def start(info, fun) do
{:ok, :proc_lib.spawn(__MODULE__, :noreply, [info, fun])}
end
def start_link(info, fun) do
{:ok, :proc_lib.spawn_link(__MODULE__, :noreply, [info, fun])}
end
def start_link(caller, link, info, fun) do
{:ok, spawn_link(caller, link, info, fun)}
end
def spawn_link(caller, link \\ :nolink, info, fun) do
:proc_lib.spawn_link(__MODULE__, :reply, [caller, link, info, fun])
end
def reply(caller, link, info, mfa) do
initial_call(mfa)
case link do
:link ->
try do
Process.link(caller)
catch
:error, :noproc ->
exit({:shutdown, :noproc})
end
reply(caller, nil, @ref_timeout, info, mfa)
:monitor ->
mref = Process.monitor(caller)
reply(caller, mref, @ref_timeout, info, mfa)
:nolink ->
reply(caller, nil, :infinity, info, mfa)
end
end
defp reply(caller, mref, timeout, info, mfa) do
receive do
{^caller, ref} ->
_ = if mref, do: Process.demonitor(mref, [:flush])
send caller, {ref, do_apply(info, mfa)}
{:DOWN, ^mref, _, _, reason} when is_reference(mref) ->
exit({:shutdown, reason})
after
# There is a race condition on this operation when working across
# node that manifests if a "Task.Supervisor.async/2" call is made
# while the supervisor is busy spawning previous tasks.
#
# Imagine the following workflow:
#
# 1. The nodes disconnect
# 2. The async call fails and is caught, the calling process does not exit
# 3. The task is spawned and links to the calling process, causing the nodes to reconnect
# 4. The calling process has not exited and so does not send its monitor reference
# 5. The spawned task waits forever for the monitor reference so it can begin
#
# We have solved this by specifying a timeout of 5000 seconds.
# Given no work is done in the client between the task start and
# sending the reference, 5000 should be enough to not raise false
# negatives unless the nodes are indeed not available.
#
# The same situation could occur with "Task.Supervisor.async_nolink/2",
# except a monitor is used instead of a link.
timeout ->
exit(:timeout)
end
end
def noreply(info, mfa) do
initial_call(mfa)
do_apply(info, mfa)
end
defp initial_call(mfa) do
Process.put(:"$initial_call", get_initial_call(mfa))
end
defp get_initial_call({:erlang, :apply, [fun, []]}) when is_function(fun, 0) do
{:module, module} = :erlang.fun_info(fun, :module)
{:name, name} = :erlang.fun_info(fun, :name)
{module, name, 0}
end
defp get_initial_call({mod, fun, args}) do
{mod, fun, length(args)}
end
defp do_apply(info, {module, fun, args} = mfa) do
try do
apply(module, fun, args)
catch
:error, value ->
reason = {value, System.stacktrace()}
exit(info, mfa, reason, reason)
:throw, value ->
reason = {{:nocatch, value}, System.stacktrace()}
exit(info, mfa, reason, reason)
:exit, value ->
exit(info, mfa, {value, System.stacktrace()}, value)
end
end
defp exit(_info, _mfa, _log_reason, reason)
when reason == :normal
when reason == :shutdown
when tuple_size(reason) == 2 and elem(reason, 0) == :shutdown do
exit(reason)
end
defp exit(info, mfa, log_reason, reason) do
{fun, args} = get_running(mfa)
:error_logger.format(
'** Task ~p terminating~n' ++
'** Started from ~p~n' ++
'** When function == ~p~n' ++
'** arguments == ~p~n' ++
'** Reason for termination == ~n' ++
'** ~p~n', [self(), get_from(info), fun, args, get_reason(log_reason)])
exit(reason)
end
defp get_from({node, pid_or_name}) when node == node(), do: pid_or_name
defp get_from(other), do: other
defp get_running({:erlang, :apply, [fun, []]}) when is_function(fun, 0), do: {fun, []}
defp get_running({mod, fun, args}), do: {:erlang.make_fun(mod, fun, length(args)), args}
defp get_reason({:undef, [{mod, fun, args, _info} | _] = stacktrace} = reason)
when is_atom(mod) and is_atom(fun) do
cond do
:code.is_loaded(mod) === false ->
{:"module could not be loaded", stacktrace}
is_list(args) and not function_exported?(mod, fun, length(args)) ->
{:"function not exported", stacktrace}
is_integer(args) and not function_exported?(mod, fun, args) ->
{:"function not exported", stacktrace}
true ->
reason
end
end
defp get_reason(reason) do
reason
end
end
| 31.223684 | 95 | 0.617151 |
9e11dee58b6677659cb0a7f3bbbd7f372be01975 | 8,231 | exs | Elixir | lib/ex_unit/test/ex_unit_test.exs | diogovk/elixir | 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit_test.exs | diogovk/elixir | 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit_test.exs | diogovk/elixir | 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule ExUnitTest do
  use ExUnit.Case

  import ExUnit.CaptureIO

  # Each test below defines a throwaway test-case module inline, runs the
  # whole registered suite with ExUnit.run/0, and asserts both on the
  # summary map it returns and on the formatter output captured from stdio.

  test "it supports many runs" do
    defmodule SampleTest do
      use ExUnit.Case

      test "true" do
        assert false
      end

      test "false" do
        assert false
      end
    end

    assert capture_io(fn ->
      assert ExUnit.run == %{failures: 2, skipped: 0, total: 2}
    end) =~ "2 tests, 2 failures"
  end

  # A linked process exiting abnormally must be reported as a failure
  # rather than hanging the runner.
  test "it doesn't hang on exits" do
    defmodule EventServerTest do
      use ExUnit.Case

      test "spawn and crash" do
        spawn_link(fn ->
          exit :foo
        end)
        receive after: (1000 -> :ok)
      end
    end

    assert capture_io(fn ->
      assert ExUnit.run == %{failures: 1, skipped: 0, total: 1}
    end) =~ "1 test, 1 failure"
  end

  # Per-test timeout via the @tag timeout: attribute.
  test "it supports timeouts" do
    defmodule TimeoutTest do
      use ExUnit.Case

      @tag timeout: 10
      test "ok" do
        :timer.sleep(:infinity)
      end
    end

    output = capture_io(fn -> ExUnit.run end)
    assert output =~ "** (ExUnit.TimeoutError) test timed out after 10ms"
    assert output =~ ~r"\(stdlib\) timer\.erl:\d+: :timer\.sleep/1"
  end

  # Suite-wide timeout via ExUnit.configure/1; the `after` clause restores
  # the default so later tests are unaffected.
  test "it supports configured timeout" do
    defmodule ConfiguredTimeoutTest do
      use ExUnit.Case

      test "ok" do
        :timer.sleep(:infinity)
      end
    end

    ExUnit.configure(timeout: 5)
    output = capture_io(fn -> ExUnit.run end)
    assert output =~ "** (ExUnit.TimeoutError) test timed out after 5ms"
  after
    ExUnit.configure(timeout: 60_000)
  end

  # include/exclude filters: excluded-but-not-reincluded tests count as
  # skipped in the summary line.
  test "filtering cases with tags" do
    defmodule ParityTest do
      use ExUnit.Case

      test "zero", do: :ok

      @tag even: false
      test "one", do: :ok

      @tag even: true
      test "two", do: assert 1 == 2

      @tag even: false
      test "three", do: :ok
    end

    test_cases = ExUnit.Server.start_run

    {result, output} = run_with_filter([], test_cases)
    assert result == %{failures: 1, skipped: 0, total: 4}
    assert output =~ "4 tests, 1 failure"

    {result, output} = run_with_filter([exclude: [even: true]], test_cases)
    assert result == %{failures: 0, skipped: 1, total: 4}
    assert output =~ "4 tests, 0 failures, 1 skipped"

    {result, output} = run_with_filter([exclude: :even], test_cases)
    assert result == %{failures: 0, skipped: 3, total: 4}
    assert output =~ "4 tests, 0 failures, 3 skipped"

    {result, output} = run_with_filter([exclude: :even, include: [even: true]], test_cases)
    assert result == %{failures: 1, skipped: 2, total: 4}
    assert output =~ "4 tests, 1 failure, 2 skipped"

    {result, output} = run_with_filter([exclude: :test, include: [even: true]], test_cases)
    assert result == %{failures: 1, skipped: 3, total: 4}
    assert output =~ "4 tests, 1 failure, 3 skipped"
  end

  # :capture_log replays a failing test's log output but suppresses logs
  # from passing tests; untagged tests log nothing once the console backend
  # is removed.
  test "log capturing" do
    defmodule LogCapturingTest do
      use ExUnit.Case
      require Logger

      setup_all do
        :ok = Logger.remove_backend(:console)
        on_exit(fn -> Logger.add_backend(:console, flush: true) end)
      end

      @tag :capture_log
      test "one" do
        Logger.debug("one")
        assert 1 == 1
      end

      @tag :capture_log
      test "two" do
        Logger.debug("two")
        assert 1 == 2
      end

      @tag capture_log: []
      test "three" do
        Logger.debug("three")
        assert 1 == 2
      end

      test "four" do
        Logger.debug("four")
        assert 1 == 2
      end
    end

    output = capture_io(&ExUnit.run/0)
    assert output =~ "[debug] two"
    refute output =~ "[debug] one"
    assert output =~ "[debug] three"
    refute output =~ "[debug] four"
  end

  # ExUnit.MultiError aggregates several assertion errors into one failure
  # and the formatter numbers them ("Failure #1", "Failure #2").
  test "supports multi errors" do
    capture_io :stderr, fn ->
      defmodule MultiTest do
        use ExUnit.Case

        test "multi" do
          error1 =
            try do
              assert 1 = 2
            rescue e in ExUnit.AssertionError ->
              {:error, e, System.stacktrace}
            end

          error2 =
            try do
              assert 3 > 4
            rescue e in ExUnit.AssertionError ->
              {:error, e, System.stacktrace}
            end

          raise ExUnit.MultiError, errors: [error1, error2]
        end
      end
    end

    output = capture_io(fn ->
      assert ExUnit.run == %{failures: 1, skipped: 0, total: 1}
    end)

    assert output =~ "1 test, 1 failure"
    assert output =~ "1) test multi (ExUnitTest.MultiTest)"
    assert output =~ "Failure #1"
    assert output =~ "Failure #2"

    assert_raise ExUnit.MultiError, ~r/oops/, fn ->
      error = {:error, RuntimeError.exception("oops"), System.stacktrace}
      raise ExUnit.MultiError, errors: [error]
    end
  end

  # Duplicate test names emit a compile warning (captured on stderr) and
  # only the first definition is registered.
  test "registers only the first test with any given name" do
    capture_io :stderr, fn ->
      defmodule TestWithSameNames do
        use ExUnit.Case

        test "same name, different outcome" do
          assert 1 == 1
        end

        test "same name, different outcome" do
          assert 1 == 2
        end
      end
    end

    assert capture_io(fn ->
      assert ExUnit.run == %{failures: 0, skipped: 0, total: 1}
    end) =~ "1 test, 0 failure"
  end

  # A bodiless `test "name"` is registered as not-implemented and fails.
  test "produces error on not implemented tests" do
    defmodule TestNotImplemented do
      use ExUnit.Case

      setup context do
        assert context[:not_implemented]
        :ok
      end

      test "this is not implemented yet"
    end

    output = capture_io(fn ->
      assert ExUnit.run == %{failures: 1, skipped: 0, total: 1}
    end)

    assert output =~ "Not yet implemented"
    assert output =~ "1 test, 1 failure"
  end

  # :skip (bare or with a reason string) prevents the test body — and its
  # setup — from running at all.
  test "skips tagged test with skip" do
    defmodule TestSkipped do
      use ExUnit.Case

      setup context do
        assert context[:not_implemented]
        :ok
      end

      @tag :skip
      test "this will raise", do: raise "oops"

      @tag skip: "won't work"
      test "this will also raise", do: raise "oops"
    end

    output = capture_io(fn ->
      assert ExUnit.run == %{failures: 0, skipped: 2, total: 2}
    end)

    assert output =~ "2 tests, 0 failures, 2 skipped"
  end

  # The implicit :case tag lets whole case modules be filtered by name.
  test "filtering cases with :case tag" do
    defmodule FirstTestCase do
      use ExUnit.Case
      test "ok", do: :ok
    end

    defmodule SecondTestCase do
      use ExUnit.Case
      test "false", do: assert false
    end

    test_cases = ExUnit.Server.start_run

    {result, output} = run_with_filter([exclude: :case], test_cases)
    assert result == %{failures: 0, skipped: 2, total: 2}
    assert output =~ "2 tests, 0 failures, 2 skipped"

    {result, output} =
      [exclude: :test, include: [case: "ExUnitTest.SecondTestCase"]]
      |> run_with_filter(test_cases)

    assert result == %{failures: 1, skipped: 1, total: 2}
    assert output =~ "1) test false (ExUnitTest.SecondTestCase)"
    assert output =~ "2 tests, 1 failure, 1 skipped"
  end

  # Reserved tags (like :file) raise at module compile time when set via
  # @tag...
  test "raises on reserved tag in module" do
    assert_raise RuntimeError, "cannot set tag :file because it is reserved by ExUnit", fn ->
      defmodule ReservedTag do
        use ExUnit.Case

        setup do
          {:ok, file: :foo}
        end

        @tag file: "oops"
        test "sample", do: :ok
      end
    end
  end

  # ...and fail at runtime when a setup callback tries to set one.
  test "raises on reserved tag in setup" do
    defmodule ReservedSetupTag do
      use ExUnit.Case

      setup do
        {:ok, file: :foo}
      end

      test "sample", do: :ok
    end

    output = capture_io(fn ->
      assert ExUnit.run == %{failures: 1, skipped: 0, total: 1}
    end)

    assert output =~ "trying to set reserved field :file"
  end

  # setup_all context has lower priority, so a reserved key there is
  # tolerated rather than raising.
  test "does not raise on reserved tag in setup_all (lower priority)" do
    defmodule ReservedSetupAllTag do
      use ExUnit.Case

      setup_all do
        {:ok, file: :foo}
      end

      test "sample", do: :ok
    end

    capture_io(fn ->
      assert ExUnit.run == %{failures: 0, skipped: 0, total: 1}
    end)
  end

  # Runs a previously-registered set of cases with extra config merged in,
  # returning {summary_map, captured_formatter_output}.
  defp run_with_filter(filters, {async, sync, load_us}) do
    opts = Keyword.merge(ExUnit.configuration, filters)

    output = capture_io fn ->
      Process.put(:capture_result, ExUnit.Runner.run(async, sync, opts, load_us))
    end

    {Process.get(:capture_result), output}
  end
end
| 24.208824 | 93 | 0.595553 |
9e11fcf929e7b062e435a562e6356dc9dba8f46a | 2,392 | ex | Elixir | lib/isbn.ex | renanvy/isbnex | 1572ba494a2ae2450866c910860692455e8364db | [
"MIT"
] | 6 | 2020-04-16T03:16:32.000Z | 2020-04-21T15:23:04.000Z | lib/isbn.ex | renanvy/isbnex | 1572ba494a2ae2450866c910860692455e8364db | [
"MIT"
] | 1 | 2020-12-19T18:54:29.000Z | 2020-12-19T18:54:29.000Z | lib/isbn.ex | renanvy/isbnex | 1572ba494a2ae2450866c910860692455e8364db | [
"MIT"
defmodule ISBN do
  @moduledoc """
  Validation and formatting helpers for ISBN-10 and ISBN-13 numbers.
  """

  # Per-position weights for the ISBN-13 and ISBN-10 checksum algorithms.
  @isbn13_multipliers [1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3]
  @isbn10_multipliers [1, 2, 3, 4, 5, 6, 7, 8, 9]

  @doc """
  Checks if the given string is a valid ISBN.

  Works with both ISBN-10 and ISBN-13. Allows hyphens and spaces in the string.

  Returns `false` — instead of raising — for strings containing characters
  other than digits, spaces and hyphens (apart from a trailing `X` check
  character), and for empty or too-short strings.

  ## Examples

      iex> ISBN.valid?("9971502100")
      true

      iex> ISBN.valid?("978-03-0640-615-7")
      true

  """
  def valid?(isbn) when is_binary(isbn) do
    # The declared check character is the last non-whitespace character;
    # it may be "X" (value 10) for ISBN-10.
    last_digit = isbn |> String.trim() |> String.last()

    # Parse everything except the check character. Bug fixes vs. the naive
    # version: a non-digit body no longer crashes in String.to_integer/1,
    # and "" no longer validates (previously nil == nil returned true).
    parsed =
      isbn
      |> remove_spaces_and_dashes()
      |> String.slice(0..-2)
      |> parse_digits()

    case parsed do
      {:ok, digits} -> reveal_verifier(digits) == last_digit
      :error -> false
    end
  end

  def valid?(_), do: false

  @doc """
  Returns ISBN formatted with hyphens, or `nil` when the input is invalid.

  ## Examples

      iex> ISBN.format("9992158107")
      "99-9215-810-7"

      iex> ISBN.format("9992158106")
      nil

  """
  def format(isbn) when not is_binary(isbn), do: nil

  def format(isbn) do
    if valid?(isbn) do
      isbn
      |> remove_spaces_and_dashes()
      |> String.codepoints()
      |> do_format()
    else
      nil
    end
  end

  # Hyphenation for a 10-character ISBN-10: 99-9215-810-7.
  defp do_format(digits) when length(digits) == 10 do
    digits
    |> List.insert_at(2, "-")
    |> List.insert_at(7, "-")
    |> List.insert_at(11, "-")
    |> Enum.join()
  end

  # Hyphenation for a 13-character ISBN-13: 978-03-0640-615-7.
  defp do_format(digits) when length(digits) == 13 do
    digits
    |> List.insert_at(3, "-")
    |> List.insert_at(6, "-")
    |> List.insert_at(11, "-")
    |> List.insert_at(15, "-")
    |> Enum.join()
  end

  defp do_format(_isbn), do: nil

  # Parses a string of digit characters into a list of integers.
  # Returns :error for an empty string or any non-digit character, so
  # callers can treat malformed input as invalid rather than crash.
  defp parse_digits(string) do
    if string =~ ~r/\A[0-9]+\z/ do
      {:ok, string |> String.codepoints() |> Enum.map(&String.to_integer/1)}
    else
      :error
    end
  end

  # Computes the expected check character for a 9-digit (ISBN-10) prefix;
  # "X" stands for the value 10.
  defp reveal_verifier(digits) when length(digits) == 9 do
    acc = calculate(digits, @isbn10_multipliers)
    rem = rem(acc, 11)
    if rem == 10, do: "X", else: Integer.to_string(rem)
  end

  # Computes the expected check character for a 12-digit (ISBN-13) prefix.
  defp reveal_verifier(digits) when length(digits) == 12 do
    acc = calculate(digits, @isbn13_multipliers)
    rem = rem(acc, 10)
    if rem == 0, do: "0", else: Integer.to_string(10 - rem)
  end

  # Wrong number of digits — no valid check character exists.
  defp reveal_verifier(_isbn), do: nil

  # Weighted sum of digits with the given multiplier list.
  defp calculate(digits, multipliers) do
    multipliers
    |> Enum.zip(digits)
    |> Enum.reduce(0, fn {multiplier, digit}, acc ->
      acc + multiplier * digit
    end)
  end

  defp remove_spaces_and_dashes(isbn) do
    isbn
    |> String.trim()
    |> String.replace("-", "")
    |> String.replace(" ", "")
  end
end
| 20.982456 | 79 | 0.585702 |
9e120a7e8d11a3e7d72812ba9c3fd17b2fa36d22 | 671 | ex | Elixir | lib/web/views/access_view.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 9 | 2020-02-26T20:24:38.000Z | 2022-03-22T21:14:52.000Z | lib/web/views/access_view.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 15 | 2020-04-22T19:33:24.000Z | 2022-03-26T15:11:17.000Z | lib/web/views/access_view.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
defmodule Web.AccessView do
  use Web, :view

  # Heading block for the recertification form. Decertified users see an
  # extra <h4> banner above the standard prompt paragraph.
  def recertification_heading_by_status(user) do
    prompt =
      content_tag(:p, "Request recertification by submitting the following:", class: "mt-5")

    if user.status == "decertified" do
      [
        content_tag(:h4, "Your account must be recertified.", class: "mt-5"),
        prompt
      ]
    else
      [prompt]
    end
  end

  # Wording of the request, driven by the account status. Statuses other
  # than the two handled here raise (CaseClauseError), as before.
  def request_type_by_status(user) do
    case user.status do
      "decertified" -> "recertification"
      "deactivated" -> "reactivation"
    end
  end
end
| 23.137931 | 96 | 0.602086 |
9e120d287da5b3387056f6f234672f4341e88296 | 66 | exs | Elixir | test/test_helper.exs | jchristopherinc/mars | a109958cb549ede8d983c3af8183d52528a5eaea | [
"MIT"
] | 2 | 2020-08-28T19:17:33.000Z | 2020-09-13T18:49:20.000Z | test/test_helper.exs | jchristopherinc/mars | a109958cb549ede8d983c3af8183d52528a5eaea | [
"MIT"
] | 5 | 2018-10-28T14:39:26.000Z | 2019-01-31T17:23:36.000Z | test/test_helper.exs | jchristopherinc/mars | a109958cb549ede8d983c3af8183d52528a5eaea | [
"MIT"
# Boot the ExUnit test framework before any test files are loaded.
ExUnit.start()

# Use manual sandbox mode so each test explicitly checks out its own
# database connection from Mars.Repo.
Ecto.Adapters.SQL.Sandbox.mode(Mars.Repo, :manual)
| 22 | 50 | 0.772727 |
9e1214c19e5d1764018ffc4fe4e3250272b821fa | 138 | exs | Elixir | test/estuary_test.exs | maynewong/estuary | 49158372a2ff1a93ec72166f7aaa83ff79973184 | [
"MIT"
] | null | null | null | test/estuary_test.exs | maynewong/estuary | 49158372a2ff1a93ec72166f7aaa83ff79973184 | [
"MIT"
] | null | null | null | test/estuary_test.exs | maynewong/estuary | 49158372a2ff1a93ec72166f7aaa83ff79973184 | [
"MIT"
defmodule EstuaryTest do
  use ExUnit.Case
  # Run the iex> examples embedded in Estuary's docs as tests.
  doctest Estuary

  test "greets the world" do
    assert Estuary.hello() == :world
  end
end
| 15.333333 | 36 | 0.710145 |
9e122c2b8c5cfd273bc66d1cf90bdd8a7c683ab3 | 2,501 | ex | Elixir | web/models/post.ex | kentcdodds/changelog.com | e1c0d7ee5d47dc83dd443d623adb0f07e4acb28d | [
"MIT"
] | null | null | null | web/models/post.ex | kentcdodds/changelog.com | e1c0d7ee5d47dc83dd443d623adb0f07e4acb28d | [
"MIT"
] | null | null | null | web/models/post.ex | kentcdodds/changelog.com | e1c0d7ee5d47dc83dd443d623adb0f07e4acb28d | [
"MIT"
defmodule Changelog.Post do
  use Changelog.Web, :model

  alias Changelog.Regexp

  # Ecto schema for blog posts: content fields, publication state, and the
  # author/channel associations (channels joined through post_channels).
  schema "posts" do
    field :title, :string
    field :slug, :string
    field :guid, :string
    field :tldr, :string
    field :body, :string

    field :published, :boolean, default: false
    field :published_at, Timex.Ecto.DateTime

    belongs_to :author, Changelog.Person
    has_many :post_channels, Changelog.PostChannel, on_delete: :delete_all
    has_many :channels, through: [:post_channels, :channel]

    timestamps()
  end

  # Changeset used by the admin UI: validates presence, slug format and
  # uniqueness, and that published posts carry a publication timestamp.
  def admin_changeset(struct, params \\ %{}) do
    struct
    |> cast(params, ~w(title slug author_id published published_at body tldr))
    |> validate_required([:title, :slug, :author_id])
    |> validate_format(:slug, Regexp.slug, message: Regexp.slug_message)
    |> unique_constraint(:slug)
    |> validate_published_has_published_at
    |> cast_assoc(:post_channels)
  end

  # --- Composable query helpers -------------------------------------------
  # Each takes a queryable (defaulting to this module) and narrows/orders it.

  # Published posts whose publication time has already passed.
  def published(query \\ __MODULE__) do
    from p in query,
      where: p.published == true,
      where: p.published_at <= ^Timex.now
  end

  # Posts flagged published but scheduled for a future time.
  def scheduled(query \\ __MODULE__) do
    from p in query,
      where: p.published == true,
      where: p.published_at > ^Timex.now
  end

  def unpublished(query \\ __MODULE__) do
    from p in query, where: p.published == false
  end

  def newest_first(query \\ __MODULE__, field \\ :published_at) do
    from e in query, order_by: [desc: ^field]
  end

  def newest_last(query \\ __MODULE__, field \\ :published_at) do
    from e in query, order_by: [asc: ^field]
  end

  def limit(query, count) do
    from e in query, limit: ^count
  end

  # Full-text search against the posts table's search_vector column.
  def search(query, search_term) do
    from e in query,
      where: fragment("search_vector @@ plainto_tsquery('english', ?)", ^search_term)
  end

  # True when the post is publicly visible as of the given time.
  def is_public(post, as_of \\ Timex.now) do
    post.published && post.published_at <= as_of
  end

  def preload_all(post) do
    post
    |> preload_author
    |> preload_channels
  end

  def preload_author(post) do
    post
    |> Repo.preload(:author)
  end

  # Preloads channels both through ordered post_channels and directly.
  def preload_channels(post) do
    post
    |> Repo.preload(post_channels: {Changelog.PostChannel.by_position, :channel})
    |> Repo.preload(:channels)
  end

  # Adds an error when a post is flagged published without a timestamp.
  defp validate_published_has_published_at(changeset) do
    published = get_field(changeset, :published)
    published_at = get_field(changeset, :published_at)

    if published && is_nil(published_at) do
      add_error(changeset, :published_at, "can't be blank when published")
    else
      changeset
    end
  end
end
| 25.01 | 85 | 0.681327 |
9e1233c946a38aa05eae7bd2c243bcf201043e4e | 948 | exs | Elixir | test/bucket_test.exs | MikaAK/riak-elixir-client | c58700ec30a451a46b226a09387144becc47d5ae | [
"Apache-2.0"
] | 193 | 2015-01-04T15:36:15.000Z | 2022-03-17T21:31:57.000Z | test/bucket_test.exs | MikaAK/riak-elixir-client | c58700ec30a451a46b226a09387144becc47d5ae | [
"Apache-2.0"
] | 51 | 2015-02-12T02:32:23.000Z | 2020-09-22T11:10:56.000Z | test/bucket_test.exs | MikaAK/riak-elixir-client | c58700ec30a451a46b226a09387144becc47d5ae | [
"Apache-2.0"
defmodule Riak.BucketTest do
  use Riak.Case
  @moduletag :riak1

  # Integration tests driving a live Riak connection (the pid comes from
  # the Riak.Case test context) through the Riak.Bucket API.

  # @tag applies to the next test only: allow "list bucket" extra time.
  @tag timeout: 10000
  test "list bucket", context do
    {:ok, buckets} = Riak.Bucket.list context[:pid]
    assert is_list(buckets)
  end

  test "list! bucket", context do
    buckets = Riak.Bucket.list! context[:pid]
    assert is_list(buckets)
  end

  test "list keys", context do
    {:ok, keys} = Riak.Bucket.keys context[:pid], "user"
    assert is_list(keys)
  end

  test "list! keys", context do
    keys = Riak.Bucket.keys! context[:pid], "user"
    assert is_list(keys)
  end

  # Round-trips bucket properties: set :notfound_ok, read it back, then
  # reset and confirm the default (true) is restored.
  test "bucket props", context do
    pid = context[:pid]

    assert :ok == Riak.Bucket.put pid, "user", [{:notfound_ok, false}]

    {:ok, props} = Riak.Bucket.get pid, "user"
    assert is_list(props)
    assert props[:notfound_ok] == false

    assert :ok == Riak.Bucket.reset pid, "user"

    {:ok, props} = Riak.Bucket.get pid, "user"
    assert props[:notfound_ok] == true
  end
end
| 23.121951 | 70 | 0.644515 |
9e12390738539437e9fd56c0a3911f8950ee0307 | 2,798 | ex | Elixir | lib/triton/validate.ex | gjaldon/triton | 0a9bf3490816898c3a5012351b4e2830e71a14b6 | [
"MIT"
] | 1 | 2020-05-16T07:34:47.000Z | 2020-05-16T07:34:47.000Z | lib/triton/validate.ex | gjaldon/triton | 0a9bf3490816898c3a5012351b4e2830e71a14b6 | [
"MIT"
] | null | null | null | lib/triton/validate.ex | gjaldon/triton | 0a9bf3490816898c3a5012351b4e2830e71a14b6 | [
"MIT"
defmodule Triton.Validate do
  # Validates a query against the schema's Vex validators and, on success,
  # coerces literal values in the query to the column types declared in
  # the schema (e.g. numeric strings to integers for int columns).
  def coerce(query) do
    with {:ok, query} <- validate(query) do
      fields = query[:__schema__].__fields__
      {:ok, Enum.map(query, fn x -> coerce(x, fields) end)}
    end
  end

  # Dispatches validation on the query type; unknown types surface the
  # helper's error message.
  def validate(query) do
    case Triton.Helper.query_type(query) do
      {:error, err} -> {:error, err.message}
      type -> validate(type, query, query[:__schema__].__fields__)
    end
  end

  # Insert: validate the prepared values merged with the non-placeholder
  # insert values (placeholders are atoms), or the raw insert values,
  # against every validator declared on the schema.
  def validate(:insert, query, schema) do
    data = query[:prepared] && query[:prepared] ++ (query[:insert] |> Enum.filter(fn {_, v} -> !is_atom(v) end)) || query[:insert]
    vex = schema |> Enum.filter(fn({_, opts}) -> opts[:opts][:validators] end) |> Enum.map(fn {field, opts} -> {field, opts[:opts][:validators]} end)
    case Vex.errors(data ++ [_vex: vex]) do
      [] -> {:ok, query}
      err_list -> {:error, err_list |> Triton.Error.vex_error}
    end
  end

  # Update: like :insert, but only runs validators for the fields that are
  # actually present in the update.
  def validate(:update, query, schema) do
    data = query[:prepared] && query[:prepared] ++ (query[:update] |> Enum.filter(fn {_, v} -> !is_atom(v) end)) || query[:update]
    fields_to_validate = data |> Enum.map(&(elem(&1, 0)))
    vex = schema |> Enum.filter(fn({_, opts}) -> opts[:opts][:validators] end) |> Enum.map(fn {field, opts} -> {field, opts[:opts][:validators]} end) |> Enum.filter(&(elem(&1, 0) in fields_to_validate))
    case Vex.errors(data ++ [_vex: vex]) do
      [] -> {:ok, query}
      err_list -> {:error, err_list |> Triton.Error.vex_error}
    end
  end

  # Any other query type passes through unvalidated.
  def validate(_, query, _), do: {:ok, query}

  # Coercion walks each {key, value} entry of the query keyword list.
  # Schema/table bookkeeping entries are left untouched; list values are
  # coerced fragment-by-fragment; anything else passes through. Clause
  # order matters here — do not reorder.
  defp coerce({:__schema__, v}, _), do: {:__schema__, v}
  defp coerce({:__table__, v}, _), do: {:__table__, v}
  defp coerce({k, v}, fields), do: {k, coerce(v, fields)}

  defp coerce(fragments, fields) when is_list(fragments), do: fragments |> Enum.map(fn fragment -> coerce_fragment(fragment, fields) end)
  defp coerce(non_list, _), do: non_list

  # A fragment is either {field, value}, {field, [comparisons]} or an
  # already-expanded {field, comparator, value} triple. :in comparisons are
  # never coerced (their lists are taken as-is).
  defp coerce_fragment({k, v}, fields) when is_list(v), do: {k, v |> Enum.map(fn {c, v} -> coerce_fragment({k, c, v}, fields) end)}
  defp coerce_fragment({k, v}, fields), do: {k, coerced_value(v, fields[k][:type])}
  defp coerce_fragment({_, :in, v}, _), do: {:in, v}
  defp coerce_fragment({k, c, v}, fields), do: {c, coerced_value(v, fields[k][:type])}
  defp coerce_fragment(x, _), do: x

  # Value coercion by declared column type. Atoms (query placeholders) are
  # never coerced; non-binaries become strings for :text columns; numeric
  # strings become integers for the integer column types.
  defp coerced_value(value, _) when is_atom(value), do: value
  defp coerced_value(value, :text) when not is_binary(value), do: to_string(value)
  defp coerced_value(value, :bigint) when is_binary(value), do: String.to_integer(value)
  defp coerced_value(value, :int) when is_binary(value), do: String.to_integer(value)
  defp coerced_value(value, :smallint) when is_binary(value), do: String.to_integer(value)
  defp coerced_value(value, :varint) when is_binary(value), do: String.to_integer(value)
  defp coerced_value(value, _), do: value
end
| 49.964286 | 202 | 0.648678 |
9e125c29881fd674bd4e708ab5053734c7ce12c1 | 17,567 | ex | Elixir | lib/chart/pointplot.ex | zdenal/contex | 6dfa0507ffd79573e5308a83bc6c9d025b6c23ad | [
"MIT"
] | 455 | 2020-01-15T22:21:40.000Z | 2022-03-29T23:20:45.000Z | lib/chart/pointplot.ex | zdenal/contex | 6dfa0507ffd79573e5308a83bc6c9d025b6c23ad | [
"MIT"
] | 48 | 2020-02-10T06:19:17.000Z | 2022-03-29T03:02:52.000Z | lib/chart/pointplot.ex | zdenal/contex | 6dfa0507ffd79573e5308a83bc6c9d025b6c23ad | [
"MIT"
] | 30 | 2020-01-15T22:21:35.000Z | 2022-03-10T18:11:51.000Z | defmodule Contex.PointPlot do
@moduledoc """
A simple point plot, plotting points showing y values against x values.
It is possible to specify multiple y columns with the same x column. It is not
yet possible to specify multiple independent series.
The x column can either be numeric or date time data. If numeric, a
`Contex.ContinuousLinearScale` is used to scale the values to the plot,
and if date time, a `Contex.TimeScale` is used.
Fill colours for each y column can be specified with `colours/2`.
A column in the dataset can optionally be used to control the colours. See
`colours/2` and `set_colour_col_name/2`
"""
import Contex.SVG
alias __MODULE__
alias Contex.{Scale, ContinuousLinearScale, TimeScale}
alias Contex.CategoryColourScale
alias Contex.{Dataset, Mapping}
alias Contex.Axis
alias Contex.Utils
  # Internal chart state: the dataset, its column mapping and the merged
  # options, with the scales and domain->range transform funs filled in by
  # prepare_scales/1 just before rendering.
  defstruct [
    :dataset,
    :mapping,
    :options,
    :x_scale,
    :y_scale,
    :legend_scale,
    transforms: %{},
    colour_palette: :default
  ]

  # Cardinality constraints for the column mapping (see Contex.Mapping).
  @required_mappings [
    x_col: :exactly_one,
    y_cols: :one_or_more,
    fill_col: :zero_or_one
  ]

  # Defaults merged under the caller-supplied options in new/2.
  @default_options [
    axis_label_rotation: :auto,
    custom_x_scale: nil,
    custom_y_scale: nil,
    custom_x_formatter: nil,
    custom_y_formatter: nil,
    width: 100,
    height: 100,
    colour_palette: :default
  ]

  @type t() :: %__MODULE__{}
@doc ~S"""
Create a new point plot definition and apply defaults.
  Options may be passed to control the settings for the point plot. Options available are:
- `:axis_label_rotation` : `:auto` (default), 45 or 90
Specifies the label rotation value that will be applied to the bottom axis. Accepts integer
values for degrees of rotation or `:auto`. Note that manually set rotation values other than
45 or 90 will be treated as zero. The default value is `:auto`, which sets the rotation to
zero degrees if the number of items on the axis is greater than eight, 45 degrees otherwise.
- `:custom_x_scale` : `nil` (default) or an instance of a suitable `Contex.Scale`.
The scale must be suitable for the data type and would typically be either `Contex.ContinuousLinearScale`
or `Contex.TimeScale`. It is not necessary to set the range for the scale as the range is set
as part of the chart layout process.
- `:custom_y_scale` : `nil` (default) or an instance of a suitable `Contex.Scale`.
- `:custom_x_formatter` : `nil` (default) or a function with arity 1
Allows the axis tick labels to be overridden. For example, if you have a numeric representation of money and you want to
have the x axis show it as millions of dollars you might do something like:
# Turns 1_234_567.67 into $1.23M
defp money_formatter_millions(value) when is_number(value) do
"$#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 2])}M"
end
defp show_chart(data) do
PointPlot.new(
dataset,
mapping: %{x_col: :column_a, y_cols: [:column_b, column_c]},
custom_x_formatter: &money_formatter_millions/1
)
end
- `:custom_y_formatter` : `nil` (default) or a function with arity 1.
- `:colour_palette` : `:default` (default) or colour palette - see `colours/2`
Overrides the default colours.
Where multiple y columns are defined for the plot, a different colour will be used for
each column.
If a single y column is defined and a `:fill_col`column is mapped,
a different colour will be used for each unique value in the colour column.
If a single y column is defined and no `:fill_col`column is mapped, the first colour
in the supplied colour palette will be used to plot the points.
Colours can either be a named palette defined in `Contex.CategoryColourScale` or a list of strings representing hex code
of the colour as per CSS colour hex codes, but without the #. For example:
```
chart = PointPlot.new(
dataset,
mapping: %{x_col: :column_a, y_cols: [:column_b, column_c]},
colour_palette: ["fbb4ae", "b3cde3", "ccebc5"]
)
```
  The colours will be applied to the data series in the same order as the columns are specified in the `:y_cols` mapping (see `set_y_col_names/2`)
  - `:mapping` : Maps attributes required to generate the point plot to columns in the dataset.
If the data in the dataset is stored as a map, the `:mapping` option is required. If the dataset
is not stored as a map, `:mapping` may be left out, in which case the first column will be used
for the x and the second column used as the y.
This value must be a map of the plot's `:x_col` and `:y_cols` to keys in the map,
such as `%{x_col: :column_a, y_cols: [:column_b, column_c]}`.
The value for the `:y_cols` key must be a list.
If a single y column is specified an optional `:fill_col` mapping can be provided
to control the point colour. _This is ignored if there are multiple y columns_.
"""
@spec new(Contex.Dataset.t(), keyword()) :: Contex.PointPlot.t()
def new(%Dataset{} = dataset, options \\ []) do
options = Keyword.merge(@default_options, options)
mapping = Mapping.new(@required_mappings, Keyword.get(options, :mapping), dataset)
%PointPlot{dataset: dataset, mapping: mapping, options: options}
end
@doc """
Sets the default scales for the plot based on its column mapping.
"""
@deprecated "Default scales are now silently applied"
@spec set_default_scales(Contex.PointPlot.t()) :: Contex.PointPlot.t()
def set_default_scales(%PointPlot{mapping: %{column_map: column_map}} = plot) do
set_x_col_name(plot, column_map.x_col)
|> set_y_col_names(column_map.y_cols)
end
@doc """
Set the colour palette for fill colours.
Where multiple y columns are defined for the plot, a different colour will be used for
each column.
If a single y column is defined and a colour column is defined (see `set_colour_col_name/2`),
a different colour will be used for each unique value in the colour column.
If a single y column is defined and no colour column is defined, the first colour
in the supplied colour palette will be used to plot the points.
"""
@deprecated "Set in new/2 options"
@spec colours(Contex.PointPlot.t(), Contex.CategoryColourScale.colour_palette()) ::
Contex.PointPlot.t()
def colours(plot, colour_palette) when is_list(colour_palette) or is_atom(colour_palette) do
set_option(plot, :colour_palette, colour_palette)
end
def colours(plot, _) do
set_option(plot, :colour_palette, :default)
end
@doc """
Specifies the label rotation value that will be applied to the bottom axis. Accepts integer
values for degrees of rotation or `:auto`. Note that manually set rotation values other than
45 or 90 will be treated as zero. The default value is `:auto`, which sets the rotation to
zero degrees if the number of items on the axis is greater than eight, 45 degrees otherwise.
"""
@deprecated "Set in new/2 options"
@spec axis_label_rotation(Contex.PointPlot.t(), integer() | :auto) :: Contex.PointPlot.t()
def axis_label_rotation(%PointPlot{} = plot, rotation) when is_integer(rotation) do
set_option(plot, :axis_label_rotation, rotation)
end
def axis_label_rotation(%PointPlot{} = plot, _) do
set_option(plot, :axis_label_rotation, :auto)
end
@doc false
def set_size(%PointPlot{} = plot, width, height) do
plot
|> set_option(:width, width)
|> set_option(:height, height)
end
@doc ~S"""
Allows the axis tick labels to be overridden. For example, if you have a numeric representation of money and you want to
have the value axis show it as millions of dollars you might do something like:
# Turns 1_234_567.67 into $1.23M
defp money_formatter_millions(value) when is_number(value) do
"$#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 2])}M"
end
defp show_chart(data) do
PointPlot.new(data)
|> PointPlot.custom_x_formatter(&money_formatter_millions/1)
end
"""
@deprecated "Set in new/2 options"
@spec custom_x_formatter(Contex.PointPlot.t(), nil | fun) :: Contex.PointPlot.t()
def custom_x_formatter(%PointPlot{} = plot, custom_x_formatter)
when is_function(custom_x_formatter) or custom_x_formatter == nil do
set_option(plot, :custom_x_formatter, custom_x_formatter)
end
@doc ~S"""
Allows the axis tick labels to be overridden. For example, if you have a numeric representation of money and you want to
have the value axis show it as millions of dollars you might do something like:
# Turns 1_234_567.67 into $1.23M
defp money_formatter_millions(value) when is_number(value) do
"$#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 2])}M"
end
defp show_chart(data) do
PointPlot.new(data)
|> PointPlot.custom_y_formatter(&money_formatter_millions/1)
end
"""
@deprecated "Set in new/2 options"
@spec custom_y_formatter(Contex.PointPlot.t(), nil | fun) :: Contex.PointPlot.t()
def custom_y_formatter(%PointPlot{} = plot, custom_y_formatter)
when is_function(custom_y_formatter) or custom_y_formatter == nil do
set_option(plot, :custom_y_formatter, custom_y_formatter)
end
@doc """
Specify which column in the dataset is used for the x values.
This column must contain numeric or date time data.
"""
@deprecated "Use `:mapping` option in `new/2`"
@spec set_x_col_name(Contex.PointPlot.t(), Contex.Dataset.column_name()) :: Contex.PointPlot.t()
def set_x_col_name(%PointPlot{mapping: mapping} = plot, x_col_name) do
mapping = Mapping.update(mapping, %{x_col: x_col_name})
%{plot | mapping: mapping}
end
@doc """
Specify which column(s) in the dataset is/are used for the y values.
These columns must contain numeric data.
Where more than one y column is specified the colours are used to identify data from
each column.
"""
@deprecated "Use `:mapping` option in `new/2`"
@spec set_y_col_names(Contex.PointPlot.t(), [Contex.Dataset.column_name()]) ::
Contex.PointPlot.t()
def set_y_col_names(%PointPlot{mapping: mapping} = plot, y_col_names)
when is_list(y_col_names) do
mapping = Mapping.update(mapping, %{y_cols: y_col_names})
%{plot | mapping: mapping}
end
@doc """
If a single y column is specified, it is possible to use another column to control the point colour.
Note: This is ignored if there are multiple y columns.
"""
@deprecated "Use `:mapping` option in `new/2`"
@spec set_colour_col_name(Contex.PointPlot.t(), Contex.Dataset.column_name()) ::
Contex.PointPlot.t()
def set_colour_col_name(%PointPlot{} = plot, nil), do: plot
def set_colour_col_name(%PointPlot{mapping: mapping} = plot, fill_col_name) do
mapping = Mapping.update(mapping, %{fill_col: fill_col_name})
%{plot | mapping: mapping}
end
defp set_option(%PointPlot{options: options} = plot, key, value) do
options = Keyword.put(options, key, value)
%{plot | options: options}
end
defp get_option(%PointPlot{options: options}, key) do
Keyword.get(options, key)
end
@doc false
def get_svg_legend(%PointPlot{} = plot) do
plot = prepare_scales(plot)
Contex.Legend.to_svg(plot.legend_scale)
end
def get_svg_legend(_), do: ""
@doc false
def to_svg(%PointPlot{} = plot) do
plot = prepare_scales(plot)
x_scale = plot.x_scale
y_scale = plot.y_scale
axis_x = get_x_axis(x_scale, plot)
axis_y = Axis.new_left_axis(y_scale) |> Axis.set_offset(get_option(plot, :width))
[
Axis.to_svg(axis_x),
Axis.to_svg(axis_y),
"<g>",
get_svg_points(plot),
"</g>"
]
end
defp get_x_axis(x_scale, plot) do
rotation =
case get_option(plot, :axis_label_rotation) do
:auto ->
if length(Scale.ticks_range(x_scale)) > 8, do: 45, else: 0
degrees ->
degrees
end
x_scale
|> Axis.new_bottom_axis()
|> Axis.set_offset(get_option(plot, :height))
|> Kernel.struct(rotation: rotation)
end
defp get_svg_points(%PointPlot{dataset: dataset} = plot) do
dataset.data
|> Enum.map(fn row -> get_svg_point(plot, row) end)
end
  # Renders the point(s) for a single data row. The shared x value is
  # scaled once; one circle is then emitted per mapped y column, with the
  # fill colour chosen by the colour transform from the series index and
  # the (optional) fill-column value. A nil y value renders nothing for
  # that series.
  defp get_svg_point(
         %PointPlot{
           mapping: %{accessors: accessors},
           transforms: transforms
         },
         row
       ) do
    x =
      accessors.x_col.(row)
      |> transforms.x.()

    fill_val = accessors.fill_col.(row)

    Enum.with_index(accessors.y_cols)
    |> Enum.map(fn {accessor, index} ->
      val = accessor.(row)

      case val do
        nil ->
          ""

        _ ->
          y = transforms.y.(val)
          fill = transforms.colour.(index, fill_val)
          get_svg_point(x, y, fill)
      end
    end)
  end

  # Emits a fixed-radius (3) circle; non-numeric coordinates render nothing.
  defp get_svg_point(x, y, fill) when is_number(x) and is_number(y) do
    circle(x, y, 3, fill: fill)
  end

  defp get_svg_point(_x, _y, _fill), do: ""
@doc false
def prepare_scales(%PointPlot{} = plot) do
  # Each step stores its scale and domain->range transform on the plot.
  plot_with_x = prepare_x_scale(plot)
  plot_with_xy = prepare_y_scale(plot_with_x)
  prepare_colour_scale(plot_with_xy)
end
# Builds the x scale (custom if supplied, otherwise inferred from the column
# type) and stores its domain->range transform under :x.
defp prepare_x_scale(%PointPlot{dataset: dataset, mapping: mapping} = plot) do
  width = get_option(plot, :width)

  base_scale =
    case get_option(plot, :custom_x_scale) do
      nil -> create_scale_for_column(dataset, mapping.column_map[:x_col], {0, width})
      custom -> Scale.set_range(custom, 0, width)
    end

  x_scale = %{base_scale | custom_tick_formatter: get_option(plot, :custom_x_formatter)}

  %{
    plot
    | x_scale: x_scale,
      transforms: Map.put(plot.transforms, :x, Scale.domain_to_range_fn(x_scale))
  }
end
# Builds the y scale (custom if supplied, otherwise a linear scale spanning
# the combined extents of all y columns) and stores its transform under :y.
# Note the inverted range (height, 0): SVG's y axis grows downwards.
defp prepare_y_scale(%PointPlot{dataset: dataset, mapping: mapping} = plot) do
  y_col_names = mapping.column_map[:y_cols]
  height = get_option(plot, :height)

  base_scale =
    case get_option(plot, :custom_y_scale) do
      nil ->
        {min, max} =
          dataset
          |> get_overall_domain(y_col_names)
          |> Utils.fixup_value_range()

        ContinuousLinearScale.new()
        |> ContinuousLinearScale.domain(min, max)
        |> Scale.set_range(height, 0)

      custom ->
        Scale.set_range(custom, height, 0)
    end

  y_scale = %{base_scale | custom_tick_formatter: get_option(plot, :custom_y_formatter)}

  %{
    plot
    | y_scale: y_scale,
      transforms: Map.put(plot.transforms, :y, Scale.domain_to_range_fn(y_scale))
  }
end
# Builds the colour handling for the plot.
# It's a little tricky: colours are looked up by series index when colouring
# by series, but the legend is keyed by column name, so the legend scale and
# the scale captured inside the colour transform are built independently.
defp prepare_colour_scale(%PointPlot{dataset: dataset, mapping: mapping} = plot) do
  y_col_names = mapping.column_map[:y_cols]
  fill_col_name = mapping.column_map[:fill_col]
  palette = get_option(plot, :colour_palette)

  colour_fn = create_colour_transform(y_col_names, fill_col_name, dataset, palette)

  %{
    plot
    | legend_scale: create_legend_colour_scale(y_col_names, fill_col_name, dataset, palette),
      transforms: Map.put(plot.transforms, :colour, colour_fn)
  }
end
# Legend scale: keyed by the fill column's unique values when a single series
# is coloured by a fill column; otherwise keyed by the y column names.
defp create_legend_colour_scale(y_col_names, fill_col_name, dataset, palette)
     when length(y_col_names) == 1 and not is_nil(fill_col_name) do
  dataset
  |> Dataset.unique_values(fill_col_name)
  |> CategoryColourScale.new()
  |> CategoryColourScale.set_palette(palette)
end

defp create_legend_colour_scale(y_col_names, _fill_col_name, _dataset, palette) do
  y_col_names
  |> CategoryColourScale.new()
  |> CategoryColourScale.set_palette(palette)
end
# Returns a 2-arity fun (series_index, fill_value) -> colour. With a single
# series and a fill column, colour comes from the row's fill value; otherwise
# each series is coloured by its index.
defp create_colour_transform(y_col_names, fill_col_name, dataset, palette)
     when length(y_col_names) == 1 and not is_nil(fill_col_name) do
  scale =
    dataset
    |> Dataset.unique_values(fill_col_name)
    |> CategoryColourScale.new()
    |> CategoryColourScale.set_palette(palette)

  fn _series_index, fill_val -> CategoryColourScale.colour_for_value(scale, fill_val) end
end

defp create_colour_transform(y_col_names, _fill_col_name, _dataset, palette) do
  series_indices = for {_name, index} <- Enum.with_index(y_col_names), do: index

  scale =
    series_indices
    |> CategoryColourScale.new()
    |> CategoryColourScale.set_palette(palette)

  fn series_index, _fill_val -> CategoryColourScale.colour_for_value(scale, series_index) end
end
# Folds the {min, max} extents of every listed column into a single overall
# {min, max} pair, starting from {nil, nil} (safe_min/safe_max handle nils).
defp get_overall_domain(dataset, col_names) do
  Enum.reduce(col_names, {nil, nil}, fn col, {acc_min, acc_max} ->
    {col_min, col_max} = Dataset.column_extents(dataset, col)
    {Utils.safe_min(acc_min, col_min), Utils.safe_max(acc_max, col_max)}
  end)
end
# Builds a default scale for `column`, mapping its data extents onto the pixel
# range {r_min, r_max}: a TimeScale for datetime columns, a linear scale for
# numeric columns.
# NOTE(review): any other guessed column type (e.g. strings) falls through the
# `case` and raises a CaseClauseError — confirm whether that is intentional.
defp create_scale_for_column(dataset, column, {r_min, r_max}) do
  {min, max} = Dataset.column_extents(dataset, column)

  case Dataset.guess_column_type(dataset, column) do
    :datetime ->
      TimeScale.new()
      |> TimeScale.domain(min, max)
      |> Scale.set_range(r_min, r_max)

    :number ->
      ContinuousLinearScale.new()
      |> ContinuousLinearScale.domain(min, max)
      |> Scale.set_range(r_min, r_max)
  end
end
end
| 34.717391 | 122 | 0.694541 |
9e12a032272898f7fa6d96a0a27aa29f3027ed51 | 361 | exs | Elixir | priv/repo/seeds.exs | LuizFerK/ElixirGitHub | 7db1270e296c6f0a33b7a85e80753cc010ea50af | [
"MIT"
] | 1 | 2021-11-23T16:51:04.000Z | 2021-11-23T16:51:04.000Z | priv/repo/seeds.exs | LuizFerK/Repositoriex | 7db1270e296c6f0a33b7a85e80753cc010ea50af | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | LuizFerK/Repositoriex | 7db1270e296c6f0a33b7a85e80753cc010ea50af | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Elixirgithub.Repo.insert!(%Elixirgithub.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 30.083333 | 61 | 0.714681 |
9e12b65fe39ef2f82052ffa48caecf7ac12736b0 | 43,094 | ex | Elixir | lib/ecto/repo.ex | carakan/ecto | b09ac0c93d45a421b379ed4671c805b28a70cec9 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo.ex | carakan/ecto | b09ac0c93d45a421b379ed4671c805b28a70cec9 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo.ex | carakan/ecto | b09ac0c93d45a421b379ed4671c805b28a70cec9 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Repo do
@moduledoc """
Defines a repository.
A repository maps to an underlying data store, controlled by the
adapter. For example, Ecto ships with a Postgres adapter that
stores data into a PostgreSQL database.
When used, the repository expects the `:otp_app` as option.
The `:otp_app` should point to an OTP application that has
the repository configuration. For example, the repository:
defmodule Repo do
use Ecto.Repo,
otp_app: :my_app,
adapter: Ecto.Adapters.Postgres
end
Could be configured with:
config :my_app, Repo,
database: "ecto_simple",
username: "postgres",
password: "postgres",
hostname: "localhost"
Most of the configuration that goes into the `config` is specific
to the adapter, so check `Ecto.Adapters.Postgres` documentation
for more information. However, some configuration is shared across
all adapters, they are:
* `:name`- The name of the Repo supervisor process
* `:priv` - the directory where to keep repository data, like
migrations, schema and more. Defaults to "priv/YOUR_REPO".
It must always point to a subdirectory inside the priv directory.
* `:url` - an URL that specifies storage information. Read below
for more information
* `:log` - the log level used when logging the query with Elixir's
Logger. If false, disables logging for that repository.
Defaults to `:debug`.
* `:telemetry_prefix` - we recommend adapters to publish events
using the `Telemetry` library. By default, the telemetry prefix
is based on the module name, so if your module is called
`MyApp.Repo`, the prefix will be `[:my_app, :repo]`. See the
"Telemetry Events" section to see which events we recommend
adapters to publish
## URLs
Repositories by default support URLs. For example, the configuration
above could be rewritten to:
config :my_app, Repo,
url: "ecto://postgres:postgres@localhost/ecto_simple"
  The URL scheme can be of any value. The path represents the database name
  while options are simply merged in.
URL can include query parameters to override shared and adapter-specific
options `ssl`, `timeout`, `pool_timeout`, `pool_size`:
config :my_app, Repo,
url: "ecto://postgres:postgres@localhost/ecto_simple?ssl=true&pool_size=10"
In case the URL needs to be dynamically configured, for example by
reading a system environment variable, such can be done via the
`c:init/2` repository callback:
def init(_type, config) do
{:ok, Keyword.put(config, :url, System.get_env("DATABASE_URL"))}
end
## Shared options
Almost all of the repository operations below accept the following
options:
* `:timeout` - The time in milliseconds to wait for the query call to
finish, `:infinity` will wait indefinitely (default: 15000);
* `:pool_timeout` - The time in milliseconds to wait for calls to the pool
to finish, `:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
Such cases will be explicitly documented as well as any extra option.
## Telemetry events
We recommend adapters to publish certain `Telemetry` events listed below.
Those events will use the `:telemetry_prefix` outlined above. See the
`Telemetry` library for information on how to handle such events. All
examples below consider a repository named `MyApp.Repo`:
* `[:my_app, :repo, :query]` - should be invoked on every query send
to the adapter, including queries that are related to the transaction
management. The payload will be an `Ecto.LogEntry` struct
"""
@type t :: module
@doc false
defmacro __using__(opts) do
  # Injected into every module that does `use Ecto.Repo`. The generated code
  # reads :otp_app and :adapter at compile time, then conditionally defines
  # the schema/queryable/transaction API depending on which Ecto.Adapter.*
  # behaviours the adapter implements.
  quote bind_quoted: [opts: opts] do
    @behaviour Ecto.Repo

    {otp_app, adapter, behaviours} = Ecto.Repo.Supervisor.compile_config(__MODULE__, opts)
    @otp_app otp_app
    @adapter adapter
    # The adapter gets a chance to inject its own code (e.g. query helpers).
    @before_compile adapter

    # Returns the runtime configuration (runs the repo's init/2 if defined).
    def config do
      {:ok, config} = Ecto.Repo.Supervisor.runtime_config(:runtime, __MODULE__, @otp_app, [])
      config
    end

    def __adapter__ do
      @adapter
    end

    # Runs as a supervisor so the adapter's pool is supervised underneath.
    def child_spec(opts) do
      %{
        id: __MODULE__,
        start: {__MODULE__, :start_link, [opts]},
        type: :supervisor
      }
    end

    def start_link(opts \\ []) do
      Ecto.Repo.Supervisor.start_link(__MODULE__, @otp_app, @adapter, opts)
    end

    def stop(timeout \\ 5000) do
      Supervisor.stop(__MODULE__, :normal, timeout)
    end

    def load(schema_or_types, data) do
      Ecto.Repo.Schema.load(@adapter, schema_or_types, data)
    end

    # Checks out a single connection for the duration of `fun`.
    def checkout(fun, opts \\ []) when is_function(fun) do
      {adapter, meta} = Ecto.Repo.Registry.lookup(__MODULE__)
      adapter.checkout(meta, opts, fun)
    end

    ## Transactions
    # Only defined when the adapter supports transactions.

    if Ecto.Adapter.Transaction in behaviours do
      def transaction(fun_or_multi, opts \\ []) do
        Ecto.Repo.Transaction.transaction(__MODULE__, fun_or_multi, opts)
      end

      def in_transaction? do
        Ecto.Repo.Transaction.in_transaction?(__MODULE__)
      end

      @spec rollback(term) :: no_return
      def rollback(value) do
        Ecto.Repo.Transaction.rollback(__MODULE__, value)
      end
    end

    ## Schemas
    # Struct/changeset persistence API; defined when the adapter supports it.

    if Ecto.Adapter.Schema in behaviours do
      def insert(struct, opts \\ []) do
        Ecto.Repo.Schema.insert(__MODULE__, struct, opts)
      end

      def update(struct, opts \\ []) do
        Ecto.Repo.Schema.update(__MODULE__, struct, opts)
      end

      def insert_or_update(changeset, opts \\ []) do
        Ecto.Repo.Schema.insert_or_update(__MODULE__, changeset, opts)
      end

      def delete(struct, opts \\ []) do
        Ecto.Repo.Schema.delete(__MODULE__, struct, opts)
      end

      # Bang variants raise instead of returning {:error, changeset}.
      def insert!(struct, opts \\ []) do
        Ecto.Repo.Schema.insert!(__MODULE__, struct, opts)
      end

      def update!(struct, opts \\ []) do
        Ecto.Repo.Schema.update!(__MODULE__, struct, opts)
      end

      def insert_or_update!(changeset, opts \\ []) do
        Ecto.Repo.Schema.insert_or_update!(__MODULE__, changeset, opts)
      end

      def delete!(struct, opts \\ []) do
        Ecto.Repo.Schema.delete!(__MODULE__, struct, opts)
      end

      def insert_all(schema_or_source, entries, opts \\ []) do
        Ecto.Repo.Schema.insert_all(__MODULE__, schema_or_source, entries, opts)
      end
    end

    ## Queryable
    # Query execution API; defined when the adapter supports queries.

    if Ecto.Adapter.Queryable in behaviours do
      def update_all(queryable, updates, opts \\ []) do
        Ecto.Repo.Queryable.update_all(__MODULE__, queryable, updates, opts)
      end

      def delete_all(queryable, opts \\ []) do
        Ecto.Repo.Queryable.delete_all(__MODULE__, queryable, opts)
      end

      def all(queryable, opts \\ []) do
        Ecto.Repo.Queryable.all(__MODULE__, queryable, opts)
      end

      def stream(queryable, opts \\ []) do
        Ecto.Repo.Queryable.stream(__MODULE__, queryable, opts)
      end

      def get(queryable, id, opts \\ []) do
        Ecto.Repo.Queryable.get(__MODULE__, queryable, id, opts)
      end

      def get!(queryable, id, opts \\ []) do
        Ecto.Repo.Queryable.get!(__MODULE__, queryable, id, opts)
      end

      def get_by(queryable, clauses, opts \\ []) do
        Ecto.Repo.Queryable.get_by(__MODULE__, queryable, clauses, opts)
      end

      def get_by!(queryable, clauses, opts \\ []) do
        Ecto.Repo.Queryable.get_by!(__MODULE__, queryable, clauses, opts)
      end

      def one(queryable, opts \\ []) do
        Ecto.Repo.Queryable.one(__MODULE__, queryable, opts)
      end

      def one!(queryable, opts \\ []) do
        Ecto.Repo.Queryable.one!(__MODULE__, queryable, opts)
      end

      # Only the aggregations listed in the guard are supported.
      def aggregate(queryable, aggregate, field, opts \\ [])
          when aggregate in [:count, :avg, :max, :min, :sum] and is_atom(field) do
        Ecto.Repo.Queryable.aggregate(__MODULE__, queryable, aggregate, field, opts)
      end

      def exists?(queryable, opts \\ []) do
        Ecto.Repo.Queryable.exists?(__MODULE__, queryable, opts)
      end

      def preload(struct_or_structs_or_nil, preloads, opts \\ []) do
        Ecto.Repo.Preloader.preload(struct_or_structs_or_nil, __MODULE__, preloads, opts)
      end
    end
  end
end
## User callbacks
@optional_callbacks init: 2
@doc """
A callback executed when the repo starts or when configuration is read.
The first argument is the context the callback is being invoked. If it
is called because the Repo supervisor is starting, it will be `:supervisor`.
It will be `:runtime` if it is called for reading configuration without
actually starting a process.
The second argument is the repository configuration as stored in the
application environment. It must return `{:ok, keyword}` with the updated
list of configuration or `:ignore` (only in the `:supervisor` case).
"""
@callback init(:supervisor | :runtime, config :: Keyword.t()) :: {:ok, Keyword.t()} | :ignore
## Ecto.Adapter
@doc """
Returns the adapter tied to the repository.
"""
@callback __adapter__ :: Ecto.Adapter.t()
@doc """
Returns the adapter configuration stored in the `:otp_app` environment.
If the `c:init/2` callback is implemented in the repository,
it will be invoked with the first argument set to `:runtime`.
"""
@callback config() :: Keyword.t()
@doc """
Starts any connection pooling or supervision and return `{:ok, pid}`
or just `:ok` if nothing needs to be done.
Returns `{:error, {:already_started, pid}}` if the repo is already
started or `{:error, term}` in case anything else goes wrong.
## Options
See the configuration in the moduledoc for options shared between adapters,
for adapter-specific configuration see the adapter's documentation.
"""
@callback start_link(opts :: Keyword.t()) ::
{:ok, pid}
| {:error, {:already_started, pid}}
| {:error, term}
@doc """
Shuts down the repository.
"""
@callback stop(timeout) :: :ok
@doc """
Checks out a connection for the duration of the function.
It returns the result of the function. This is useful when
you need to perform multiple operations against the repository
in a row and you want to avoid checking out the connection
multiple times.
`checkout/2` and `transaction/2` can be combined and nested
multiple times. If `checkout/2` is called inside the function
of another `checkout/2` call, the function is simply executed,
without checking out a new connection.
## Options
See the "Shared options" section at the module documentation.
"""
@callback checkout((() -> result), opts :: Keyword.t()) :: result when result: var
@doc """
Loads `data` into a struct or a map.
The first argument can be a a schema module, or a
map (of types) and determines the return value:
a struct or a map, respectively.
The second argument `data` specifies fields and values that are to be loaded.
It can be a map, a keyword list, or a `{fields, values}` tuple.
Fields can be atoms or strings.
Fields that are not present in the schema (or `types` map) are ignored.
If any of the values has invalid type, an error is raised.
## Examples
iex> MyRepo.load(User, %{name: "Alice", age: 25})
%User{name: "Alice", age: 25}
iex> MyRepo.load(User, [name: "Alice", age: 25])
%User{name: "Alice", age: 25}
`data` can also take form of `{fields, values}`:
iex> MyRepo.load(User, {[:name, :age], ["Alice", 25]})
%User{name: "Alice", age: 25, ...}
The first argument can also be a `types` map:
iex> types = %{name: :string, age: :integer}
iex> MyRepo.load(types, %{name: "Alice", age: 25})
%{name: "Alice", age: 25}
This function is especially useful when parsing raw query results:
iex> result = Ecto.Adapters.SQL.query!(MyRepo, "SELECT * FROM users", [])
iex> Enum.map(result.rows, &MyRepo.load(User, {result.columns, &1}))
[%User{...}, ...]
"""
@callback load(
module_or_map :: module | map(),
data :: map() | Keyword.t() | {list, list}
) :: Ecto.Schema.t() | map()
## Ecto.Adapter.Queryable
@optional_callbacks get: 3, get!: 3, get_by: 3, get_by!: 3, aggregate: 4, one: 2, one!: 2,
preload: 3, all: 2, stream: 2, update_all: 3, delete_all: 2
@doc """
Fetches a single struct from the data store where the primary key matches the
given id.
Returns `nil` if no result was found. If the struct in the queryable
has no or more than one primary key, it will raise an argument error.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get(Post, 42)
"""
@callback get(queryable :: Ecto.Queryable.t(), id :: term, opts :: Keyword.t()) ::
Ecto.Schema.t() | nil
@doc """
Similar to `c:get/3` but raises `Ecto.NoResultsError` if no record was found.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get!(Post, 42)
"""
@callback get!(queryable :: Ecto.Queryable.t(), id :: term, opts :: Keyword.t()) ::
Ecto.Schema.t() | nil
@doc """
Fetches a single result from the query.
Returns `nil` if no result was found. Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get_by(Post, title: "My post")
"""
@callback get_by(
queryable :: Ecto.Queryable.t(),
clauses :: Keyword.t() | map,
opts :: Keyword.t()
) :: Ecto.Schema.t() | nil
@doc """
Similar to `get_by/3` but raises `Ecto.NoResultsError` if no record was found.
Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get_by!(Post, title: "My post")
"""
@callback get_by!(
queryable :: Ecto.Queryable.t(),
clauses :: Keyword.t() | map,
opts :: Keyword.t()
) :: Ecto.Schema.t() | nil
@doc """
Calculate the given `aggregate` over the given `field`.
If the query has a limit, offset or distinct set, it will be
automatically wrapped in a subquery in order to return the
proper result.
Any preload or select in the query will be ignored in favor of
the column being aggregated.
The aggregation will fail if any `group_by` field is set.
## Options
See the "Shared options" section at the module documentation.
## Examples
# Returns the number of visits per blog post
Repo.aggregate(Post, :count, :visits)
# Returns the average number of visits for the top 10
query = from Post, limit: 10
Repo.aggregate(query, :avg, :visits)
"""
@callback aggregate(
queryable :: Ecto.Queryable.t(),
aggregate :: :avg | :count | :max | :min | :sum,
field :: atom,
opts :: Keyword.t()
) :: term | nil
@doc """
Fetches a single result from the query.
Returns `nil` if no result was found. Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
"""
@callback one(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) ::
Ecto.Schema.t() | nil
@doc """
Similar to `c:one/2` but raises `Ecto.NoResultsError` if no record was found.
Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
"""
@callback one!(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Preloads all associations on the given struct or structs.
This is similar to `Ecto.Query.preload/3` except it allows
you to preload structs after they have been fetched from the
database.
In case the association was already loaded, preload won't attempt
to reload it.
## Options
Besides the "Shared options" section at the module documentation,
it accepts:
* `:force` - By default, Ecto won't preload associations that
are already loaded. By setting this option to true, any existing
association will be discarded and reloaded.
* `:in_parallel` - If the preloads must be done in parallel. It can
only be performed when we have more than one preload and the
repository is not in a transaction. Defaults to `true`.
* `:prefix` - the prefix to fetch preloads from. By default, queries
will use the same prefix as the one in the given collection. This
option allows the prefix to be changed.
## Examples
# Use a single atom to preload an association
posts = Repo.preload posts, :comments
# Use a list of atoms to preload multiple associations
posts = Repo.preload posts, [:comments, :authors]
# Use a keyword list to preload nested associations as well
posts = Repo.preload posts, [comments: [:replies, :likes], authors: []]
# Use a keyword list to customize how associations are queried
posts = Repo.preload posts, [comments: from(c in Comment, order_by: c.published_at)]
# Use a two-element tuple for a custom query and nested association definition
query = from c in Comment, order_by: c.published_at
posts = Repo.preload posts, [comments: {query, [:replies, :likes]}]
The query given to preload may also preload its own associations.
"""
@callback preload(structs_or_struct_or_nil, preloads :: term, opts :: Keyword.t()) ::
structs_or_struct_or_nil
when structs_or_struct_or_nil: [Ecto.Schema.t()] | Ecto.Schema.t() | nil
@doc """
Fetches all entries from the data store matching the given query.
May raise `Ecto.QueryError` if query validation fails.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query.
See the "Shared options" section at the module documentation.
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
MyRepo.all(query)
"""
@callback all(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) :: [Ecto.Schema.t()]
@doc """
Returns a lazy enumerable that emits all entries from the data store
matching the given query.
SQL adapters, such as Postgres and MySQL, can only enumerate a stream
inside a transaction.
May raise `Ecto.QueryError` if query validation fails.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query
* `:max_rows` - The number of rows to load from the database as we stream.
It is supported at least by Postgres and MySQL and defaults to 500.
See the "Shared options" section at the module documentation.
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
stream = MyRepo.stream(query)
MyRepo.transaction(fn() ->
Enum.to_list(stream)
end)
"""
@callback stream(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) :: Enum.t()
@doc """
Updates all entries matching the given query with the given values.
It returns a tuple containing the number of entries and any returned
result as second element. The second element is `nil` by default
unless a `select` is supplied in the update query. Note, however,
not all databases support returning data from UPDATEs.
Keep in mind this `update_all` will not update autogenerated
fields like the `updated_at` columns.
See `Ecto.Query.update/3` for update operations that can be
performed on fields.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query.
See the "Shared options" section at the module documentation for
remaining options.
## Examples
MyRepo.update_all(Post, set: [title: "New title"])
MyRepo.update_all(Post, inc: [visits: 1])
MyRepo.update_all(Post, [inc: [visits: 1]], [returning: [:visits]])
from(p in Post, where: p.id < 10)
|> MyRepo.update_all(set: [title: "New title"])
from(p in Post, where: p.id < 10, update: [set: [title: "New title"]])
|> MyRepo.update_all([])
from(p in Post, where: p.id < 10, update: [set: [title: ^new_title]])
|> MyRepo.update_all([])
from(p in Post, where: p.id < 10, update: [set: [title: fragment("upper(?)", ^new_title)]])
|> MyRepo.update_all([])
"""
@callback update_all(
queryable :: Ecto.Queryable.t(),
updates :: Keyword.t(),
opts :: Keyword.t()
) :: {integer, nil | [term]}
@doc """
Deletes all entries matching the given query.
It returns a tuple containing the number of entries and any returned
result as second element. The second element is `nil` by default
unless a `select` is supplied in the update query. Note, however,
not all databases support returning data from DELETEs.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query.
See the "Shared options" section at the module documentation for
remaining options.
## Examples
MyRepo.delete_all(Post)
from(p in Post, where: p.id < 10) |> MyRepo.delete_all
"""
@callback delete_all(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) ::
{integer, nil | [term]}
## Ecto.Adapter.Schema
@optional_callbacks insert_all: 3, insert: 2, insert!: 2, update: 2, update!: 2,
delete: 2, delete!: 2, insert_or_update: 2, insert_or_update!: 2
@doc """
Inserts all entries into the repository.
It expects a schema module (`MyApp.User`) or a source (`"users"`) or
both (`{"users", MyApp.User}`) as the first argument. The second
argument is a list of entries to be inserted, either as keyword
lists or as maps.
It returns a tuple containing the number of entries
and any returned result as second element. If the database
does not support RETURNING in INSERT statements or no
return result was selected, the second element will be `nil`.
When a schema module is given, the entries given will be properly dumped
before being sent to the database. If the schema contains an
autogenerated ID field, it will be handled either at the adapter
or the storage layer. However any other autogenerated value, like
timestamps, won't be autogenerated when using `c:insert_all/3`.
This is by design as this function aims to be a more direct way
to insert data into the database without the conveniences of
`c:insert/2`. This is also consistent with `c:update_all/3` that
does not handle timestamps as well.
It is also not possible to use `insert_all` to insert across multiple
tables, therefore associations are not supported.
If a source is given, without a schema module, the given fields are passed
as is to the adapter.
## Options
* `:returning` - selects which fields to return. When `true`,
returns all fields in the given schema. May be a list of
fields, where a struct is still returned but only with the
given fields. Or `false`, where nothing is returned (the default).
This option is not supported by all databases.
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL).
* `:on_conflict` - It may be one of `:raise` (the default), `:nothing`,
`:replace_all`, `:replace_all_except_primary_key`, `{:replace, fields}`,
a keyword list of update instructions, `{:replace, fields}` or an `Ecto.Query`
query for updates. See the "Upserts" section for more information.
* `:conflict_target` - A list of column names to verify for conflicts.
It is expected those columns to have unique indexes on them that may conflict.
If none is specified, the conflict target is left up to the database.
It may also be `{:constraint, constraint_name_as_atom}` in databases
that support the "ON CONSTRAINT" expression, such as PostgreSQL, or
`{:unsafe_fragment, binary_fragment}` to pass any expression to the
database without any sanitization, such as
`ON CONFLICT (coalesce(firstname, ""), coalesce(lastname, ""))`.
See the "Shared options" section at the module documentation for
remaining options.
## Examples
MyRepo.insert_all(Post, [[title: "My first post"], [title: "My second post"]])
MyRepo.insert_all(Post, [%{title: "My first post"}, %{title: "My second post"}])
## Upserts
`c:insert_all/3` provides upserts (update or inserts) via the `:on_conflict`
option. The `:on_conflict` option supports the following values:
* `:raise` - raises if there is a conflicting primary key or unique index
* `:nothing` - ignores the error in case of conflicts
* `:replace_all` - replace all values on the existing row by the new entry,
including values not sent explicitly by Ecto, such as database defaults.
This option requires a schema
* `:replace_all_except_primary_key` - same as above except primary keys are
not replaced. This option requires a schema
* `{:replace, fields}` - replace only specific columns. This option requires
conflict_target
* a keyword list of update instructions - such as the one given to
`c:update_all/3`, for example: `[set: [title: "new title"]]`
* an `Ecto.Query` that will act as an `UPDATE` statement, such as the
one given to `c:update_all/3`
Upserts map to "ON CONFLICT" on databases like Postgres and "ON DUPLICATE KEY"
on databases such as MySQL.
## Return values
By default, both Postgres and MySQL return the amount of entries
inserted on `c:insert_all/3`. However, when the `:on_conflict` option
is specified, Postgres will only return a row if it was affected
while MySQL returns at least the number of entries attempted.
For example, if `:on_conflict` is set to `:nothing`, Postgres will
return 0 if no new entry was added while MySQL will still return
the amount of entries attempted to be inserted, even if no entry
was added. Even worse, if `:on_conflict` is query, MySQL will return
the number of attempted entries plus the number of entries modified
by the UPDATE query.
"""
@callback insert_all(
schema_or_source :: binary | {binary, module} | module,
entries :: [map | Keyword.t()],
opts :: Keyword.t()
) :: {integer, nil | [term]}
@doc """
Inserts a struct defined via `Ecto.Schema` or a changeset.
In case a struct is given, the struct is converted into a changeset
with all non-nil fields as part of the changeset.
In case a changeset is given, the changes in the changeset are
merged with the struct fields, and all of them are sent to the
database.
It returns `{:ok, struct}` if the struct has been successfully
inserted or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
* `:returning` - selects which fields to return. When `true`, returns
all fields in the given struct. May be a list of fields, where a
struct is still returned but only with the given fields. In any case,
it will include fields with `read_after_writes` set to true.
Not all databases support this option.
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
* `:on_conflict` - It may be one of `:raise` (the default), `:nothing`,
`:replace_all`, `:replace_all_except_primary_key`, `{:replace, fields}`,
a keyword list of update instructions or an `Ecto.Query` query for updates.
See the "Upserts" section for more information.
* `:conflict_target` - A list of column names to verify for conflicts.
It is expected those columns to have unique indexes on them that may conflict.
If none is specified, the conflict target is left up to the database.
May also be `{:constraint, constraint_name_as_atom}` in databases
that support the "ON CONSTRAINT" expression, such as PostgreSQL.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
See the "Shared options" section at the module documentation.
## Examples
A typical example is calling `MyRepo.insert/1` with a struct
and acting on the return value:
case MyRepo.insert %Post{title: "Ecto is great"} do
{:ok, struct} -> # Inserted with success
{:error, changeset} -> # Something went wrong
end
## Upserts
`c:insert/2` provides upserts (update or inserts) via the `:on_conflict`
option. The `:on_conflict` option supports the following values:
* `:raise` - raises if there is a conflicting primary key or unique index
* `:nothing` - ignores the error in case of conflicts
* `:replace_all` - replace all values on the existing row with the values
in the schema/changeset, including autogenerated fields such as `inserted_at`
and `updated_at`
* `:replace_all_except_primary_key` - same as above except primary keys are
not replaced
* `{:replace, fields}` - replace only specific columns. This option requires
conflict_target
* a keyword list of update instructions - such as the one given to
`c:update_all/3`, for example: `[set: [title: "new title"]]`
* an `Ecto.Query` that will act as an `UPDATE` statement, such as the
one given to `c:update_all/3`
Upserts map to "ON CONFLICT" on databases like Postgres and "ON DUPLICATE KEY"
on databases such as MySQL.
As an example, imagine `:title` is marked as a unique column in
the database:
{:ok, inserted} = MyRepo.insert(%Post{title: "this is unique"})
Now we can insert with the same title but do nothing on conflicts:
{:ok, ignored} = MyRepo.insert(%Post{title: "this is unique"}, on_conflict: :nothing)
assert ignored.id == nil
Because we used `on_conflict: :nothing`, instead of getting an error,
we got `{:ok, struct}`. However the returned struct does not reflect
the data in the database. One possible mechanism to detect if an
insert or nothing happened in case of `on_conflict: :nothing` is by
checking the `id` field. `id` will be nil if the field is autogenerated
by the database and no insert happened.
For actual upserts, where an insert or update may happen, the situation
is slightly more complex, as the database does not actually inform us
if an insert or update happened. Let's insert a post with the same title
but use a query to update the body column in case of conflicts:
# In Postgres (it requires the conflict target for updates):
on_conflict = [set: [body: "updated"]]
{:ok, updated} = MyRepo.insert(%Post{title: "this is unique"},
on_conflict: on_conflict, conflict_target: :title)
# In MySQL (conflict target is not supported):
on_conflict = [set: [title: "updated"]]
{:ok, updated} = MyRepo.insert(%Post{id: inserted.id, title: "updated"},
on_conflict: on_conflict)
In the examples above, even though it returned `:ok`, we do not know
if we inserted new data or if we updated only the `:on_conflict` fields.
In case an update happened, the data in the struct most likely does
not match the data in the database. For example, autogenerated fields
such as `inserted_at` will point to now rather than the time the
struct was actually inserted.
If you need to guarantee the data in the returned struct mirrors the
database, you have three options:
* Use `on_conflict: :replace_all`, although that will replace all
fields in the database with the ones in the struct/changeset,
      including autogenerated fields such as `inserted_at` and `updated_at`:
MyRepo.insert(%Post{title: "this is unique"},
on_conflict: :replace_all, conflict_target: :title)
* Specify `read_after_writes: true` in your schema for choosing
fields that are read from the database after every operation.
Or pass `returning: true` to `insert` to read all fields back:
MyRepo.insert(%Post{title: "this is unique"}, returning: true,
on_conflict: on_conflict, conflict_target: :title)
* Alternatively, read the data again from the database in a separate
query. This option requires the primary key to be generated by the
database:
{:ok, updated} = MyRepo.insert(%Post{title: "this is unique"}, on_conflict: on_conflict)
Repo.get(Post, updated.id)
Because of the inability to know if the struct is up to date or not,
using associations with the `:on_conflict` option is not recommended.
For instance, Ecto may even trigger constraint violations when associations
are used with `on_conflict: :nothing`, as no ID will be available in
the case the record already exists, and it is not possible for Ecto to
detect such cases reliably.
"""
@callback insert(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Updates a changeset using its primary key.
A changeset is required as it is the only mechanism for
tracking dirty changes. Only the fields present in the `changes` part
of the changeset are sent to the database. Any other, in-memory
changes done to the schema are ignored.
If the struct has no primary key, `Ecto.NoPrimaryKeyFieldError`
will be raised.
It returns `{:ok, struct}` if the struct has been successfully
updated or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
Besides the "Shared options" section at the module documentation,
it accepts:
* `:force` - By default, if there are no changes in the changeset,
`c:update/2` is a no-op. By setting this option to true, update
callbacks will always be executed, even if there are no changes
(including timestamps).
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
## Example
post = MyRepo.get!(Post, 42)
post = Ecto.Changeset.change post, title: "New title"
case MyRepo.update post do
{:ok, struct} -> # Updated with success
{:error, changeset} -> # Something went wrong
end
"""
@callback update(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
{:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Inserts or updates a changeset depending on whether the struct is persisted
or not.
The distinction whether to insert or update will be made on the
`Ecto.Schema.Metadata` field `:state`. The `:state` is automatically set by
Ecto when loading or building a schema.
Please note that for this to work, you will have to load existing structs from
the database. So even if the struct exists, this won't work:
struct = %Post{id: "existing_id", ...}
MyRepo.insert_or_update changeset
# => {:error, changeset} # id already exists
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
See the "Shared options" section at the module documentation.
## Example
result =
case MyRepo.get(Post, id) do
nil -> %Post{id: id} # Post not found, we build one
post -> post # Post exists, let's use it
end
|> Post.changeset(changes)
|> MyRepo.insert_or_update
case result do
{:ok, struct} -> # Inserted or updated with success
{:error, changeset} -> # Something went wrong
end
"""
@callback insert_or_update(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
{:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Deletes a struct using its primary key.
If the struct has no primary key, `Ecto.NoPrimaryKeyFieldError`
will be raised. If the struct has been removed from db prior to
call, `Ecto.StaleEntryError` will be raised.
It returns `{:ok, struct}` if the struct has been successfully
deleted or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
See the "Shared options" section at the module documentation.
## Example
post = MyRepo.get!(Post, 42)
case MyRepo.delete post do
{:ok, struct} -> # Deleted with success
{:error, changeset} -> # Something went wrong
end
"""
@callback delete(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Same as `c:insert/2` but returns the struct or raises if the changeset is invalid.
"""
@callback insert!(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: Ecto.Schema.t()
@doc """
Same as `c:update/2` but returns the struct or raises if the changeset is invalid.
"""
@callback update!(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Same as `c:insert_or_update/2` but returns the struct or raises if the changeset
is invalid.
"""
@callback insert_or_update!(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Same as `c:delete/2` but returns the struct or raises if the changeset is invalid.
"""
@callback delete!(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: Ecto.Schema.t()
## Ecto.Adapter.Transaction
@optional_callbacks transaction: 2, in_transaction?: 0, rollback: 1
@doc """
Runs the given function or `Ecto.Multi` inside a transaction.
## Use with function
If an unhandled error occurs the transaction will be rolled back
and the error will bubble up from the transaction function.
If no error occurred the transaction will be committed when the
function returns. A transaction can be explicitly rolled back
by calling `c:rollback/1`, this will immediately leave the function
and return the value given to `rollback` as `{:error, value}`.
A successful transaction returns the value returned by the function
wrapped in a tuple as `{:ok, value}`.
If `c:transaction/2` is called inside another transaction, the function
is simply executed, without wrapping the new transaction call in any
way. If there is an error in the inner transaction and the error is
rescued, or the inner transaction is rolled back, the whole outer
transaction is marked as tainted, guaranteeing nothing will be committed.
## Use with Ecto.Multi
Besides functions transaction can be used with an Ecto.Multi struct.
Transaction will be started, all operations applied and in case of
success committed returning `{:ok, changes}`. In case of any errors
the transaction will be rolled back and
`{:error, failed_operation, failed_value, changes_so_far}` will be
returned.
You can read more about using transactions with `Ecto.Multi` as well as
see some examples in the `Ecto.Multi` documentation.
## Options
See the "Shared options" section at the module documentation.
## Examples
import Ecto.Changeset, only: [change: 2]
MyRepo.transaction(fn ->
MyRepo.update!(change(alice, balance: alice.balance - 10))
MyRepo.update!(change(bob, balance: bob.balance + 10))
end)
# Roll back a transaction explicitly
MyRepo.transaction(fn ->
p = MyRepo.insert!(%Post{})
if not Editor.post_allowed?(p) do
MyRepo.rollback(:posting_not_allowed)
end
end)
# With Ecto.Multi
Ecto.Multi.new
|> Ecto.Multi.insert(:post, %Post{})
|> MyRepo.transaction
"""
@callback transaction(fun_or_multi :: fun | Ecto.Multi.t(), opts :: Keyword.t()) ::
{:ok, any}
| {:error, any}
| {:error, Ecto.Multi.name(), any, %{Ecto.Multi.name() => any}}
@doc """
Returns true if the current process is inside a transaction.
If you are using the `Ecto.Adapters.SQL.Sandbox` in tests, note that even
though each test is inside a transaction, `in_transaction?/0` will only
return true inside transactions explicitly created with `transaction/2`. This
is done so the test environment mimics dev and prod.
If you are trying to debug transaction-related code while using
`Ecto.Adapters.SQL.Sandbox`, it may be more helpful to configure the database
to log all statements and consult those logs.
## Examples
MyRepo.in_transaction?
#=> false
MyRepo.transaction(fn ->
MyRepo.in_transaction? #=> true
end)
"""
@callback in_transaction?() :: boolean
@doc """
Rolls back the current transaction.
The transaction will return the value given as `{:error, value}`.
"""
@callback rollback(value :: any) :: no_return
end
| 35.762656 | 98 | 0.666288 |
9e13051018af197ec54323b8764e9798ad7254dc | 2,451 | ex | Elixir | apps/omg_api/test/support/state/prop_test/deposits.ex | SingularityMatrix/elixir-omg | 7db3fcc3adfa303e30ff7703148cc5110b587d20 | [
"Apache-2.0"
] | null | null | null | apps/omg_api/test/support/state/prop_test/deposits.ex | SingularityMatrix/elixir-omg | 7db3fcc3adfa303e30ff7703148cc5110b587d20 | [
"Apache-2.0"
] | null | null | null | apps/omg_api/test/support/state/prop_test/deposits.ex | SingularityMatrix/elixir-omg | 7db3fcc3adfa303e30ff7703148cc5110b587d20 | [
"Apache-2.0"
] | 2 | 2020-06-07T11:14:54.000Z | 2020-08-02T07:36:32.000Z | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.API.State.PropTest.Deposits do
  @moduledoc """
  Generates function needed to place deposit in propcheck test
  """
  use PropCheck
  alias OMG.API.PropTest.Generators
  alias OMG.API.PropTest.Helper
  # Command implementation: forwards the generated deposit list to the
  # state-machine wrapper around the state core.
  def impl(deposits), do: OMG.API.State.PropTest.StateCoreGS.deposit(deposits)
  # PropCheck generator for the command arguments: builds 1..3 deposits whose
  # blknum values directly follow the model's current Ethereum block height,
  # each with a generated currency, owner address and amount.
  def args(%{eth: %{blknum: blknum}}) do
    let [number_of_deposit <- integer(1, 3)] do
      [
        for number <- 1..number_of_deposit do
          let(
            [
              currency <- Generators.get_currency(),
              %{addr: owner} <- Generators.entity(),
              amount <- integer(10_000, 300_000)
            ],
            do: %{blknum: blknum + number, currency: currency, owner: owner, amount: amount}
          )
        end
      ]
    end
  end
  @doc "check if expected block has good blknum"
  def pre(%{eth: %{blknum: blknum}}, [deposits]) do
    list_block = deposits |> Enum.map(fn %{blknum: blknum} -> blknum end)
    # deposits must carry consecutive blknums right after the current height
    # and must not cross the next child-block boundary (multiples of 1000)
    expected = for i <- (blknum + 1)..(blknum + length(deposits)), do: i
    rem(blknum, 1000) + length(deposits) < 1000 and expected == list_block
  end
  # Postcondition: a successful deposit emits exactly one {:put, :utxo, _}
  # db update per deposited entry.
  def post(_state, [arg], {:ok, {_, db_update}}) do
    new_utxo =
      db_update
      |> Enum.filter(&match?({:put, :utxo, _}, &1))
      |> length
    length(arg) == new_utxo
  end
  # Model state transition: advances the expected Ethereum block height,
  # prepends the deposits to the model history and adds their amounts to the
  # tracked total balance.
  def next(%{eth: %{blknum: blknum} = eth, model: %{history: history, balance: balance} = model} = state, [args], _) do
    new_balance = Enum.reduce(args, balance, fn %{amount: amount}, balance -> balance + amount end)
    %{
      state
      | eth: %{eth | blknum: blknum + length(args)},
        model: %{model | history: [{:deposits, Helper.format_deposits(args)} | history], balance: new_balance}
    }
  end
  # Registers the :deposits command in the including propcheck model module,
  # delegating to the callbacks defined above.
  defmacro __using__(_opt) do
    quote([location: :keep], do: defcommand(:deposits, do: unquote(Helper.create_delegate_to_defcommand(__MODULE__))))
  end
end
| 34.041667 | 119 | 0.647899 |
9e13468390ad6f16535c99cab9512ca2e8c0eea7 | 2,792 | exs | Elixir | apps/writer_kafka/test/writer/kafka/topic_test.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | apps/writer_kafka/test/writer/kafka/topic_test.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | apps/writer_kafka/test/writer/kafka/topic_test.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | defmodule Writer.Kafka.TopicTest do
  use ExUnit.Case
  use Divo
  use Placebo
  import AssertAsync
  @moduletag integration: true, divo: true
  alias Writer.Kafka.Topic
  # Kafka endpoint of the integration stack brought up via Divo.
  @server [localhost: 9092]
  # Attach a telemetry handler that relays [:writer, :kafka, :produce] events
  # to the test process, so tests can assert on emitted metrics with
  # assert_receive; the handler is detached when the test exits.
  setup do
    test = self()
    handler_function = fn event_name, event_measurements, event_metadata, handler_config ->
      send(
        test,
        {:telemetry_event, event_name, event_measurements, event_metadata, handler_config}
      )
    end
    :telemetry.attach(__MODULE__, [:writer, :kafka, :produce], handler_function, %{})
    on_exit(fn -> :telemetry.detach(__MODULE__) end)
    :ok
  end
  test "topic writer will create topic and produce messages" do
    {:ok, writer} =
      start_supervised(
        {Topic,
         endpoints: @server,
         topic: "topic-435",
         metric_metadata: %{app: "testing", dataset_id: "ds1", subset_id: "sb1"}}
      )
    # Messages may be bare values or {key, value} tuples; a bare value is
    # expected to land in Kafka with an empty key.
    :ok = Topic.write(writer, ["message1", {"key2", "message2"}])
    assert_async debug: true do
      assert Elsa.topic?(@server, "topic-435")
      {:ok, _count, messages} = Elsa.fetch(@server, "topic-435")
      assert [{"", "message1"}, {"key2", "message2"}] == Enum.map(messages, &{&1.key, &1.value})
    end
    # The produce telemetry event must carry the configured metric metadata
    # plus the topic name.
    expected_metadata = %{app: "testing", dataset_id: "ds1", subset_id: "sb1", topic: "topic-435"}
    assert_receive {:telemetry_event, [:writer, :kafka, :produce], %{count: 2},
                    ^expected_metadata, %{}}
  end
  test "topic writer will report correct number of messages sent, in case of partial failure" do
    # Stub Elsa to fail with "message3" unsent; only 2 of the 3 messages
    # should be counted as produced in the telemetry event.
    allow Elsa.produce(any(), "topic-435", any(), any()),
      return: {:error, "failure", ["message3"]}
    {:ok, writer} = start_supervised({Topic, endpoints: @server, topic: "topic-435"})
    assert {:error, "failure", ["message3"]} =
             Topic.write(writer, ["message1", "message2", "message3"])
    assert_receive {:telemetry_event, [:writer, :kafka, :produce], %{count: 2}, _, _}, 5_000
  end
  test "topic writer will allow custom partition to be defined" do
    # The "partitioner" config entry is expected to be forwarded to Elsa as
    # the :partitioner option (here as the :md5 atom).
    expect Elsa.produce(any(), "topic-123", any(), partitioner: :md5), return: :ok
    config = %{
      "kafka" => %{
        "partitioner" => "md5"
      }
    }
    {:ok, writer} =
      start_supervised({Topic, endpoints: @server, topic: "topic-123", config: config})
    assert :ok == Topic.write(writer, ["message1"])
  end
  test "will create topic with specified number of partitions" do
    config = %{
      "kafka" => %{
        "partitions" => 4,
        "partitioner" => "md5"
      }
    }
    {:ok, _writer} =
      start_supervised({Topic, endpoints: @server, topic: "topic-4p", config: config})
    # The writer is expected to create the topic with 4 partitions on startup.
    assert_async debug: true do
      assert Elsa.topic?(@server, "topic-4p")
      assert 4 == Elsa.Util.partition_count(@server, "topic-4p")
    end
  end
end
| 28.783505 | 98 | 0.614255 |
9e13476f84c7aa26a2e76f98c1de00d169fb4e54 | 1,262 | ex | Elixir | src/Elixir Practice/Keyword lists and Maps.ex | Fennec2000GH/Software-Engineering-Interview | c7a182d7f8c44f7cabaf77982099594ce297a48b | [
"MIT"
] | 1 | 2020-03-15T04:09:11.000Z | 2020-03-15T04:09:11.000Z | src/Elixir Practice/Keyword lists and Maps.ex | Fennec2000GH/Software-Engineering-Interview | c7a182d7f8c44f7cabaf77982099594ce297a48b | [
"MIT"
] | null | null | null | src/Elixir Practice/Keyword lists and Maps.ex | Fennec2000GH/Software-Engineering-Interview | c7a182d7f8c44f7cabaf77982099594ce297a48b | [
"MIT"
] | null | null | null |
# https://elixir-lang.org/getting-started/keywords-and-maps.html
# Keyword lists
# A keyword list is a list of {atom, value} tuples; entries are ordered and
# keys may repeat. kw[:key] returns the first matching value.
kw = [{:a, 0}, {:b, 1}, {:c, 2}, {:d, 3}]
IO.write "kw = "; IO.inspect kw
# ++ appends entries; -- removes the first matching pair.
IO.write "kw ++ [e: 4] = "; IO.inspect kw ++ [e: 4]
IO.write "kw -- [b: 1] = "; IO.inspect kw -- [b: 1]
# Prepending a duplicate key shadows the later entry on lookup.
IO.write "[a: 39] ++ kw = "; IO.inspect [a: 39] ++ kw
IO.puts "kw[:a] = #{kw[:a]}"
IO.puts "kw[:c] = #{kw[:c]}"
# The [a: 0, ...] sugar builds the same tuple list, so the terms compare equal.
IO.puts "kw == [a: 0, b: 1, c: 2, d: 3]? #{kw == [a: 0, b: 1, c: 2, d: 3]}"
# Maps
# Map keys may be of any type; the => syntax works for all key types.
map = %{:apple => "fruit", :banana => "fruit", "broccoli" => 'green', 39 => "number"}
IO.write "map = "; IO.inspect map
IO.puts "map[:apple] = #{map[:apple]}"
IO.puts "map[39] = #{map[39]}"
# %{map | ...} updates existing keys only (raises KeyError for unknown keys).
map = %{map | "broccoli" => nil, 39 => "thirty-nine"}
IO.puts "new map[\"broccoli\"] = #{map["broccoli"]}"
IO.puts "new map[39] = #{map[39]}"
# Atom-keyed maps support the %{a: 0} shorthand and dot access.
atom_map = %{a: 0, b: 1, c: "cat"}
IO.write "atom_map = "; IO.inspect atom_map
IO.puts "atom_map.a = #{atom_map.a}, atom_map.b = #{atom_map.b}, atom_map.c = #{atom_map.c}"
# Nested data structures
# A keyword list of maps; chained access works with the [] syntax.
users = [
  john: %{name: "John", age: 27, languages: ["Erlang", "Ruby", "Elixir"]},
  mary: %{name: "Mary", age: 29, languages: ["Elixir", "F#", "Clojure"]}
]
IO.write "users = "; IO.inspect users
IO.write "users[:john][:languages] = "; IO.inspect users[:john][:languages]
| 38.242424 | 92 | 0.553883 |
9e13522f80e7d64c158c6167678505cc142ba391 | 1,049 | exs | Elixir | mix.exs | guibbv2011/ritcoinex | 82fe1a31fcf3dde7facbacd04c5dcb46ba53430b | [
"MIT"
] | null | null | null | mix.exs | guibbv2011/ritcoinex | 82fe1a31fcf3dde7facbacd04c5dcb46ba53430b | [
"MIT"
] | 10 | 2021-06-09T23:24:29.000Z | 2022-02-26T15:06:58.000Z | mix.exs | guibbv2011/ritcoinex | 82fe1a31fcf3dde7facbacd04c5dcb46ba53430b | [
"MIT"
] | null | null | null | defmodule Ritcoinex.MixProject do
use Mix.Project
def project do
[
app: :ritcoinex,
version: "0.1.0",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps(),
# Docs
name: "Ritcoinex",
source_url: "https://github.com/guibbv2011/ritcoinex",
#homepage_url: "http://YOUR_PROJECT_HOMEPAGE",
docs: [
main: "Ritcoinex", # The main page in the docs
#logo: "path/to/logo.png",
extras: ["README.md"]
]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:mnesia, :logger],
mod: {Ritcoinex, []}
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ex_crypto, "~> 0.10.0"},
{:ex_doc, "~> 0.24", only: :dev, runtime: false},
{:earmark, "~> 1.4"}
#{:gen_state_machine, "~> 3.0.0"}
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 23.840909 | 87 | 0.559581 |
9e135fe036adfb96523a375e3d46ea1d2ef7f4dc | 465 | exs | Elixir | config/dev.exs | elcritch/pjon-elixir | 3b80995826778dc86ea124c376bd01a42d0c305d | [
"Apache-2.0"
] | 3 | 2018-11-29T14:23:27.000Z | 2018-11-29T20:11:58.000Z | config/dev.exs | elcritch/pjon-elixir-serial | 3b80995826778dc86ea124c376bd01a42d0c305d | [
"Apache-2.0"
] | null | null | null | config/dev.exs | elcritch/pjon-elixir-serial | 3b80995826778dc86ea124c376bd01a42d0c305d | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# Serial device name for the PJON link; override with the MIX_UART environment
# variable (read when this config file is evaluated), defaulting to "ttyACM0".
config :pjon_elixir_serial, :device, System.get_env("MIX_UART") || "ttyACM0"
# Options handed to the native PJON layer at compile time.
# NOTE(review): the meaning of the individual flags below is assumed from
# their names - confirm against the PJON library documentation.
config :pjon_elixir_serial, :compile_options,
  packet_size: 128,
  include_packet_id: true,
  max_packets: 100,
  packet_max_length: 64,
  receive_while_sending_blocking: false,
  max_recent_ids: 4096,
  ts_response_timeout: 150000,
  pjon_send_blocking: true,
  rx_wait_time: 200000,
  ts_max_attempts: 20,
  bus_addr: 42,
tx_packet_addr: 47
| 24.473684 | 76 | 0.726882 |
9e136a1e20dec022b0a02605b1475935282ea6c5 | 1,675 | ex | Elixir | lib/x509/date_time.ex | r-icarus/x509 | 50bfc29d79a22d29903323c016cfd726a5fe2d78 | [
"BSD-3-Clause"
] | 75 | 2018-07-02T12:00:57.000Z | 2022-02-07T14:47:56.000Z | lib/x509/date_time.ex | r-icarus/x509 | 50bfc29d79a22d29903323c016cfd726a5fe2d78 | [
"BSD-3-Clause"
] | 30 | 2018-07-04T12:33:01.000Z | 2022-01-14T18:29:00.000Z | lib/x509/date_time.ex | r-icarus/x509 | 50bfc29d79a22d29903323c016cfd726a5fe2d78 | [
"BSD-3-Clause"
] | 12 | 2018-09-21T04:34:02.000Z | 2022-02-28T20:11:13.000Z | defmodule X509.DateTime do
@moduledoc false
# Builds an ASN.1 UTCTime (for years prior to 2050) or GeneralizedTime (for
# years starting with 2050)
def new() do
DateTime.utc_now() |> new()
end
def new(seconds) when is_integer(seconds) do
DateTime.utc_now() |> shift(seconds) |> new()
end
def new(%DateTime{year: year} = datetime) when year < 2050 do
iso = DateTime.to_iso8601(datetime, :basic)
[_, date, time] = Regex.run(~r/^\d\d(\d{6})T(\d{6})(?:\.\d+)?Z$/, iso)
{:utcTime, '#{date}#{time}Z'}
end
def new(datetime) do
iso = DateTime.to_iso8601(datetime, :basic)
[_, date, time] = Regex.run(~r/^(\d{8})T(\d{6})(?:\.\d+)?Z$/, iso)
{:generalTime, '#{date}#{time}Z'}
end
def to_datetime({:utcTime, time}) do
"20#{time}" |> to_datetime()
end
def to_datetime({:generalTime, time}) do
time |> to_string() |> to_datetime()
end
def to_datetime(
<<year::binary-size(4), month::binary-size(2), day::binary-size(2), hour::binary-size(2),
minute::binary-size(2), second::binary-size(2), "Z"::binary>>
) do
%DateTime{
year: String.to_integer(year),
month: String.to_integer(month),
day: String.to_integer(day),
hour: String.to_integer(hour),
minute: String.to_integer(minute),
second: String.to_integer(second),
time_zone: "Etc/UTC",
zone_abbr: "UTC",
utc_offset: 0,
std_offset: 0
}
end
# Shifts a DateTime value by a number of seconds (positive or negative)
defp shift(%DateTime{} = datetime, seconds) do
datetime
|> DateTime.to_unix()
|> Kernel.+(seconds)
|> DateTime.from_unix!()
end
end
| 27.916667 | 97 | 0.61194 |
9e136ce5d9ee934a0d01cbecafa3d7f7199e0330 | 2,101 | exs | Elixir | test/k8s/client/runner/wait_integration_test.exs | linkdd/k8s | a697818a7adf62abcf0f06a13ea283eb626b534d | [
"MIT"
] | 226 | 2019-02-03T00:49:32.000Z | 2022-03-30T15:02:22.000Z | test/k8s/client/runner/wait_integration_test.exs | linkdd/k8s | a697818a7adf62abcf0f06a13ea283eb626b534d | [
"MIT"
] | 109 | 2019-01-20T20:39:33.000Z | 2022-03-31T20:21:34.000Z | test/k8s/client/runner/wait_integration_test.exs | linkdd/k8s | a697818a7adf62abcf0f06a13ea283eb626b534d | [
"MIT"
] | 43 | 2019-02-07T01:18:31.000Z | 2022-03-08T04:15:33.000Z | defmodule K8s.Client.Runner.WaitIntegrationTest do
  use ExUnit.Case, async: true
  import K8s.Test.IntegrationHelper
  # Per-test context: a cluster connection, a random id to keep job names
  # unique across runs, and a wait timeout taken from the TEST_WAIT_TIMEOUT
  # env var (default "5").
  setup do
    timeout =
      "TEST_WAIT_TIMEOUT"
      |> System.get_env("5")
      |> String.to_integer()
    test_id = :rand.uniform(10_000)
    {:ok, %{conn: conn(), test_id: test_id, timeout: timeout}}
  end
  # Builds a create operation for a short-lived batch/v1 Job in the default
  # namespace that computes pi to 3 digits in a perl container, never
  # restarts, and retries at most once.
  @spec job(binary) :: K8s.Operation.t()
  defp job(name) do
    K8s.Client.create(%{
      "apiVersion" => "batch/v1",
      "kind" => "Job",
      "metadata" => %{"name" => name, "namespace" => "default"},
      "spec" => %{
        "backoffLimit" => 1,
        "template" => %{
          "spec" => %{
            "containers" => [
              %{
                "command" => ["perl", "-Mbignum=bpi", "-wle", "print bpi(3)"],
                "image" => "perl",
                "name" => "pi"
              }
            ],
            "restartPolicy" => "Never"
          }
        }
      }
    })
  end
  @tag integration: true
  test "waiting on a job to finish successfully", %{
    conn: conn,
    test_id: test_id,
    timeout: timeout
  } do
    create_job = job("wait-job-#{test_id}")
    {:ok, _} = K8s.Client.run(conn, create_job)
    op = K8s.Client.get("batch/v1", :job, namespace: "default", name: "wait-job-#{test_id}")
    # :eval given as a literal - wait until status.succeeded equals 1.
    opts = [find: ["status", "succeeded"], eval: 1, timeout: timeout]
    assert {:ok, result} = K8s.Client.Runner.Wait.run(conn, op, opts)
    assert result["status"]["succeeded"] == 1
  end
  @tag integration: true
  test "using an anonymous function to evaluate a job", %{
    conn: conn,
    test_id: test_id,
    timeout: timeout
  } do
    create_job = job("wait-job-#{test_id}")
    {:ok, _} = K8s.Client.run(conn, create_job)
    op = K8s.Client.get("batch/v1", :job, namespace: "default", name: "wait-job-#{test_id}")
    # Same wait, but :eval given as a predicate applied to the found value.
    eval_fn = fn value_of_status_succeeded ->
      value_of_status_succeeded == 1
    end
    opts = [find: ["status", "succeeded"], eval: eval_fn, timeout: timeout]
    assert {:ok, result} = K8s.Client.Runner.Wait.run(conn, op, opts)
    assert result["status"]["succeeded"] == 1
  end
end
| 27.644737 | 92 | 0.556878 |
9e137ae5b4052b83e914e3da516f3319bc214c26 | 4,021 | exs | Elixir | test/kdf/pbkdf2_test.exs | impl/ex_crypto | 587aa8a63bd1768a361da207339b4a8d68560c22 | [
"Apache-2.0"
] | 2 | 2016-05-02T18:19:13.000Z | 2016-05-03T16:11:58.000Z | test/kdf/pbkdf2_test.exs | impl/ex_crypto | 587aa8a63bd1768a361da207339b4a8d68560c22 | [
"Apache-2.0"
] | null | null | null | test/kdf/pbkdf2_test.exs | impl/ex_crypto | 587aa8a63bd1768a361da207339b4a8d68560c22 | [
"Apache-2.0"
] | null | null | null | defmodule Cryptex.Kdf.Pbkdf2Test do
use ExUnit.Case
alias Cryptex.Kdf.Pbkdf2
alias Cryptex.Kdf.Pbkdf2.Mcf
alias Cryptex.Kdf.Pbkdf2.Result
alias Cryptex.Mac.Hmac
@golden_comeonin [
{"passDATAb00AB7YxDTT", "saltKEYbcTcXHCBxtjD", 100_000,
Result.new(
Hmac.new(:sha512), 100_000,
"c2FsdEtFWWJjVGNYSENCeHRqRA" |> Mcf.Alphabet.decode!,
"rM3Nh5iuXNhYBHOQFe8qEeMlkbe30W92gZswsNSdgOGr6myYIrgKH9/kIeJvVgPsqKR6ZMmgBPta.CKfdi/0Hw" |> Mcf.Alphabet.decode!)},
{"passDATAb00AB7YxDTTl", "saltKEYbcTcXHCBxtjD2", 100_000,
Result.new(
Hmac.new(:sha512), 100_000,
"c2FsdEtFWWJjVGNYSENCeHRqRDI" |> Mcf.Alphabet.decode!,
"WUJWsL1NbJ8hqH97pXcqeRoQ5hEGlPRDZc2UZw5X8a7NeX7x0QAZOHGQRMfwGAJml4Reua2X2X3jarh4aqtQlg" |> Mcf.Alphabet.decode!)},
{"passDATAb00AB7YxDTTlRH2dqxDx19GDxDV1zFMz7E6QVqKIzwOtMnlxQLttpE5",
"saltKEYbcTcXHCBxtjD2PnBh44AIQ6XUOCESOhXpEp3HrcGMwbjzQKMSaf63IJe",
100_000,
Result.new(
Hmac.new(:sha512), 100_000,
"c2FsdEtFWWJjVGNYSENCeHRqRDJQbkJoNDRBSVE2WFVPQ0VTT2hYcEVwM0hyY0dNd2JqelFLTVNhZjYzSUpl" |> Mcf.Alphabet.decode!,
"B0R0AchXZuSu1YPeLmv1pnXqvk82GCgclWFvT8H9/m7LwcOYJ4nU/ZQdZYTvU0p4vTeuAlVdlFXo8In9tN.2uw" |> Mcf.Alphabet.decode!)},
]
@golden_passlib [
{"password", <<36, 196, 248, 159, 51, 166, 84, 170, 213, 250, 159, 211, 154, 83, 10, 193>>, 19_000,
Result.new(
Hmac.new(:sha512), 19_000,
"JMT4nzOmVKrV.p/TmlMKwQ" |> Mcf.Alphabet.decode!,
"jKbZHoPwUWBT08pjb/CnUZmFcB9JW4dsOzVkfi9X6Pdn5NXWeY.mhL1Bm4V9rjYL5ZfA32uh7Gl2gt5YQa/JCA" |> Mcf.Alphabet.decode!)},
{"p@$$w0rd", <<252, 159, 83, 202, 89, 107, 141, 17, 66, 200, 121, 239, 29, 163, 20, 34>>, 19_000,
Result.new(
Hmac.new(:sha512), 19_000,
"/J9TyllrjRFCyHnvHaMUIg" |> Mcf.Alphabet.decode!,
"AJ3Dr926ltK1sOZMZAAoT7EoR7R/Hp.G6Bt.4DFENiYayhVM/ZBPuqjFNhcE9NjTmceTmLnSqzfEQ8mafy49sw" |> Mcf.Alphabet.decode!)},
{"oh this is hard 2 guess", <<1, 96, 140, 17, 162, 84, 42, 165, 84, 42, 165, 244, 62, 71, 136, 177>>, 19_000,
Result.new(
Hmac.new(:sha512), 19_000,
"AWCMEaJUKqVUKqX0PkeIsQ" |> Mcf.Alphabet.decode!,
"F0xkzJUOKaH8pwAfEwLeZK2/li6CF3iEcpfoJ1XoExQUTStXCNVxE1sd1k0aeQlSFK6JnxJOjM18kZIdzNYkcQ" |> Mcf.Alphabet.decode!)},
{"even more difficult", <<215, 186, 87, 42, 133, 112, 14, 1, 160, 52, 38, 100, 44, 229, 92, 203>>, 19_000,
Result.new(
Hmac.new(:sha512), 19_000,
"17pXKoVwDgGgNCZkLOVcyw" |> Mcf.Alphabet.decode!,
"TEv9woSaVTsYHLxXnFbWO1oKrUGfUAljkLnqj8W/80BGaFbhccG8B9fZc05RoUo7JQvfcwsNee19g8GD5UxwHA" |> Mcf.Alphabet.decode!)},
]
test "known keys are derived correctly" do
(@golden_comeonin ++ @golden_passlib) |> Enum.map(fn {secret, salt, rounds, computed} ->
assert Pbkdf2.derive(Hmac.new(:sha512), secret, salt, rounds: rounds) == computed
end)
end
test "derive with function is equivalent to new followed by derive" do
assert Pbkdf2.new(Hmac.new(:sha512)) |> Pbkdf2.derive("test", "salt") == Pbkdf2.derive(Hmac.new(:sha512), "test", "salt")
end
test "salt generation works correctly" do
assert byte_size(Pbkdf2.derive(Hmac.new(:sha512), "test") |> Result.salt) == 16
assert byte_size(Pbkdf2.derive(Hmac.new(:sha512), "test", nil, salt_size: 32) |> Result.salt) == 32
assert_raise ArgumentError, ~r/must be at least/i, fn ->
Pbkdf2.derive(Hmac.new(:sha512), "test", nil, salt_size: 4)
end
end
test "invalid hash size raises" do
assert_raise ArgumentError, ~r/must be a multiple/i, fn ->
Pbkdf2.derive(Hmac.new(:sha512), "test", "salt", hash_size: 42)
end
assert_raise ArgumentError, ~r/hash size must be greater than 0/i, fn ->
Pbkdf2.derive(Hmac.new(:sha512), "test", "salt", hash_size: 0)
end
end
test "invalid number of rounds raises" do
assert_raise ArgumentError, ~r/rounds must be greater than 0/i, fn ->
Pbkdf2.derive(Hmac.new(:sha512), "test", "salt", rounds: 0)
end
end
end
| 46.218391 | 125 | 0.691122 |
9e1395c52951a93a2cac7d112fae4155094fd1bf | 182 | exs | Elixir | config/config.exs | clone1018/excerpt | eef37f7c144bd4c477444b809da73c821b754c2b | [
"MIT"
] | 1 | 2021-08-12T20:37:44.000Z | 2021-08-12T20:37:44.000Z | config/config.exs | clone1018/excerpt | eef37f7c144bd4c477444b809da73c821b754c2b | [
"MIT"
] | null | null | null | config/config.exs | clone1018/excerpt | eef37f7c144bd4c477444b809da73c821b754c2b | [
"MIT"
] | null | null | null | use Mix.Config
config :logger, :console, format: "$time $metadata[$level] $levelpad$message\n"
if File.exists?("config/#{Mix.env()}.exs") do
import_config "#{Mix.env()}.exs"
end
| 22.75 | 79 | 0.681319 |
9e13977b78484ae6f9d869b350f6c3fd0770c197 | 1,162 | exs | Elixir | config/test.exs | DasThink/console | 54cde903e3f4e2ca93b14c1d3a7bf3b8a2c7372b | [
"Apache-2.0"
] | 1 | 2021-08-20T17:48:47.000Z | 2021-08-20T17:48:47.000Z | config/test.exs | mfalkvidd/console | 6427c82bc4f8619b5bb3a5940099a8bdd6167a9e | [
"Apache-2.0"
] | null | null | null | config/test.exs | mfalkvidd/console | 6427c82bc4f8619b5bb3a5940099a8bdd6167a9e | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :console, ConsoleWeb.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :console, Console.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "console_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
config :console, Console.Mailer,
adapter: Bamboo.TestAdapter
config :console, env: Mix.env
config :cloak, Cloak.AES.CTR,
tag: "AES",
default: true,
keys: [
%{tag: <<1>>, key: :base64.decode("/QCxhn/9t2SM8GiLXVDR1jFO/vENLGCnXADGAhGPM30="), default: true}
]
config :comeonin, :bcrypt_log_rounds, 4
config :comeonin, :pbkdf2_rounds, 1
config :console, oui: 1
config :console, :access_token_decoder, Console.AccessTokenDecoder.MockDecodeAccessToken
config :console,
router_secrets: [
"1524243720:2JD3juUA9RGaOf3Fpj7fNOylAgZ/jAalgOe45X6+jW4sy9gyCy1ELJrIWKvrgMx/"
],
blockchain_api_retry: "1",
blockchain_api_url: "https://api.helium.wtf/v1"
| 25.822222 | 101 | 0.740103 |
9e13a1618dfce233ac31a56130d560b08821d513 | 802 | exs | Elixir | discuss/test/discuss_web/channels/comment_channel_test.exs | ivoferro/elixir_phoenix_bootcamp | e3445dbf90c1eea81e8aa34cc7801934a516d7d7 | [
"MIT"
] | null | null | null | discuss/test/discuss_web/channels/comment_channel_test.exs | ivoferro/elixir_phoenix_bootcamp | e3445dbf90c1eea81e8aa34cc7801934a516d7d7 | [
"MIT"
] | null | null | null | discuss/test/discuss_web/channels/comment_channel_test.exs | ivoferro/elixir_phoenix_bootcamp | e3445dbf90c1eea81e8aa34cc7801934a516d7d7 | [
"MIT"
] | null | null | null | defmodule DiscussWeb.CommentChannelTest do
use DiscussWeb.ChannelCase
alias DiscussWeb.CommentChannel
setup do
{:ok, _, socket} =
socket("user_id", %{some: :assign})
|> subscribe_and_join(CommentChannel, "comment:lobby")
{:ok, socket: socket}
end
test "ping replies with status ok", %{socket: socket} do
ref = push socket, "ping", %{"hello" => "there"}
assert_reply ref, :ok, %{"hello" => "there"}
end
test "shout broadcasts to comment:lobby", %{socket: socket} do
push socket, "shout", %{"hello" => "all"}
assert_broadcast "shout", %{"hello" => "all"}
end
test "broadcasts are pushed to the client", %{socket: socket} do
broadcast_from! socket, "broadcast", %{"some" => "data"}
assert_push "broadcast", %{"some" => "data"}
end
end
| 27.655172 | 66 | 0.637157 |
9e13c7e092f0047584daf9e9a1995a04273e29e5 | 3,133 | ex | Elixir | lib/openmaize/config.ex | elixircnx/sanction | 5b270fd6eef980d37c06429271f64ec14e0f622d | [
"BSD-3-Clause"
] | 130 | 2016-06-21T07:58:46.000Z | 2022-01-01T21:45:23.000Z | lib/openmaize/config.ex | elixircnx/sanction | 5b270fd6eef980d37c06429271f64ec14e0f622d | [
"BSD-3-Clause"
] | 50 | 2016-06-29T16:01:42.000Z | 2019-08-07T21:33:49.000Z | lib/openmaize/config.ex | elixircnx/sanction | 5b270fd6eef980d37c06429271f64ec14e0f622d | [
"BSD-3-Clause"
] | 20 | 2016-07-02T11:37:33.000Z | 2018-10-26T19:12:41.000Z | defmodule Openmaize.Config do
@moduledoc """
This module provides an abstraction layer for configuration.
The following are valid configuration items.
| name | type | default |
| :----------------- | :----------- | ---------------: |
| crypto_mod | module | Comeonin.Bcrypt |
| hash_name | atom | :password_hash |
| log_level | atom | :info |
| drop_user_keys | list of atoms | [] |
| password_min_len | integer | 8 |
| remember_salt | string | N/A |
## Examples
The simplest way to change the default values would be to add
an `openmaize` entry to the `config.exs` file in your project,
like the following example.
config :openmaize,
crypto_mod: Comeonin.Bcrypt,
hash_name: :encrypted_password,
drop_user_keys: [:shoe_size],
password_min_len: 12
"""
@doc """
The password hashing and checking algorithm. Bcrypt is the default.
You can supply any module, but the module must implement the following
functions:
* hashpwsalt/1 - hashes the password
* checkpw/2 - given a password and a salt, returns if match
* dummy_checkpw/0 - performs a hash and returns false
See Comeonin.Bcrypt for examples.
"""
def crypto_mod do
Application.get_env(:openmaize, :crypto_mod, Comeonin.Bcrypt)
end
@doc """
The name in the database for the password hash.
If, for example, you are migrating from Devise, you will need to
change this to `encrypted_password`.
"""
def hash_name do
Application.get_env(:openmaize, :hash_name, :password_hash)
end
@doc """
The log level for Openmaize logs.
This should either be an atom, :debug, :info, :warn or :error, or
false.
The default is :info, which means that :info, :warn and :error logs
will be returned.
"""
def log_level do
Application.get_env(:openmaize, :log_level, :info)
end
@doc """
The keys that are removed from the user struct before it is passed
on to another function.
This should be a list of atoms.
By default, :password_hash (or the value for hash_name), :password,
:otp_secret, :confirmation_token and :reset_token are removed, and
this option allows you to add to this list.
"""
def drop_user_keys do
Application.get_env(:openmaize, :drop_user_keys, []) ++
[hash_name(), :password, :otp_secret, :confirmation_token, :reset_token]
end
@doc """
Minimum length for the password strength check.
The default minimum length is 8.
The Openmaize.Password module provides a basic check and an advanced
check, both of which use the `password_min_len` value. For more
information about the advanced check, see the documentation for
the Openmaize.Password module.
"""
def password_min_len do
Application.get_env(:openmaize, :password_min_len, 8)
end
@doc """
Salt to be used when signing and verifying the `remember me` cookie.
"""
def remember_salt do
Application.get_env(:openmaize, :remember_salt)
end
end
| 29.556604 | 76 | 0.654325 |
9e1404f9b56d4e2fc04f3cd2a5e3092ef92558d9 | 921 | exs | Elixir | template/config/config.exs | fishcakez/phoenix | 97fbd73a475ae918ef29a87ad580ab2ab6d967d2 | [
"MIT"
] | null | null | null | template/config/config.exs | fishcakez/phoenix | 97fbd73a475ae918ef29a87ad580ab2ab6d967d2 | [
"MIT"
] | null | null | null | template/config/config.exs | fishcakez/phoenix | 97fbd73a475ae918ef29a87ad580ab2ab6d967d2 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
use Mix.Config
# Note this file is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project.
config :phoenix, <%= application_module %>.Router,
port: System.get_env("PORT"),
ssl: false,
static_assets: true,
cookies: true,
session_key: "_<%= application_name %>_key",
session_secret: "<%= session_secret %>",
catch_errors: true,
debug_errors: false,
error_controller: <%= application_module %>.PageController
config :phoenix, :code_reloader,
enabled: false
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. Note, this must remain at the bottom of
# this file to properly merge your previous config entries.
import_config "#{Mix.env}.exs"
| 31.758621 | 77 | 0.741585 |
9e14328cccc1dfe4a3d5c3c900fa7d75d7310e25 | 605 | exs | Elixir | priv/repo/migrations/20160917022654_create_op.exs | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | 123 | 2017-04-04T18:15:48.000Z | 2021-04-26T08:04:22.000Z | priv/repo/migrations/20160917022654_create_op.exs | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20160917022654_create_op.exs | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | 17 | 2017-04-04T18:58:29.000Z | 2021-05-10T21:39:16.000Z | defmodule CanvasAPI.Repo.Migrations.CreateOp do
use Ecto.Migration
def change do
create table(:ops, primary_key: false) do
add :components, :jsonb, null: false
add :meta, :jsonb, null: false
add :seq, :integer, null: false
add :source, :text, null: false
add :version, :integer, null: false
add :canvas_id, references(:canvases, on_delete: :delete_all, type: :text), null: false
add :inserted_at, :timestamptz, null: false
add :updated_at, :timestamptz, null: false
end
create index(:ops, [:canvas_id, :version], unique: true)
end
end
| 28.809524 | 93 | 0.66281 |
9e14554e98c8bc7f87ed80ca1799adc29e8ad695 | 491 | ex | Elixir | lib/forage/query_builder/sort_field.ex | tmbb/forage | dcda0403e13faeadc83b64961e4073bede25bc00 | [
"MIT"
] | 14 | 2018-11-28T11:33:47.000Z | 2021-09-12T08:30:40.000Z | lib/forage/query_builder/sort_field.ex | tmbb/forage | dcda0403e13faeadc83b64961e4073bede25bc00 | [
"MIT"
] | 1 | 2019-07-01T22:57:18.000Z | 2019-07-01T22:57:18.000Z | lib/forage/query_builder/sort_field.ex | tmbb/forage | dcda0403e13faeadc83b64961e4073bede25bc00 | [
"MIT"
] | 3 | 2019-08-07T06:41:34.000Z | 2022-03-22T16:02:06.000Z | defmodule Forage.QueryBuilder.SortField do
@moduledoc false
def build_order_by_clause(sort_data) do
# Return a keyword list
for row <- sort_data do
# May not exist if the user hasn't specified it.
# By default, sort results in ascending order
direction = row[:direction] || :asc
# Will always existe becuase of how the keyword list is constructed
field = row[:field]
# Return the pair
{direction, field}
end
end
end | 30.6875 | 74 | 0.655804 |
9e145ee1d565dff0df001d7d55526f1c873c0d7f | 1,125 | ex | Elixir | lib/dnsierge_web/router.ex | jcamenisch/dnsierge | cfc1654ce0b2c34c8952845aa0df6113ff3bb406 | [
"MIT"
] | null | null | null | lib/dnsierge_web/router.ex | jcamenisch/dnsierge | cfc1654ce0b2c34c8952845aa0df6113ff3bb406 | [
"MIT"
] | null | null | null | lib/dnsierge_web/router.ex | jcamenisch/dnsierge | cfc1654ce0b2c34c8952845aa0df6113ff3bb406 | [
"MIT"
] | null | null | null | defmodule DnsiergeWeb.Router do
use DnsiergeWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", DnsiergeWeb do
pipe_through :browser
get "/", PageController, :index
get "/domains", DomainController, :index
end
# Other scopes may use custom stacks.
# scope "/api", DnsiergeWeb do
# pipe_through :api
# end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through :browser
live_dashboard "/dashboard", metrics: DnsiergeWeb.Telemetry
end
end
end
| 25.568182 | 70 | 0.699556 |
9e1462e6d79a57495ced4bc145aa67acf3e701e3 | 863 | exs | Elixir | test/web/controllers/session_controller_test.exs | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 610 | 2017-08-09T15:20:25.000Z | 2022-03-27T15:49:07.000Z | test/web/controllers/session_controller_test.exs | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 69 | 2017-09-23T04:02:30.000Z | 2022-03-19T21:08:21.000Z | test/web/controllers/session_controller_test.exs | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 85 | 2017-09-23T04:07:11.000Z | 2021-11-20T06:44:56.000Z | defmodule Web.SessionControllerTest do
use Web.ConnCase
describe "signing in" do
test "valid", %{conn: conn} do
{:ok, user} =
TestHelpers.create_user(%{
email: "[email protected]",
password: "password"
})
conn =
post(conn, Routes.session_path(conn, :create),
user: [email: user.email, password: "password"]
)
assert redirected_to(conn) == Routes.page_path(conn, :index)
end
test "invalid", %{conn: conn} do
{:ok, user} =
TestHelpers.create_user(%{
email: "[email protected]",
password: "password"
})
conn =
post(conn, Routes.session_path(conn, :create),
user: [email: user.email, password: "invalid"]
)
assert redirected_to(conn) == Routes.session_path(conn, :new)
end
end
end
| 23.972222 | 67 | 0.565469 |
9e14646097805250cd58df6f1290059fef81239b | 583 | ex | Elixir | lib/books_web/live/passive_ability_live/show.ex | nickagliano/books | eec595ed4add9d678278785d9ab10106e1e426d1 | [
"MIT"
] | null | null | null | lib/books_web/live/passive_ability_live/show.ex | nickagliano/books | eec595ed4add9d678278785d9ab10106e1e426d1 | [
"MIT"
] | null | null | null | lib/books_web/live/passive_ability_live/show.ex | nickagliano/books | eec595ed4add9d678278785d9ab10106e1e426d1 | [
"MIT"
] | null | null | null | defmodule BooksWeb.PassiveAbilityLive.Show do
@moduledoc """
Show passive ability
"""
use BooksWeb, :live_view
alias Books.PassiveAbilities
@impl true
def mount(_params, _session, socket) do
{:ok, socket}
end
@impl true
def handle_params(%{"id" => id}, _, socket) do
{:noreply,
socket
|> assign(:page_title, page_title(socket.assigns.live_action))
|> assign(:passive_ability, PassiveAbilities.get_passive_ability!(id))}
end
defp page_title(:show), do: "Show Passive ability"
defp page_title(:edit), do: "Edit Passive ability"
end
| 23.32 | 76 | 0.692967 |
9e147564dd06baf897041d7b45eae6840e9539dc | 4,024 | ex | Elixir | apps/extended_api/lib/extended_api/worker/get_trytes/helper/helper.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 19 | 2019-09-17T18:14:36.000Z | 2021-12-06T07:29:27.000Z | apps/extended_api/lib/extended_api/worker/get_trytes/helper/helper.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 5 | 2019-09-30T04:57:14.000Z | 2020-11-10T15:41:03.000Z | apps/extended_api/lib/extended_api/worker/get_trytes/helper/helper.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 2 | 2019-09-17T19:03:16.000Z | 2021-03-01T01:04:31.000Z | defmodule ExtendedApi.Worker.GetTrytes.Helper do
@moduledoc """
This module hold all the required helper functions
Which is gonna be used by GetTrytes worker and its
row compute modules(bundle_fn.ex, edge_fn.ex)
"""
alias ExtendedApi.Worker.{GetTrytes, GetTrytes.BundleFn, GetTrytes.EdgeFn}
alias ExtendedApi.Worker.GetTrytes.Helper
alias Core.DataModel.{Keyspace.Tangle, Table.Bundle, Table.Edge}
import OverDB.Builder.Query
@edge_cql "SELECT lb,ts,v2,ex,ix,el,lx FROM tangle.edge WHERE v1 = ? AND lb = 30"
@bundle_cql "SELECT lb,va,a,c,d,e,f,g,h,i FROM tangle.bundle WHERE bh = ? AND lb IN ? AND ts = ? AND ix = ? AND id IN ?"
# Start of Helper functions for Edge table queries ###########################
@spec queries(list, map,list, list, integer) :: tuple
def queries(hashes, state, queries_states_list \\ [], trytes_list \\ [], ref \\ 0)
def queries(hashes, state, queries_states_list, trytes_list, ref) do
_queries(hashes, state, queries_states_list, trytes_list, ref)
end
@spec queries(list, map,list, list, integer) :: tuple
defp _queries([hash | rest], state, queries_states_list, trytes_list, ref) when is_binary(hash) do
# first we create edge query
{ok?, _,q_s} = edge_query(hash, ref)
# we ask this function to put query_state and proceed or break.
_queries(ok?, rest, state, queries_states_list, trytes_list, ref, q_s)
end
@spec _queries(list, map, list,list, integer) :: tuple
defp _queries([], state, queries_states_list, trytes_list, ref) do
{:ok, Enum.into(queries_states_list, %{ref: ref, trytes: trytes_list})|> Map.merge(state)}
end
@spec _queries(list, map, list,list, integer) :: tuple
defp _queries(_, _, _, _,_) do
{:error, :invalid}
end
@spec _queries(atom, list, map, list,list, integer, map) :: tuple
defp _queries(:ok ,rest, state, queries_states_list, trytes_list, ref, q_s) do
# :ok indicates ref => q_s has been received by the shard's stage.
# now loop through the rest(rest_hashes) with updated queries_states_list/ref/trytes_list.
_queries(rest, state, [{ref, q_s} | queries_states_list], [nil | trytes_list], ref+1)
end
@spec _queries(term, list, map, list,list, integer, map) :: tuple
defp _queries(ok?,_, _, _, _, _,_) do
{:error, ok?}
end
@spec edge_query(binary, integer, map) :: tuple
def edge_query(hash, ref, opts \\ nil) do
{Tangle, Edge}
|> select([:lb,:ts,:v2,:ex,:ix,:el,:lx]) |> type(:stream) |> assign(hash: hash)
|> cql(@edge_cql)
|> values([{:blob, hash}])
|> opts(opts || %{function: {EdgeFn, :bundle_queries, [ref]}})
|> pk([v1: hash]) |> prepare?(true) |> reference({:edge, ref})
|> GetTrytes.query()
end
# Start of Helper functions for Bundle table queries #########################
@doc """
This function generates and execute bundle query.
"""
def bundle_query(bh,addr_lb,tx_lb,ts,ix,lx,ex,ref, opts \\ nil, acc \\ %{}) do
{Tangle, Bundle}
|> select([:lb, :va, :a, :c, :d, :e, :f, :g, :h, :i]) |> type(:stream)
# NOTE: we had to use this statement till ScyllaDB's bug get resolved (https://github.com/scylladb/scylla/issues/4509)
|> cql(@bundle_cql) # check at the top of module to know the current cql statement.
|> assign(acc: acc)
|> values([{:blob, bh}, {{:list, :tinyint}, [addr_lb, tx_lb]}, {:varint, ts}, {:varint, ix}, {{:list, :blob}, ["addr", ex]}])
|> pk([bh: bh]) |> prepare?(true) |> reference({:bundle, ref})
|> opts(opts || %{function: {BundleFn, :construct, [bh,addr_lb,tx_lb,ts,ix,lx,ex,ref]}})
|> GetTrytes.query()
end
@doc """
This function generates and execute bundle_query from opts
It's intended to make sure to add the paging_state(if any)
and append the arguments ( bh, addr_lb,tx_lb, etc)
"""
@spec bundle_query_from_opts_acc(map,map) :: tuple
def bundle_query_from_opts_acc(%{function: {_, _, args}} = opts, acc) do
apply(ExtendedApi.Worker.GetTrytes.Helper, :bundle_query, args ++ [opts,acc])
end
end
| 43.73913 | 129 | 0.6583 |
9e1510fda1500c302ce58942763d7bb89b3e4dca | 7,155 | ex | Elixir | lib/teiserver_web/router.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | lib/teiserver_web/router.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | lib/teiserver_web/router.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | defmodule TeiserverWeb.Router do
defmacro __using__(_opts \\ []) do
quote do
import unquote(__MODULE__)
end
end
defmacro teiserver_routes() do
quote do
scope "/", TeiserverWeb.General, as: :ts_general do
pipe_through([:browser, :blank_layout])
get("/code_of_conduct", GeneralController, :code_of_conduct)
get("/privacy_policy", GeneralController, :gdpr)
get("/gdpr", GeneralController, :gdpr)
end
scope "/teiserver", TeiserverWeb.General, as: :ts_general do
pipe_through([:browser, :admin_layout, :protected])
get("/", GeneralController, :index)
end
# ts_account_X_path
scope "/teiserver/account", TeiserverWeb.Account, as: :ts_account do
pipe_through([:browser, :admin_layout, :protected])
get("/relationships", RelationshipsController, :index)
post("/relationships/find/", RelationshipsController, :find)
post("/relationships/create/:action/:target", RelationshipsController, :create)
put("/relationships/update/:action/:target", RelationshipsController, :update)
delete("/relationships/delete/:action/:target", RelationshipsController, :delete)
resources("/preferences", PreferencesController,
only: [:index, :edit, :update, :new, :create]
)
get("/", GeneralController, :index)
end
# ts_clans_X_path
scope "/teiserver/clans", TeiserverWeb.Clans, as: :ts_clans do
pipe_through([:browser, :admin_layout, :protected])
get("/", ClanController, :index)
get("/:name", ClanController, :show)
put("/update/:clan_id", ClanController, :update)
get("/set_default/:id", ClanController, :set_default)
post("/create_invite", ClanController, :create_invite)
delete("/delete_invite/:clan_id/:user_id", ClanController, :delete_invite)
put("/respond_to_invite/:clan_id/:response", ClanController, :respond_to_invite)
delete("/delete_membership/:clan_id/:user_id", ClanController, :delete_membership)
put("/promote/:clan_id/:user_id", ClanController, :promote)
put("/demote/:clan_id/:user_id", ClanController, :demote)
end
scope "/teiserver/games", TeiserverWeb.Game, as: :ts_game do
pipe_through([:browser, :admin_layout, :protected])
resources("/tournaments", TournamentController)
resources("/queues", QueueController)
end
scope "/teiserver/battle", TeiserverWeb.Battle, as: :ts_battle do
pipe_through([:browser, :admin_layout, :protected])
get("/", GeneralController, :index)
end
scope "/teiserver/battle", TeiserverWeb.Battle, as: :ts_battle do
pipe_through([:browser, :admin_layout, :protected])
resources("/matches", MatchController, only: [:index, :show, :delete])
end
scope "/teiserver/battle", TeiserverWeb.Battle.LobbyLive, as: :ts_battle do
pipe_through([:browser, :admin_layout, :protected])
live("/lobbies", Index, :index)
live("/lobbies/:id", Show, :show)
end
scope "/teiserver/game_live", TeiserverWeb.Matchmaking.QueueLive, as: :ts_game do
pipe_through([:browser, :admin_layout, :protected])
live("/queues", Index, :index)
live("/queues/:id", Show, :show)
end
# REPORTING
scope "/teiserver/reports", TeiserverWeb.Report, as: :ts_reports do
pipe_through([:browser, :admin_layout, :protected])
get("/", GeneralController, :index)
get("/day_metrics/today", MetricController, :day_metrics_today)
get("/day_metrics/show/:date", MetricController, :day_metrics_show)
get("/day_metrics/export/:date", MetricController, :day_metrics_export)
get("/day_metrics/graph", MetricController, :day_metrics_graph)
post("/day_metrics/graph", MetricController, :day_metrics_graph)
get("/day_metrics", MetricController, :day_metrics_list)
post("/day_metrics", MetricController, :day_metrics_list)
get("/client_events/export/form", ClientEventController, :export_form)
post("/client_events/export/post", ClientEventController, :export_post)
get("/client_events/summary", ClientEventController, :summary)
get("/client_events/property/:property_name/detail", ClientEventController, :property_detail)
get("/client_events/event/:event_name/detail", ClientEventController, :event_detail)
get("/show/:name", ReportController, :show)
post("/show/:name", ReportController, :show)
end
# ts_engine_X_path
scope "/teiserver/engine", TeiserverWeb.Engine, as: :ts_engine do
pipe_through([:browser, :admin_layout, :protected])
resources("/unit", UnitController)
end
# API
scope "/teiserver/api", TeiserverWeb.API do
pipe_through :api
post "/login", SessionController, :login
end
scope "/teiserver/api", TeiserverWeb.API do
pipe_through([:token_api])
post "/battle/create", BattleController, :create
end
# ADMIN
scope "/teiserver/admin", TeiserverWeb.ClientLive, as: :ts_admin do
pipe_through([:browser, :admin_layout, :protected])
live("/client", Index, :index)
live("/client/:id", Show, :show)
end
scope "/teiserver/admin", TeiserverWeb.AgentLive, as: :ts_admin do
pipe_through([:browser, :admin_layout, :protected])
live("/agent", Index, :index)
# live("/agent/:id", Show, :show)
end
scope "/teiserver/admin", TeiserverWeb.Admin, as: :ts_admin do
pipe_through([:browser, :admin_layout, :protected])
get("/", GeneralController, :index)
get("/metrics", GeneralController, :metrics)
get("/tools", ToolController, :index)
get("/tools/convert", ToolController, :convert_form)
post("/tools/convert_post", ToolController, :convert_post)
post("/clans/create_membership", ClanController, :create_membership)
delete("/clans/delete_membership/:clan_id/:user_id", ClanController, :delete_membership)
delete("/clans/delete_invite/:clan_id/:user_id", ClanController, :delete_invite)
put("/clans/promote/:clan_id/:user_id", ClanController, :promote)
put("/clans/demote/:clan_id/:user_id", ClanController, :demote)
resources("/clans", ClanController)
resources("/parties", PartyController)
# resources("/tournaments", TournamentController)
get("/users/reset_password/:id", UserController, :reset_password)
get("/users/action/:id/:action", UserController, :perform_action)
put("/users/action/:id/:action", UserController, :perform_action)
get("/users/reports/:id/respond", UserController, :respond_form)
put("/users/reports/:id/respond", UserController, :respond_post)
get("/users/smurf_search/:id", UserController, :smurf_search)
get("/users/search", UserController, :index)
post("/users/search", UserController, :search)
resources("/user", UserController)
end
end
end
end
| 38.88587 | 101 | 0.659119 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.