hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e45404620127a640029e2f9e0d5dfc99871f3c6 | 1,031 | ex | Elixir | apps/ewallet_api/lib/ewallet_api/application.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 2 | 2019-07-13T05:49:03.000Z | 2021-08-19T23:58:23.000Z | apps/ewallet_api/lib/ewallet_api/application.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_api/lib/ewallet_api/application.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 3 | 2018-05-08T17:15:42.000Z | 2021-11-10T04:08:33.000Z | defmodule EWalletAPI.Application do
@moduledoc """
EWalletAPI's startup and shutdown functionalities
"""
use Application
alias EWalletAPI.Endpoint
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the endpoint when the application starts
supervisor(EWalletAPI.Endpoint, []),
# Start your own worker by calling:
# EWalletAPI.Worker.start_link(arg1, arg2, arg3)
# worker(EWalletAPI.Worker, [arg1, arg2, arg3]),
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: EWalletAPI.Supervisor]
:ok = :error_logger.add_report_handler(Sentry.Logger)
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
Endpoint.config_change(changed, removed)
:ok
end
end
| 28.638889 | 64 | 0.71387 |
9e458e6befec9d202f32fe1b613a742e907a4810 | 4,590 | ex | Elixir | lib/livebook/config.ex | benjreinhart/livebook | 0500ad5c6237167ce9769d8cc78fca360834f576 | [
"Apache-2.0"
] | null | null | null | lib/livebook/config.ex | benjreinhart/livebook | 0500ad5c6237167ce9769d8cc78fca360834f576 | [
"Apache-2.0"
] | null | null | null | lib/livebook/config.ex | benjreinhart/livebook | 0500ad5c6237167ce9769d8cc78fca360834f576 | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Config do
@moduledoc false
@type auth_mode() :: :token | :password | :disabled
@doc """
Returns the longname if the distribution mode is configured to use long names.
"""
@spec longname() :: binary() | nil
def longname() do
host = Livebook.Utils.node_host()
if host =~ "." do
host
end
end
@doc """
Returns the runtime module and `init` args used to start
the default runtime.
"""
@spec default_runtime() :: {Livebook.Runtime.t(), list()}
def default_runtime() do
Application.fetch_env!(:livebook, :default_runtime)
end
@doc """
Returns the authentication mode.
"""
@spec auth_mode() :: auth_mode()
def auth_mode() do
Application.fetch_env!(:livebook, :authentication_mode)
end
@doc """
Return the root path for persisting notebooks.
"""
@spec root_path() :: binary()
def root_path() do
Application.fetch_env!(:livebook, :root_path)
end
## Parsing
@doc """
Parses and validates the root path from env.
"""
def root_path!(env) do
if root_path = System.get_env(env) do
root_path!("LIVEBOOK_ROOT_PATH", root_path)
else
File.cwd!()
end
end
@doc """
Validates `root_path` within context.
"""
def root_path!(context, root_path) do
if File.dir?(root_path) do
root_path
else
IO.warn("ignoring #{context} because it doesn't point to a directory: #{root_path}")
File.cwd!()
end
end
@doc """
Parses and validates the secret from env.
"""
def secret!(env) do
if secret_key_base = System.get_env(env) do
if byte_size(secret_key_base) < 64 do
abort!(
"cannot start Livebook because #{env} must be at least 64 characters. " <>
"Invoke `openssl rand -base64 48` to generate an appropriately long secret."
)
end
secret_key_base
end
end
@doc """
Parses and validates the port from env.
"""
def port!(env) do
if port = System.get_env(env) do
case Integer.parse(port) do
{port, ""} -> port
:error -> abort!("expected #{env} to be an integer, got: #{inspect(port)}")
end
end
end
@doc """
Parses and validates the ip from env.
"""
def ip!(env) do
if ip = System.get_env(env) do
ip!(env, ip)
end
end
@doc """
Parses and validates the ip within context.
"""
def ip!(context, ip) do
case ip |> String.to_charlist() |> :inet.parse_address() do
{:ok, ip} ->
ip
{:error, :einval} ->
abort!("expected #{context} to be a valid ipv4 or ipv6 address, got: #{ip}")
end
end
@doc """
Parses the cookie from env.
"""
def cookie!(env) do
if cookie = System.get_env(env) do
String.to_atom(cookie)
end
end
@doc """
Parses and validates the password from env.
"""
def password!(env) do
if password = System.get_env(env) do
if byte_size(password) < 12 do
abort!("cannot start Livebook because #{env} must be at least 12 characters")
end
password
end
end
@doc """
Parses and validates default runtime from env.
"""
def default_runtime!(env) do
if runtime = System.get_env(env) do
default_runtime!(env, runtime)
end
end
@doc """
Parses and validates default runtime within context.
"""
def default_runtime!(context, runtime) do
case runtime do
"standalone" ->
{Livebook.Runtime.ElixirStandalone, []}
"embedded" ->
{Livebook.Runtime.Embedded, []}
"mix" ->
case mix_path(File.cwd!()) do
{:ok, path} ->
{Livebook.Runtime.MixStandalone, [path]}
:error ->
abort!(
"the current directory is not a Mix project, make sure to specify the path explicitly with mix:path"
)
end
"mix:" <> path ->
case mix_path(path) do
{:ok, path} ->
{Livebook.Runtime.MixStandalone, [path]}
:error ->
abort!(~s{"#{path}" does not point to a Mix project})
end
other ->
abort!(
~s{expected #{context} to be either "standalone", "mix[:path]" or "embedded", got: #{inspect(other)}}
)
end
end
defp mix_path(path) do
path = Path.expand(path)
mixfile = Path.join(path, "mix.exs")
if File.exists?(mixfile) do
{:ok, path}
else
:error
end
end
@doc """
Aborts booting due to a configuration error.
"""
def abort!(message) do
IO.puts("\nERROR!!! [Livebook] " <> message)
System.halt(1)
end
end
| 22.281553 | 114 | 0.591721 |
9e4599c4e2bb731621897b2454fd57e9f7ff0ff6 | 3,180 | exs | Elixir | test/graphql/resolvers/comment_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | test/graphql/resolvers/comment_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | test/graphql/resolvers/comment_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule AccentTest.GraphQL.Resolvers.Comment do
use Accent.RepoCase
use Oban.Testing, repo: Accent.Repo
alias Accent.GraphQL.Resolvers.Comment, as: Resolver
alias Accent.{
Comment,
Language,
Project,
Repo,
Revision,
Translation,
User
}
defmodule PlugConn do
defstruct [:assigns]
end
@user %User{email: "[email protected]"}
setup do
user = Repo.insert!(@user)
french_language = %Language{name: "french"} |> Repo.insert!()
project = %Project{main_color: "#f00", name: "My project"} |> Repo.insert!()
revision = %Revision{language_id: french_language.id, project_id: project.id, master: true} |> Repo.insert!()
translation = %Translation{revision_id: revision.id, key: "ok", corrected_text: "bar", proposed_text: "bar"} |> Repo.insert!()
{:ok, [user: user, project: project, translation: translation]}
end
test "create", %{translation: translation, project: project, user: user} do
context = %{context: %{conn: %PlugConn{assigns: %{current_user: user}}}}
{:ok, result} = Resolver.create(translation, %{text: "First comment"}, context)
assert_enqueued(
worker: Accent.Hook.Outbounds.Mock,
args: %{
"event" => "create_comment",
"payload" => %{
"text" => "First comment",
"user" => %{"email" => user.email},
"translation" => %{"id" => translation.id, "key" => translation.key}
},
"project_id" => project.id,
"user_id" => user.id
}
)
assert get_in(result, [:errors]) == nil
assert get_in(Repo.all(Comment), [Access.all(), Access.key(:text)]) == ["First comment"]
end
test "delete", %{translation: translation, user: user} do
comment = %Comment{translation_id: translation.id, text: "test", user: user} |> Repo.insert!()
assert get_in(Repo.all(Comment), [Access.all(), Access.key(:id)]) == [comment.id]
{:ok, result} = Resolver.delete(comment, nil, nil)
assert get_in(result, [:errors]) == nil
assert Repo.all(Comment) == []
end
test "update", %{translation: translation, user: user} do
comment = %Comment{translation_id: translation.id, text: "test", user: user} |> Repo.insert!()
assert get_in(Repo.all(Comment), [Access.all(), Access.key(:id)]) == [comment.id]
{:ok, result} = Resolver.update(comment, %{text: "updated"}, nil)
assert get_in(result, [:errors]) == nil
assert get_in(Repo.all(Comment), [Access.all(), Access.key(:text)]) == ["updated"]
end
test "list project", %{project: project, translation: translation, user: user} do
comment = %Comment{translation_id: translation.id, text: "test", user: user} |> Repo.insert!()
{:ok, result} = Resolver.list_project(project, %{}, %{})
assert get_in(result, [:entries, Access.all(), Access.key(:id)]) == [comment.id]
end
test "list translation", %{translation: translation, user: user} do
comment = %Comment{translation_id: translation.id, text: "test", user: user} |> Repo.insert!()
{:ok, result} = Resolver.list_translation(translation, %{}, %{})
assert get_in(result, [:entries, Access.all(), Access.key(:id)]) == [comment.id]
end
end
| 33.473684 | 130 | 0.632704 |
9e45a7cdb11938f2897b13d7f9865493ac64eb6c | 6,648 | exs | Elixir | lib/logger/test/logger_test.exs | liveforeverx/elixir | cf3cf0bd5443b59206e5733602244bc3543f0a53 | [
"Apache-2.0"
] | null | null | null | lib/logger/test/logger_test.exs | liveforeverx/elixir | cf3cf0bd5443b59206e5733602244bc3543f0a53 | [
"Apache-2.0"
] | null | null | null | lib/logger/test/logger_test.exs | liveforeverx/elixir | cf3cf0bd5443b59206e5733602244bc3543f0a53 | [
"Apache-2.0"
] | null | null | null | defmodule LoggerTest do
use Logger.Case
require Logger
test "add_translator/1 and remove_translator/1" do
defmodule CustomTranslator do
def t(:debug, :info, :format, {'hello: ~p', [:ok]}) do
:skip
end
def t(:debug, :info, :format, {'world: ~p', [:ok]}) do
{:ok, "rewritten"}
end
def t(_, _, _, _) do
:none
end
end
assert Logger.add_translator({CustomTranslator, :t})
assert capture_log(fn ->
:error_logger.info_msg('hello: ~p', [:ok])
end) == ""
assert capture_log(fn ->
:error_logger.info_msg('world: ~p', [:ok])
end) =~ "\[info\] rewritten"
after
assert Logger.remove_translator({CustomTranslator, :t})
end
test "add_backend/1 and remove_backend/1" do
assert :ok = Logger.remove_backend(:console)
assert Application.get_env(:logger, :backends) == []
assert Logger.remove_backend(:console) ==
{:error, :not_found}
assert capture_log(fn ->
assert Logger.debug("hello", []) == :ok
end) == ""
assert {:ok, _pid} = Logger.add_backend(:console)
assert Application.get_env(:logger, :backends) == [:console]
assert Logger.add_backend(:console) == {:error, :already_present}
assert Application.get_env(:logger, :backends) == [:console]
end
test "add_backend/1 with {module, id}" do
defmodule MyBackend do
use GenEvent
def init({MyBackend, :hello}) do
{:ok, :hello}
end
end
assert {:ok, _} = Logger.add_backend({MyBackend, :hello})
assert {:error, :already_present} = Logger.add_backend({MyBackend, :hello})
assert :ok = Logger.remove_backend({MyBackend, :hello})
end
test "level/0" do
assert Logger.level == :debug
end
test "enable/1 and disable/1" do
assert Logger.metadata([]) == :ok
assert capture_log(fn ->
assert Logger.debug("hello", []) == :ok
end) =~ msg("[debug] hello")
assert Logger.disable(self()) == :ok
assert capture_log(fn ->
assert Logger.debug("hello", []) == :ok
end) == ""
assert Logger.metadata([]) == :ok
assert capture_log(fn ->
assert Logger.debug("hello", []) == :ok
end) == ""
assert Logger.enable(self()) == :ok
assert capture_log(fn ->
assert Logger.debug("hello", []) == :ok
end) =~ msg("[debug] hello")
end
test "compare_levels/2" do
assert Logger.compare_levels(:debug, :debug) == :eq
assert Logger.compare_levels(:debug, :info) == :lt
assert Logger.compare_levels(:debug, :warn) == :lt
assert Logger.compare_levels(:debug, :error) == :lt
assert Logger.compare_levels(:info, :debug) == :gt
assert Logger.compare_levels(:info, :info) == :eq
assert Logger.compare_levels(:info, :warn) == :lt
assert Logger.compare_levels(:info, :error) == :lt
assert Logger.compare_levels(:warn, :debug) == :gt
assert Logger.compare_levels(:warn, :info) == :gt
assert Logger.compare_levels(:warn, :warn) == :eq
assert Logger.compare_levels(:warn, :error) == :lt
assert Logger.compare_levels(:error, :debug) == :gt
assert Logger.compare_levels(:error, :info) == :gt
assert Logger.compare_levels(:error, :warn) == :gt
assert Logger.compare_levels(:error, :error) == :eq
end
test "debug/2" do
assert capture_log(fn ->
assert Logger.debug("hello", []) == :ok
end) =~ msg("[debug] hello")
assert capture_log(:info, fn ->
assert Logger.debug("hello", []) == :ok
end) == ""
end
test "info/2" do
assert capture_log(fn ->
assert Logger.info("hello", []) == :ok
end) =~ msg("[info] hello")
assert capture_log(:warn, fn ->
assert Logger.info("hello", []) == :ok
end) == ""
end
test "warn/2" do
assert capture_log(fn ->
assert Logger.warn("hello", []) == :ok
end) =~ msg("[warn] hello")
assert capture_log(:error, fn ->
assert Logger.warn("hello", []) == :ok
end) == ""
end
test "error/2" do
assert capture_log(fn ->
assert Logger.error("hello", []) == :ok
end) =~ msg("[error] hello")
end
test "remove unused calls at compile time" do
Logger.configure(compile_time_purge_level: :info)
defmodule Sample do
def debug do
Logger.debug "hello"
end
def info do
Logger.info "hello"
end
end
assert capture_log(fn ->
assert Sample.debug == :ok
end) == ""
assert capture_log(fn ->
assert Sample.info == :ok
end) =~ msg("[info] hello")
after
Logger.configure(compile_time_purge_level: :debug)
end
test "log/2 truncates messages" do
Logger.configure(truncate: 4)
assert capture_log(fn ->
Logger.log(:debug, "hello")
end) =~ "hell (truncated)"
after
Logger.configure(truncate: 8096)
end
test "log/2 with to_string/1 conversion" do
Logger.configure(truncate: 4)
assert capture_log(fn ->
Logger.log(:debug, :hello)
end) =~ "hell (truncated)"
after
Logger.configure(truncate: 8096)
end
test "log/2 does not fails when the Logger is off" do
logger = Process.whereis(Logger)
Process.unregister(Logger)
try do
assert Logger.log(:debug, "hello") == {:error, :noproc}
after
Process.register(logger, Logger)
end
end
test "log/2 relies on sync_threshold" do
Logger.remove_backend(:console)
Logger.configure(sync_threshold: 0)
for _ <- 1..1000, do: Logger.log(:info, "some message")
after
Logger.configure(sync_threshold: 20)
Logger.add_backend(:console)
end
test "stop the application silently" do
Application.put_env(:logger, :backends, [])
Logger.App.stop()
Application.start(:logger)
assert capture_log(fn ->
assert Logger.debug("hello", []) == :ok
end) == ""
assert {:ok, pid} = Logger.add_backend(:console)
assert Logger.add_backend(:console) ==
{:error, :already_present}
after
Application.put_env(:logger, :backends, [:console])
Logger.App.stop()
Application.start(:logger)
end
test "restarts Logger.Config on Logger exits" do
Process.whereis(Logger) |> Process.exit(:kill)
wait_for_logger()
wait_for_handler(Logger, Logger.Config)
wait_for_handler(:error_logger, Logger.ErrorHandler)
end
test "Logger.Config updates config on config_change/3" do
:ok = Logger.configure([level: :debug])
try do
Application.put_env(:logger, :level, :error)
assert Logger.App.config_change([level: :error], [], []) === :ok
assert Logger.level() === :error
after
Logger.configure([level: :debug])
end
end
end
| 26.698795 | 79 | 0.61778 |
9e45b6fc1b3e3dc44bfa86f721fc0a4a06d12605 | 2,700 | ex | Elixir | lib/symmetric_encryption.ex | feals-inc/symmetric_encryption.ex | 74cee41e615713238e8d4211ab069784d5a761e3 | [
"Apache-2.0"
] | 2 | 2020-08-20T20:01:07.000Z | 2020-08-21T01:47:08.000Z | lib/symmetric_encryption.ex | feals-inc/symmetric_encryption.ex | 74cee41e615713238e8d4211ab069784d5a761e3 | [
"Apache-2.0"
] | 2 | 2020-09-28T14:46:21.000Z | 2020-10-12T22:04:56.000Z | lib/symmetric_encryption.ex | feals-inc/symmetric_encryption.ex | 74cee41e615713238e8d4211ab069784d5a761e3 | [
"Apache-2.0"
] | 2 | 2020-09-24T14:37:03.000Z | 2020-10-11T11:15:19.000Z | defmodule SymmetricEncryption do
@moduledoc """
Symmetric Encryption.
Supports AES symmetric encryption using the CBC block cipher.
"""
@doc """
Encrypt String data.
## Examples
iex> SymmetricEncryption.encrypt("Hello World")
"QEVuQwIAPiplaSyln4bywEKXYKDOqQ=="
"""
defdelegate encrypt(data), to: SymmetricEncryption.Encryptor
@doc """
Always return the same encrypted value for the same input data.
The same global IV is used to generate the encrypted data, which is considered insecure since
too much encrypted data using the same key and IV will allow hackers to reverse the key.
The same encrypted value is returned every time the same data is encrypted, which is useful
when the encrypted value is used with database lookups etc.
## Examples
iex> SymmetricEncryption.fixed_encrypt("Hello World")
"QEVuQwIAPiplaSyln4bywEKXYKDOqQ=="
iex> SymmetricEncryption.fixed_encrypt("Hello World")
"QEVuQwIAPiplaSyln4bywEKXYKDOqQ=="
"""
defdelegate fixed_encrypt(data), to: SymmetricEncryption.Encryptor
@doc """
Decrypt String data.
## Examples
iex> encrypted = SymmetricEncryption.encrypt("Hello World")
"QEVuQwIAPiplaSyln4bywEKXYKDOqQ=="
iex> SymmetricEncryption.decrypt(encrypted)
"Hello World"
"""
defdelegate decrypt(encrypted), to: SymmetricEncryption.Decryptor
@doc """
Is the string encrypted?
## Examples
iex> encrypted = SymmetricEncryption.encrypt("Hello World")
"QEVuQwIAPiplaSyln4bywEKXYKDOqQ=="
iex> SymmetricEncryption.encrypted?(encrypted)
true
iex> SymmetricEncryption.encrypted?("Hello World")
false
"""
defdelegate encrypted?(encrypted), to: SymmetricEncryption.Decryptor
@doc """
Return the header for an encrypted string.
## Examples
iex> encrypted = SymmetricEncryption.encrypt("Hello World")
"QEVuQwJAEAAPX3a7EGJ7STMqIO8g38VeB7mFO/DC6DhdYljT4AmdFw=="
iex> SymmetricEncryption.header(encrypted)
%SymmetricEncryption.Header{
auth_tag: nil,
cipher_name: nil,
compress: false,
encrypted_key: nil,
iv: <<15, 95, 118, 187, 16, 98, 123, 73, 51, 42, 32, 239, 32, 223, 197, 94>>,
version: 2
}
"""
def header(encrypted) do
{_, header} = SymmetricEncryption.Decryptor.parse_header(encrypted)
header
end
@doc """
Adds a Cipher struct into memory
## Examples
iex> SymmetricEncryption.add_cipher(%Cipher{iv: "fake_iv", key: "fake_key" , version: 1})
%SymmetricEncryption.Cipher{
iv: "fake_iv",
key: "fake_key",
version: 1
}
"""
defdelegate add_cipher(cipher), to: SymmetricEncryption.Config
end
| 26.213592 | 95 | 0.696667 |
9e45b8dc01d8c15af5b47dc7fffae8fde91efe27 | 1,161 | ex | Elixir | web/channels/user_socket.ex | rubencaro/pedro | b550b3af700962283fa9e3985e1dcc2da2e14d0d | [
"MIT"
] | null | null | null | web/channels/user_socket.ex | rubencaro/pedro | b550b3af700962283fa9e3985e1dcc2da2e14d0d | [
"MIT"
] | null | null | null | web/channels/user_socket.ex | rubencaro/pedro | b550b3af700962283fa9e3985e1dcc2da2e14d0d | [
"MIT"
] | null | null | null | defmodule Pedro.UserSocket do
use Phoenix.Socket
## Channels
# channel "rooms:*", Pedro.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# Pedro.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 30.552632 | 83 | 0.700258 |
9e46056d3f4f2bbe231daab73732c1861cc110d2 | 81 | ex | Elixir | lib/price_tracker_web/views/tracked_product_view.ex | kevinbenard/price_tracker | 0a7248edd4afca21f00e8b0fc05561f97799c0ca | [
"MIT"
] | null | null | null | lib/price_tracker_web/views/tracked_product_view.ex | kevinbenard/price_tracker | 0a7248edd4afca21f00e8b0fc05561f97799c0ca | [
"MIT"
] | null | null | null | lib/price_tracker_web/views/tracked_product_view.ex | kevinbenard/price_tracker | 0a7248edd4afca21f00e8b0fc05561f97799c0ca | [
"MIT"
] | null | null | null | defmodule PriceTrackerWeb.TrackedProductView do
use PriceTrackerWeb, :view
end
| 20.25 | 47 | 0.851852 |
9e4611482366af48cddb60c36b906fd3c1893e8c | 735 | ex | Elixir | lib/budget_app_web/controllers/fallback_controller.ex | djordje/budget_app.backend | 3febe64892e700f3174b8eddbc4b96260c444308 | [
"MIT"
] | null | null | null | lib/budget_app_web/controllers/fallback_controller.ex | djordje/budget_app.backend | 3febe64892e700f3174b8eddbc4b96260c444308 | [
"MIT"
] | null | null | null | lib/budget_app_web/controllers/fallback_controller.ex | djordje/budget_app.backend | 3febe64892e700f3174b8eddbc4b96260c444308 | [
"MIT"
] | null | null | null | defmodule BudgetAppWeb.FallbackController do
@moduledoc """
Translates controller action results into valid `Plug.Conn` responses.
See `Phoenix.Controller.action_fallback/1` for more details.
"""
use BudgetAppWeb, :controller
def call(conn, {:error, %Ecto.Changeset{} = changeset}) do
conn
|> put_status(:unprocessable_entity)
|> render(BudgetAppWeb.ChangesetView, "error.json", changeset: changeset)
end
def call(conn, {:error, :not_found}) do
conn
|> put_status(:not_found)
|> render(BudgetAppWeb.ErrorView, :"404")
end
def call(conn, {:operation_error, message}) do
conn
|> put_status(:bad_request)
|> render(BudgetAppWeb.ErrorView, "400.json", detail: message)
end
end
| 27.222222 | 77 | 0.70068 |
9e46361ca3a2d9e1fcae434c6ca410e19d63b2f2 | 550 | exs | Elixir | priv/repo/migrations/20161030220220_modify_emails_unique_constraint.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | priv/repo/migrations/20161030220220_modify_emails_unique_constraint.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | priv/repo/migrations/20161030220220_modify_emails_unique_constraint.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z | defmodule Hexpm.Repo.Migrations.ModifyEmailsUniqueConstraint do
use Ecto.Migration
def up() do
execute("ALTER TABLE emails DROP CONSTRAINT emails_email_key")
execute("CREATE UNIQUE INDEX emails_email_key ON emails (email) WHERE verified = 'true'")
execute("CREATE UNIQUE INDEX emails_email_user_key ON emails (email, user_id)")
end
def down() do
execute("DROP INDEX emails_email_key")
execute("DROP INDEX emails_email_user_key")
execute("ALTER TABLE emails ADD CONSTRAINT emails_email_key UNIQUE (email)")
end
end
| 34.375 | 93 | 0.761818 |
9e463fae062397e2a9948e2fc0e4b8d1b157b3ba | 134 | ex | Elixir | web/controllers/page_controller.ex | slurmulon/thing | 21791e7a9579d41de28a0a6131218aed3679d033 | [
"Apache-2.0"
] | null | null | null | web/controllers/page_controller.ex | slurmulon/thing | 21791e7a9579d41de28a0a6131218aed3679d033 | [
"Apache-2.0"
] | null | null | null | web/controllers/page_controller.ex | slurmulon/thing | 21791e7a9579d41de28a0a6131218aed3679d033 | [
"Apache-2.0"
] | null | null | null | defmodule Thing.PageController do
use Thing.Web, :controller
def index(conn, _params) do
render conn, "index.html"
end
end
| 16.75 | 33 | 0.723881 |
9e46480398f93ab5a49304a8f8335da25dbaa283 | 1,190 | ex | Elixir | elixir/hello/lib/hello_web/endpoint.ex | hwaterke/backend-challenge | 107f373c38b09b1db3562a843ce4c38428bc4b48 | [
"MIT"
] | 2 | 2019-10-29T22:24:37.000Z | 2019-11-01T16:48:43.000Z | elixir/hello/lib/hello_web/endpoint.ex | hwaterke/backend-challenge | 107f373c38b09b1db3562a843ce4c38428bc4b48 | [
"MIT"
] | null | null | null | elixir/hello/lib/hello_web/endpoint.ex | hwaterke/backend-challenge | 107f373c38b09b1db3562a843ce4c38428bc4b48 | [
"MIT"
] | null | null | null | defmodule HelloWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :hello
socket "/socket", HelloWeb.UserSocket,
websocket: true,
longpoll: false
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :hello,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_hello_key",
signing_salt: "shozrmqC"
plug HelloWeb.Router
end
| 26.444444 | 63 | 0.701681 |
9e464ae230e02d901df01687179323c6597bfe4c | 1,480 | ex | Elixir | lib/prima_auth0_ex/token_provider/auth0_jwks_kids_fetcher.ex | primait/auth0_ex | 15ef5d6d91d8fe00ff703a4f58e1cb32bb169a82 | [
"MIT"
] | 5 | 2021-12-01T10:50:40.000Z | 2022-02-15T13:07:02.000Z | lib/prima_auth0_ex/token_provider/auth0_jwks_kids_fetcher.ex | primait/auth0_ex | 15ef5d6d91d8fe00ff703a4f58e1cb32bb169a82 | [
"MIT"
] | 11 | 2021-12-22T09:19:28.000Z | 2022-03-24T06:15:04.000Z | lib/prima_auth0_ex/token_provider/auth0_jwks_kids_fetcher.ex | primait/auth0_ex | 15ef5d6d91d8fe00ff703a4f58e1cb32bb169a82 | [
"MIT"
] | null | null | null | defmodule PrimaAuth0Ex.TokenProvider.Auth0JwksKidsFetcher do
@moduledoc """
Fetches key ids (aka `kid`s) from Auth0 JWKS
"""
require Logger
alias PrimaAuth0Ex.TokenProvider.JwksKidsFetcher
@behaviour JwksKidsFetcher
@auth0_jwks_api_path "/.well-known/jwks.json"
@spec fetch_kids(PrimaAuth0Ex.Auth0Credentials.t()) :: {:ok, [String.t()]} | {:error, any()}
@impl JwksKidsFetcher
def fetch_kids(credentials) do
jwks_url = credentials.base_url <> @auth0_jwks_api_path
jwks_url
|> Telepoison.get()
|> parse_body()
|> extract_kids()
end
defp parse_body({:ok, %HTTPoison.Response{status_code: 200, body: body}}) do
case Jason.decode(body) do
{:ok, jwks} ->
jwks
{:error, error} ->
Logger.warning("Error parsing JWKS", error: inspect(error))
{:error, error}
end
end
defp parse_body(error_response) do
Logger.warning("Error retrieving JWKS", response: inspect(error_response))
{:error, error_response}
end
defp extract_kids({:error, reason}), do: {:error, reason}
defp extract_kids(jwks) do
case get_in(jwks, ["keys", Access.all(), "kid"]) do
nil ->
Logger.warning("Error parsing kids from JWKS", jwks: inspect(jwks))
{:error, :malformed_jwks}
kids ->
{:ok, kids}
end
rescue
error ->
Logger.warning("Error parsing kids from JWKS", jwks: inspect(jwks), error: inspect(error))
{:error, :malformed_jwks}
end
end
| 26.428571 | 96 | 0.656757 |
9e464e4446b7098a90b8daa8258d181b08a76b83 | 63 | ex | Elixir | lib/travel/repo.ex | markevich/travel | f6f7dd7dd459149926c305984685d91ca1376f77 | [
"MIT"
] | 4 | 2016-11-01T14:59:33.000Z | 2016-11-01T15:05:57.000Z | lib/travel/repo.ex | markevich/travel | f6f7dd7dd459149926c305984685d91ca1376f77 | [
"MIT"
] | null | null | null | lib/travel/repo.ex | markevich/travel | f6f7dd7dd459149926c305984685d91ca1376f77 | [
"MIT"
] | null | null | null | defmodule Travel.Repo do
use Ecto.Repo, otp_app: :travel
end
| 15.75 | 33 | 0.761905 |
9e464e99e74ecea09369a8d3924b28843781916f | 437 | exs | Elixir | test/fixtures/toolchain/nerves.exs | opencollective/nerves | 81f5d30de283e77f3720a87fa1435619f0da12de | [
"Apache-2.0"
] | 1 | 2019-06-12T17:34:10.000Z | 2019-06-12T17:34:10.000Z | test/fixtures/toolchain/nerves.exs | opencollective/nerves | 81f5d30de283e77f3720a87fa1435619f0da12de | [
"Apache-2.0"
] | null | null | null | test/fixtures/toolchain/nerves.exs | opencollective/nerves | 81f5d30de283e77f3720a87fa1435619f0da12de | [
"Apache-2.0"
] | null | null | null | use Mix.Config
version =
Path.join(__DIR__, "VERSION")
|> File.read!
|> String.trim
config :toolchain, :nerves_env,
type: :toolchain,
version: version,
compiler: :nerves_package,
target_tuple: :x86_64_unknown_linux_musl,
platform: Nerves.Toolchain.CTNG,
platform_config: [
defconfig: [
darwin: "darwin_defconfig",
linux: "linux_defconfig"
],
package_files: [
"linux_defconfig"
]
]
| 19 | 43 | 0.665904 |
9e4659e3703c5d2d02a83360c34109a594584699 | 9,517 | exs | Elixir | spec/aoc/day10_spec.exs | CraigCottingham/advent-of-code-2019 | 76a1545e4cca14fe1e9e0de475de253170da1645 | [
"Apache-2.0"
] | null | null | null | spec/aoc/day10_spec.exs | CraigCottingham/advent-of-code-2019 | 76a1545e4cca14fe1e9e0de475de253170da1645 | [
"Apache-2.0"
] | null | null | null | spec/aoc/day10_spec.exs | CraigCottingham/advent-of-code-2019 | 76a1545e4cca14fe1e9e0de475de253170da1645 | [
"Apache-2.0"
] | null | null | null | defmodule AoC.Day10.Spec do
@moduledoc false
use ESpec
describe "sanity checks" do
it "tests a small example" do
expect(
AoC.Day10.zap_asteroids(
[
".#....#####...#..",
"##...##.#####..##",
"##...#...#.#####.",
"..#.....X...###..",
"..#.#.....#....##"
],
{8, 3}
)
)
|> to(
eq([
# .#....###24...#..
# ##...##.13#67..9#
# ##...#...5.8####.
# ..#.....X...###..
# ..#.#.....#....##
{8, 1},
{9, 0},
{9, 1},
{10, 0},
{9, 2},
{11, 1},
{12, 1},
{11, 2},
{15, 1},
# .#....###.....#..
# ##...##...#.....#
# ##...#......1234.
# ..#.....X...5##..
# ..#.9.....8....76
{12, 2},
{13, 2},
{14, 2},
{15, 2},
{12, 3},
{16, 4},
{15, 4},
{10, 4},
{4, 4},
# .8....###.....#..
# 56...9#...#.....#
# 34...7...........
# ..2.....X....##..
# ..1..............
{2, 4},
{2, 3},
{0, 2},
{1, 2},
{0, 1},
{1, 1},
{5, 2},
{1, 0},
{5, 1},
# ......234.....6..
# ......1...5.....7
# .................
# ........X....89..
# .................
{6, 1},
{6, 0},
{7, 0},
{8, 0},
{10, 1},
{14, 0},
{16, 1},
{13, 3},
{14, 3}
])
)
end
it "tests a bigger example" do
zapped_order =
AoC.Day10.zap_asteroids(
[
".#..##.###...#######",
"##.############..##.",
".#.######.########.#",
".###.#######.####.#.",
"#####.##.#.##.###.##",
"..#####..#.#########",
"####################",
"#.####....###.#.#.##",
"##.#################",
"#####.##.###..####..",
"..######..##.#######",
"####.##.####...##..#",
".#####..#.######.###",
"##...#.##########...",
"#.##########.#######",
".####.#.###.###.#.##",
"....##.##.###..#####",
".#.#.###########.###",
"#.#.#.#####.####.###",
"###.##.####.##.#..##"
],
{11, 13}
)
expect(Enum.slice(zapped_order, 0..2)) |> to(eq([{11, 12}, {12, 1}, {12, 2}]))
expect(Enum.at(zapped_order, 9)) |> to(eq({12, 8}))
expect(Enum.at(zapped_order, 19)) |> to(eq({16, 0}))
expect(Enum.at(zapped_order, 49)) |> to(eq({16, 9}))
expect(Enum.at(zapped_order, 99)) |> to(eq({10, 16}))
expect(Enum.at(zapped_order, 198)) |> to(eq({9, 6}))
expect(Enum.at(zapped_order, 199)) |> to(eq({8, 2}))
expect(Enum.at(zapped_order, 200)) |> to(eq({10, 9}))
expect(Enum.at(zapped_order, 298)) |> to(eq({11, 1}))
end
end
example_group "angle/2" do
it("positive Y axis",
do: expect(AoC.Day10.angle({0, 0}, {0, -1})) |> to(eq(Math.pi() * 0 / 4))
)
it("quadrant I", do: expect(AoC.Day10.angle({0, 0}, {1, -1})) |> to(eq(Math.pi() * 1 / 4)))
it("positive X axis", do: expect(AoC.Day10.angle({0, 0}, {1, 0})) |> to(eq(Math.pi() * 2 / 4)))
it("quadrant IV", do: expect(AoC.Day10.angle({0, 0}, {1, 1})) |> to(eq(Math.pi() * 3 / 4)))
it("negative Y axis", do: expect(AoC.Day10.angle({0, 0}, {0, 1})) |> to(eq(Math.pi() * 4 / 4)))
it("quadrant III", do: expect(AoC.Day10.angle({0, 0}, {-1, 1})) |> to(eq(Math.pi() * 5 / 4)))
it("negative X axis",
do: expect(AoC.Day10.angle({0, 0}, {-1, 0})) |> to(eq(Math.pi() * 6 / 4))
)
it("quadrant II", do: expect(AoC.Day10.angle({0, 0}, {-1, -1})) |> to(eq(Math.pi() * 7 / 4)))
end
example_group "detected_count" do
it do
# .7..7
# .....
# 67775
# ....7
# ...87
all_positions = [
{1, 0},
{4, 0},
{0, 2},
{1, 2},
{2, 2},
{3, 2},
{4, 2},
{4, 3},
{3, 4},
{4, 4}
]
expect(AoC.Day10.detected_count({1, 0}, List.delete(all_positions, {1, 0}))) |> to(eq(7))
expect(AoC.Day10.detected_count({4, 0}, List.delete(all_positions, {4, 0}))) |> to(eq(7))
expect(AoC.Day10.detected_count({0, 2}, List.delete(all_positions, {0, 2}))) |> to(eq(6))
expect(AoC.Day10.detected_count({1, 2}, List.delete(all_positions, {1, 2}))) |> to(eq(7))
expect(AoC.Day10.detected_count({2, 2}, List.delete(all_positions, {2, 2}))) |> to(eq(7))
expect(AoC.Day10.detected_count({3, 2}, List.delete(all_positions, {3, 2}))) |> to(eq(7))
expect(AoC.Day10.detected_count({4, 2}, List.delete(all_positions, {4, 2}))) |> to(eq(5))
expect(AoC.Day10.detected_count({4, 3}, List.delete(all_positions, {4, 3}))) |> to(eq(7))
expect(AoC.Day10.detected_count({3, 4}, List.delete(all_positions, {3, 4}))) |> to(eq(8))
expect(AoC.Day10.detected_count({4, 4}, List.delete(all_positions, {4, 4}))) |> to(eq(7))
end
end
example_group "distance/2" do
it(do: expect(AoC.Day10.distance({0, 0}, {1, 0})) |> to(eq(1)))
it(do: expect(AoC.Day10.distance({0, 0}, {0, 1})) |> to(eq(1)))
it(do: expect(AoC.Day10.distance({0, 0}, {-1, 0})) |> to(eq(1)))
it(do: expect(AoC.Day10.distance({0, 0}, {1, 1})) |> to(eq(Math.sqrt(2))))
it(do: expect(AoC.Day10.distance({1, 2}, {4, 6})) |> to(eq(5)))
end
example_group "filter/2" do
it do
map_data = [".#..#", ".....", "#####", "....#", "...##"]
m = AoC.Day10.map_data_to_matrix(map_data)
expect(AoC.Day10.filter(m, fn {_, value} -> value == 1 end))
|> to(eq([{1, 0}, {4, 0}, {0, 2}, {1, 2}, {2, 2}, {3, 2}, {4, 2}, {4, 3}, {3, 4}, {4, 4}]))
end
end
example_group "map_data_to_matrix/1" do
it do
map_data = [".#..#", ".....", "#####", "....#", "...##"]
m = AoC.Day10.map_data_to_matrix(map_data)
expect(m.rows) |> to(eq(5))
expect(m.columns) |> to(eq(5))
expect(Max.to_list_of_lists(m))
|> to(
eq([[0, 1, 0, 0, 1], [0, 0, 0, 0, 0], [1, 1, 1, 1, 1], [0, 0, 0, 0, 1], [0, 0, 0, 1, 1]])
)
end
end
example_group "max_detected/1" do
it(
do:
expect(AoC.Day10.max_detected([".#..#", ".....", "#####", "....#", "...##"]))
|> to(eq({{3, 4}, 8}))
)
it(
do:
expect(
AoC.Day10.max_detected([
"......#.#.",
"#..#.#....",
"..#######.",
".#.#.###..",
".#..#.....",
"..#....#.#",
"#..#....#.",
".##.#..###",
"##...#..#.",
".#....####"
])
)
|> to(eq({{5, 8}, 33}))
)
it(
do:
expect(
AoC.Day10.max_detected([
"#.#...#.#.",
".###....#.",
".#....#...",
"##.#.#.#.#",
"....#.#.#.",
".##..###.#",
"..#...##..",
"..##....##",
"......#...",
".####.###."
])
)
|> to(eq({{1, 2}, 35}))
)
it(
do:
expect(
AoC.Day10.max_detected([
".#..#..###",
"####.###.#",
"....###.#.",
"..###.##.#",
"##.##.#.#.",
"....###..#",
"..#.#..#.#",
"#..#.#.###",
".##...##.#",
".....#.#.."
])
)
|> to(eq({{6, 3}, 41}))
)
it(
do:
expect(
AoC.Day10.max_detected([
".#..##.###...#######",
"##.############..##.",
".#.######.########.#",
".###.#######.####.#.",
"#####.##.#.##.###.##",
"..#####..#.#########",
"####################",
"#.####....###.#.#.##",
"##.#################",
"#####.##.###..####..",
"..######..##.#######",
"####.##.####...##..#",
".#####..#.######.###",
"##...#.##########...",
"#.##########.#######",
".####.#.###.###.#.##",
"....##.##.###..#####",
".#.#.###########.###",
"#.#.#.#####.####.###",
"###.##.####.##.#..##"
])
)
|> to(eq({{11, 13}, 210}))
)
end
example_group "position_angles/2" do
# .7..7
# .....
# 67775
# ....7
# ...87
it do
this_position = {4, 2}
other_positions = [{1, 0}, {4, 0}, {0, 2}, {1, 2}, {2, 2}, {3, 2}, {4, 3}, {3, 4}, {4, 4}]
expect(AoC.Day10.position_angles(this_position, other_positions))
|> to(
eq([
{{1, 0}, 5.3003915839322575},
{{4, 0}, 0},
{{0, 2}, Math.pi() * 3 / 2},
{{1, 2}, Math.pi() * 3 / 2},
{{2, 2}, Math.pi() * 3 / 2},
{{3, 2}, Math.pi() * 3 / 2},
{{4, 3}, Math.pi()},
{{3, 4}, 3.6052402625905993},
{{4, 4}, Math.pi()}
])
)
end
end
example_group "zap/2" do
it(
do:
expect(AoC.Day10.zap([{0, [{{1, 8}, 0}, {{0, 8}, 0}]}, {1, [{{2, 8}, 1}]}], []))
|> to(eq([{1, 8}, {2, 8}, {0, 8}]))
)
end
end
| 27.665698 | 99 | 0.295471 |
9e469e4b00096ba903c4488499becddfabef2426 | 1,107 | ex | Elixir | apps/institute/lib/institute/accounts.ex | hui-ad/institute | 28242d9d324d710a0e70678ec2d79099f1d3a98d | [
"MIT"
] | 4 | 2019-06-12T19:05:34.000Z | 2019-08-18T15:02:56.000Z | apps/institute/lib/institute/accounts.ex | hui-ad/institute | 28242d9d324d710a0e70678ec2d79099f1d3a98d | [
"MIT"
] | 33 | 2019-06-12T18:59:21.000Z | 2021-03-31T15:45:22.000Z | apps/institute/lib/institute/accounts.ex | hui-ad/institute | 28242d9d324d710a0e70678ec2d79099f1d3a98d | [
"MIT"
] | 1 | 2019-06-16T09:38:08.000Z | 2019-06-16T09:38:08.000Z | defmodule Institute.Accounts do
@moduledoc """
The Accounts context.
"""
alias Institute.Repo
alias Institute.Accounts.User
def get_user(id) do
Repo.get(User, id)
end
def get_user!(id) do
Repo.get!(User, id)
end
def get_user_by(params) do
Repo.get_by(User, params)
end
def list_users do
Repo.all(User)
end
def change_user(%User{} = user) do
User.changeset(user, %{})
end
def create_user(attrs \\ %{}) do
%User{}
|> User.changeset(attrs)
|> Repo.insert()
end
def change_registration(%User{} = user, params) do
User.registration_changeset(user, params)
end
def register_user(attrs \\ %{}) do
%User{}
|> User.registration_changeset(attrs)
|> Repo.insert()
end
def authenticate_by_username_and_pass(username, given_pass) do
user = get_user_by(username: username)
cond do
user && Pbkdf2.verify_pass(given_pass, user.password_hash) ->
{:ok, user}
user ->
{:error, :unauthorized}
true ->
Pbkdf2.no_user_verify()
{:error, :not_found}
end
end
end
| 18.147541 | 67 | 0.630533 |
9e46c4d8bb0a9b504f0e4a83731109f88f29d38f | 203 | ex | Elixir | test/support/test_repo.ex | Frameio/sage | 243b387cb4043caaca289eeedf0cc8f1a075ffc3 | [
"MIT"
] | null | null | null | test/support/test_repo.ex | Frameio/sage | 243b387cb4043caaca289eeedf0cc8f1a075ffc3 | [
"MIT"
] | null | null | null | test/support/test_repo.ex | Frameio/sage | 243b387cb4043caaca289eeedf0cc8f1a075ffc3 | [
"MIT"
] | null | null | null | defmodule TestRepo do
def transaction(fun) do
send(self(), {:transaction, fun})
{:ok, fun.()}
end
def rollback(error) do
send(self(), {:rollback, error})
{:error, error}
end
end
| 16.916667 | 37 | 0.605911 |
9e46dda96289d3fce56c7a68ccf71ead4f49e184 | 1,229 | exs | Elixir | test/maintenance_mode/plug_test.exs | tlux/maintenance_mode | 9c7f908e15df3bc5f7dbb08a1a7d52801d014656 | [
"MIT"
] | null | null | null | test/maintenance_mode/plug_test.exs | tlux/maintenance_mode | 9c7f908e15df3bc5f7dbb08a1a7d52801d014656 | [
"MIT"
] | null | null | null | test/maintenance_mode/plug_test.exs | tlux/maintenance_mode | 9c7f908e15df3bc5f7dbb08a1a7d52801d014656 | [
"MIT"
] | null | null | null | defmodule MaintenanceMode.PlugTest do
use ExUnit.Case
use Plug.Test
alias MaintenanceMode.Plug, as: MaintenanceModePlug
alias MaintenanceMode.StatusAgent
describe "init/1" do
test "get config when mod option given" do
assert MaintenanceModePlug.init(mod: MyMaintenanceMode) ==
{MyMaintenanceMode, MyMaintenanceMode.config()}
end
test "raise when mod option missing" do
assert_raise KeyError, ~r/key :mod not found/, fn ->
MaintenanceModePlug.init([])
end
end
end
describe "call/2" do
setup do
{:ok, conn: build_conn()}
end
test "do not update conn when maintenance mode disabled", context do
start_supervised!({StatusAgent, entries: %{MyMaintenanceMode => false}})
assert MaintenanceModePlug.call(context.conn, {MyMaintenanceMode, []}) ==
context.conn
end
test "send 503 when maintenance mode enabled", context do
start_supervised!({StatusAgent, entries: %{MyMaintenanceMode => true}})
conn = MaintenanceModePlug.call(context.conn, {MyMaintenanceMode, []})
assert conn.status == 503
assert conn.halted
end
end
defp build_conn do
conn(:get, "/test")
end
end
| 26.148936 | 79 | 0.672905 |
9e46f1806c0a87b72d94e9fa61918cefe7bb689a | 2,277 | ex | Elixir | lib/tesla/middleware/core.ex | hasclass/tesla | c1ee0e3417ea269124869ca1ff33fd2364391154 | [
"MIT"
] | 1 | 2020-12-21T03:45:23.000Z | 2020-12-21T03:45:23.000Z | lib/tesla/middleware/core.ex | hasclass/tesla | c1ee0e3417ea269124869ca1ff33fd2364391154 | [
"MIT"
] | null | null | null | lib/tesla/middleware/core.ex | hasclass/tesla | c1ee0e3417ea269124869ca1ff33fd2364391154 | [
"MIT"
] | null | null | null | defmodule Tesla.Middleware.BaseUrl do
@behaviour Tesla.Middleware
@moduledoc """
Set base URL for all requests.
The base URL will be prepended to request path/url only
if it does not include http(s).
### Example usage
```
defmodule MyClient do
use Tesla
plug Tesla.Middleware.BaseUrl, "https://api.github.com"
end
MyClient.get("/path") # equals to GET https://api.github.com/path
MyClient.get("http://example.com/path") # equals to GET http://example.com/path
```
"""
def call(env, next, base) do
env
|> apply_base(base)
|> Tesla.run(next)
end
defp apply_base(env, base) do
if Regex.match?(~r/^https?:\/\//, env.url) do
# skip if url is already with scheme
env
else
%{env | url: join(base, env.url)}
end
end
defp join(base, url) do
case {String.last(to_string(base)), url} do
{nil, url} -> url
{"/", "/" <> rest} -> base <> rest
{"/", rest} -> base <> rest
{_, "/" <> rest} -> base <> "/" <> rest
{_, rest} -> base <> "/" <> rest
end
end
end
defmodule Tesla.Middleware.Headers do
@behaviour Tesla.Middleware
@moduledoc """
Set default headers for all requests
### Example usage
```
defmodule Myclient do
use Tesla
plug Tesla.Middleware.Headers, [{"user-agent", "Tesla"}]
end
```
"""
def call(env, next, headers) do
env
|> Tesla.put_headers(headers)
|> Tesla.run(next)
end
end
defmodule Tesla.Middleware.Query do
@behaviour Tesla.Middleware
@moduledoc """
Set default query params for all requests
### Example usage
```
defmodule Myclient do
use Tesla
plug Tesla.Middleware.Query, [token: "some-token"]
end
```
"""
def call(env, next, query) do
env
|> merge(query)
|> Tesla.run(next)
end
defp merge(env, nil), do: env
defp merge(env, query) do
Map.update!(env, :query, &(&1 ++ query))
end
end
defmodule Tesla.Middleware.Opts do
@behaviour Tesla.Middleware
@moduledoc """
Set default opts for all requests
### Example usage
```
defmodule Myclient do
use Tesla
plug Tesla.Middleware.Opts, [some: "option"]
end
```
"""
def call(env, next, opts) do
Tesla.run(%{env | opts: env.opts ++ opts}, next)
end
end
| 19.29661 | 81 | 0.610892 |
9e46f3359ccdabc1009e362452d38293a6eaf424 | 1,102 | ex | Elixir | rojak-api/test/support/conn_case.ex | pyk/rojak | 0dd69efedb58ee5d951e1a43cdfa65b60f8bb7c7 | [
"BSD-3-Clause"
] | 107 | 2016-10-02T05:54:42.000Z | 2021-08-05T00:20:51.000Z | rojak-api/test/support/conn_case.ex | pyk/rojak | 0dd69efedb58ee5d951e1a43cdfa65b60f8bb7c7 | [
"BSD-3-Clause"
] | 134 | 2016-10-02T21:21:08.000Z | 2016-12-27T02:46:34.000Z | rojak-api/test/support/conn_case.ex | pyk/rojak | 0dd69efedb58ee5d951e1a43cdfa65b60f8bb7c7 | [
"BSD-3-Clause"
] | 54 | 2016-10-02T08:47:56.000Z | 2020-03-08T00:56:03.000Z | defmodule RojakAPI.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias RojakAPI.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import RojakAPI.Router.Helpers
# The default endpoint for testing
@endpoint RojakAPI.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(RojakAPI.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(RojakAPI.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 24.488889 | 70 | 0.705989 |
9e47397eb2d3dc55b0943654eb8e305038b2b527 | 163 | exs | Elixir | test/elixir_playground_test.exs | jeffcole/elixir_playground | 39d0b099c97cd50a60c4f8d698c2776738e62094 | [
"MIT"
] | null | null | null | test/elixir_playground_test.exs | jeffcole/elixir_playground | 39d0b099c97cd50a60c4f8d698c2776738e62094 | [
"MIT"
] | null | null | null | test/elixir_playground_test.exs | jeffcole/elixir_playground | 39d0b099c97cd50a60c4f8d698c2776738e62094 | [
"MIT"
] | null | null | null | require IEx
defmodule ElixirPlaygroundTest do
use ExUnit.Case
doctest ElixirPlayground
test "the truth" do
# IEx.pry()
assert 1 + 1 == 2
end
end
| 13.583333 | 33 | 0.687117 |
9e474141500d1cdaa89de6dfb1f3a2676d8be0a3 | 14,225 | ex | Elixir | lib/elixir/lib/system.ex | nurugger07/elixir | c859e49199bb24190656e6d3acb6cf35fe70e8bb | [
"Apache-2.0"
] | 1 | 2019-06-11T20:22:20.000Z | 2019-06-11T20:22:20.000Z | lib/elixir/lib/system.ex | nurugger07/elixir | c859e49199bb24190656e6d3acb6cf35fe70e8bb | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/system.ex | nurugger07/elixir | c859e49199bb24190656e6d3acb6cf35fe70e8bb | [
"Apache-2.0"
] | null | null | null | defmodule System do
@moduledoc """
The System module provides access to variables used or
maintained by the VM and to functions that interact directly
with the VM or the host system.
"""
defp strip_re(iodata, pattern) do
:re.replace(iodata, pattern, "", [return: :binary])
end
defp read_stripped(path) do
case :file.read_file(path) do
{:ok, binary} ->
strip_re(binary, "^\s+|\s+$")
_ -> ""
end
end
# Read and strip the version from the VERSION file.
defmacrop get_version do
case read_stripped(:filename.join(__DIR__, "../../../VERSION")) do
"" -> raise RuntimeError, message: "could not read the version number from VERSION"
data -> data
end
end
# Tries to run "git describe --always --tags". In the case of success returns
# the most recent tag. If that is not available, tries to read the commit hash
# from .git/HEAD. If that fails, returns an empty string.
defmacrop get_describe do
dirpath = :filename.join(__DIR__, "../../../.git")
case :file.read_file_info(dirpath) do
{:ok, _} ->
if :os.find_executable('git') do
data = :os.cmd('git describe --always --tags')
strip_re(data, "\n")
else
read_stripped(:filename.join(".git", "HEAD"))
end
_ -> ""
end
end
# Get the date at compilation time.
defmacrop get_date do
IO.iodata_to_binary :httpd_util.rfc1123_date
end
@doc """
Elixir version information.
Returns Elixir's version as binary.
"""
@spec version() :: String.t
def version, do: get_version
@doc """
Elixir build information.
Returns a keyword list with Elixir version, git tag info and compilation date.
"""
@spec build_info() :: map
def build_info do
%{version: version, tag: get_describe, date: get_date}
end
@doc """
Lists command line arguments.
Returns the list of command line arguments passed to the program.
"""
@spec argv() :: [String.t]
def argv do
:elixir_config.get(:argv)
end
@doc """
Modifies command line arguments.
Changes the list of command line arguments. Use it with caution,
as it destroys any previous argv information.
"""
@spec argv([String.t]) :: :ok
def argv(args) do
:elixir_config.put(:argv, args)
end
@doc """
Current working directory.
Returns the current working directory or `nil` if one
is not available.
"""
def cwd do
case :file.get_cwd do
{:ok, base} -> IO.chardata_to_string(fix_drive_letter(base))
_ -> nil
end
end
defp fix_drive_letter([l, ?:, ?/ | rest] = original) when l in ?A..?Z do
case :os.type() do
{:win32, _} -> [l+?a-?A, ?:, ?/ | rest]
_ -> original
end
end
defp fix_drive_letter(original), do: original
@doc """
Current working directory, exception on error.
Returns the current working directory or raises `RuntimeError`.
"""
def cwd! do
cwd ||
raise RuntimeError, message: "could not get a current working directory, the current location is not accessible"
end
@doc """
User home directory.
Returns the user home directory (platform independent).
"""
def user_home do
:elixir_config.get(:home)
end
@doc """
User home directory, exception on error.
Same as `user_home/0` but raises `RuntimeError`
instead of returning `nil` if no user home is set.
"""
def user_home! do
user_home ||
raise RuntimeError, message: "could not find the user home, please set the HOME environment variable"
end
@doc ~S"""
Writable temporary directory.
Returns a writable temporary directory.
Searches for directories in the following order:
1. the directory named by the TMPDIR environment variable
2. the directory named by the TEMP environment variable
3. the directory named by the TMP environment variable
4. `C:\TMP` on Windows or `/tmp` on Unix
5. as a last resort, the current working directory
Returns `nil` if none of the above are writable.
"""
def tmp_dir do
write_env_tmp_dir('TMPDIR') ||
write_env_tmp_dir('TEMP') ||
write_env_tmp_dir('TMP') ||
write_tmp_dir('/tmp') ||
((cwd = cwd()) && write_tmp_dir(cwd))
end
@doc """
Writable temporary directory, exception on error.
Same as `tmp_dir/0` but raises `RuntimeError`
instead of returning `nil` if no temp dir is set.
"""
def tmp_dir! do
tmp_dir ||
raise RuntimeError, message: "could not get a writable temporary directory, " <>
"please set the TMPDIR environment variable"
end
defp write_env_tmp_dir(env) do
case :os.getenv(env) do
false -> nil
tmp -> write_tmp_dir(tmp)
end
end
defp write_tmp_dir(dir) do
case File.stat(dir) do
{:ok, stat} ->
case {stat.type, stat.access} do
{:directory, access} when access in [:read_write, :write] ->
IO.chardata_to_string(dir)
_ ->
nil
end
{:error, _} -> nil
end
end
@doc """
Registers a program exit handler function.
Registers a function that will be invoked at the end of program execution.
Useful for invoking a hook in "script" mode.
The handler always executes in a different process from the one it was
registered in. As a consequence, any resources managed by the calling process
(ETS tables, open files, etc.) won't be available by the time the handler
function is invoked.
The function must receive the exit status code as an argument.
"""
def at_exit(fun) when is_function(fun, 1) do
:elixir_config.update :at_exit, &[fun|&1]
end
@doc """
Locates an executable on the system.
This function looks up an executable program given
its name using the environment variable PATH on Unix
and Windows. It also considers the proper executable
extension for each OS, so for Windows it will try to
lookup files with `.com`, `.cmd` or similar extensions.
"""
@spec find_executable(binary) :: binary | nil
def find_executable(program) when is_binary(program) do
case :os.find_executable(String.to_char_list(program)) do
false -> nil
other -> List.to_string(other)
end
end
@doc """
System environment variables.
Returns a list of all environment variables. Each variable is given as a
`{name, value}` tuple where both `name` and `value` are strings.
"""
@spec get_env() :: %{String.t => String.t}
def get_env do
Enum.into(:os.getenv, %{}, fn var ->
var = IO.chardata_to_string var
[k, v] = String.split var, "=", parts: 2
{k, v}
end)
end
@doc """
Environment variable value.
Returns the value of the environment variable
`varname` as a binary, or `nil` if the environment
variable is undefined.
"""
@spec get_env(binary) :: binary | nil
def get_env(varname) when is_binary(varname) do
case :os.getenv(String.to_char_list(varname)) do
false -> nil
other -> List.to_string(other)
end
end
@doc """
Erlang VM process identifier.
Returns the process identifier of the current Erlang emulator
in the format most commonly used by the operating system environment.
For more information, see [`:os.getpid/0`](http://www.erlang.org/doc/man/os.html#getpid-0).
"""
@spec get_pid() :: binary
def get_pid, do: IO.iodata_to_binary(:os.getpid)
@doc """
Sets an environment variable value.
Sets a new `value` for the environment variable `varname`.
"""
@spec put_env(binary, binary) :: :ok
def put_env(varname, value) when is_binary(varname) and is_binary(value) do
:os.putenv String.to_char_list(varname), String.to_char_list(value)
:ok
end
@doc """
Sets multiple environment variables.
Sets a new value for each environment variable corresponding
to each key in `dict`.
"""
@spec put_env(Dict.t) :: :ok
def put_env(dict) do
Enum.each dict, fn {key, val} -> put_env key, val end
end
@doc """
Deletes an environment variable.
Removes the variable `varname` from the environment.
"""
@spec delete_env(String.t) :: :ok
def delete_env(varname) do
:os.unsetenv(String.to_char_list(varname))
:ok
end
@doc """
Last exception stacktrace.
Note that the Erlang VM (and therefore this function) does not
return the current stacktrace but rather the stacktrace of the
latest exception.
Inlined by the compiler into `:erlang.get_stacktrace/0`.
"""
def stacktrace do
:erlang.get_stacktrace
end
@doc """
Halts the Erlang runtime system.
Halts the Erlang runtime system where the argument `status` must be a
non-negative integer, the atom `:abort` or a binary.
* If an integer, the runtime system exits with the integer value which
is returned to the operating system.
* If `:abort`, the runtime system aborts producing a core dump, if that is
enabled in the operating system.
* If a string, an Erlang crash dump is produced with status as slogan,
and then the runtime system exits with status code 1.
Note that on many platforms, only the status codes 0-255 are supported
by the operating system.
For more information, see [`:erlang.halt/1`](http://www.erlang.org/doc/man/erlang.html#halt-1).
## Examples
System.halt(0)
System.halt(1)
System.halt(:abort)
"""
@spec halt() :: no_return
@spec halt(non_neg_integer | binary | :abort) :: no_return
def halt(status \\ 0)
def halt(status) when is_integer(status) or status == :abort do
:erlang.halt(status)
end
def halt(status) when is_binary(status) do
:erlang.halt(String.to_char_list(status))
end
@doc ~S"""
Executes the given `command` with `args`.
`command` is expected to be an executable available in PATH
unless an absolute path is given.
`args` must be a list of strings which are not expanded
in any way. For example, this means wildcard expansion will
not happen unless `Path.wildcard/2` is used. On Windows though,
wildcard expansion is up to the program.
This function returns a tuple containing the collected result
and the command exit status.
## Examples
iex> System.cmd "echo", ["hello"]
{"hello\n", 0}
iex> System.cmd "echo", ["hello"], env: [{"MIX_ENV", "test"}]
{"hello\n", 0}
iex> System.cmd "echo", ["hello"], into: IO.stream(:stdio, :line)
hello
{%IO.Stream{}, 0}
## Options
* `:into` - injects the result into the given collectable, defaults to `""`
* `:cd` - the directory to run the command in
* `:env` - an enumerable of tuples containing environment key-value as binary
* `:arg0` - set the command arg0
* `:stderr_to_stdout` - redirects stderr to stdout when `true`
* `:parallelism` - when `true`, the VM will schedule port tasks to improve
parallelism in the system. If set to `false`, the VM will try to perform
commands immediately, improving latency at the expense of parallelism.
The default can be set on system startup by passing the "+spp" argument
to `--erl`.
## Error reasons
If invalid arguments are given, `ArgumentError` is raised by
`System.cmd/3`. `System.cmd/3` also expects a strict set of
options and will raise if unknown or invalid options are given.
Furthermore, `System.cmd/3` may fail with one of the POSIX reasons
detailed below:
* `:system_limit` - all available ports in the Erlang emulator are in use
* `:enomem` - there was not enough memory to create the port
* `:eagain` - there are no more available operating system processes
* `:enametoolong` - the external command given was too long
* `:emfile` - there are no more available file descriptors
(for the operating system process that the Erlang emulator runs in)
* `:enfile` - the file table is full (for the entire operating system)
* `:eacces` - the command does not point to an executable file
* `:enoent` - the command does not point to an existing file
## Shell commands
If you desire to execute a trusted command inside a shell, with pipes,
redirecting and so on, please check
[`:os.cmd/1`](http://www.erlang.org/doc/man/os.html#cmd-1).
"""
@spec cmd(binary, [binary], Keyword.t) ::
{Collectable.t, exit_status :: non_neg_integer}
def cmd(command, args, opts \\ []) when is_binary(command) and is_list(args) do
cmd = String.to_char_list(command)
cmd =
if Path.type(cmd) == :absolute do
cmd
else
:os.find_executable(cmd) || :erlang.error(:enoent, [command, args, opts])
end
{into, opts} = cmd_opts(opts, [:use_stdio, :exit_status, :binary, :hide, args: args], "")
{initial, fun} = Collectable.into(into)
do_cmd Port.open({:spawn_executable, cmd}, opts), initial, fun
end
defp do_cmd(port, acc, fun) do
receive do
{^port, {:data, data}} ->
do_cmd(port, fun.(acc, {:cont, data}), fun)
{^port, {:exit_status, status}} ->
{fun.(acc, :done), status}
end
end
defp cmd_opts([{:into, any}|t], opts, _into),
do: cmd_opts(t, opts, any)
defp cmd_opts([{:cd, bin}|t], opts, into) when is_binary(bin),
do: cmd_opts(t, [{:cd, bin}|opts], into)
defp cmd_opts([{:arg0, bin}|t], opts, into) when is_binary(bin),
do: cmd_opts(t, [{:arg0, bin}|opts], into)
defp cmd_opts([{:stderr_to_stdout, true}|t], opts, into),
do: cmd_opts(t, [:stderr_to_stdout|opts], into)
defp cmd_opts([{:stderr_to_stdout, false}|t], opts, into),
do: cmd_opts(t, opts, into)
defp cmd_opts([{:parallelism, bool}|t], opts, into) when is_boolean(bool),
do: cmd_opts(t, [{:parallelism, bool}|opts], into)
defp cmd_opts([{:env, enum}|t], opts, into),
do: cmd_opts(t, [{:env, validate_env(enum)}|opts], into)
defp cmd_opts([{key, val}|_], _opts, _into),
do: raise(ArgumentError, "invalid option #{inspect key} with value #{inspect val}")
defp cmd_opts([], opts, into),
do: {into, opts}
defp validate_env(enum) do
Enum.map enum, fn
{k, v} ->
{String.to_char_list(k), String.to_char_list(v)}
other ->
raise ArgumentError, "invalid environment key-value #{inspect other}"
end
end
end
| 29.14959 | 118 | 0.660738 |
9e4745b179b4d2d32c3504efa0eb50c6a18e370c | 3,527 | ex | Elixir | lib/taex/moving_average.ex | benyblack/Taex | a98bb1523497c455ce626cadd345185ecc03cea8 | [
"MIT"
] | 20 | 2017-07-17T13:08:21.000Z | 2021-07-15T05:58:19.000Z | lib/taex/moving_average.ex | benyblack/Taex | a98bb1523497c455ce626cadd345185ecc03cea8 | [
"MIT"
] | 3 | 2017-09-06T12:23:45.000Z | 2021-05-25T07:11:06.000Z | lib/taex/moving_average.ex | benyblack/Taex | a98bb1523497c455ce626cadd345185ecc03cea8 | [
"MIT"
] | 6 | 2017-09-01T15:43:17.000Z | 2019-12-30T07:58:25.000Z | defmodule Taex.MovingAverage do
defmodule DoubleEma do
defstruct [:ema, :ema_2, :value]
end
defmodule TripleEma do
defstruct [:ema, :ema_2, :ema_3, :value]
end
defmodule VolumeWeightedMovingAverage do
defstruct [:prices, :volumes, :periods, :value]
def update(%VolumeWeightedMovingAverage{} = vwma, market_price, volume) do
vwma = %{vwma | prices: vwma.prices ++ [market_price], volumes: vwma.volumes ++ [volume]}
vwma = if Enum.count(vwma.prices) > vwma.periods do
[_head | prices] = vwma.prices
[_head | volumes] = vwma.volumes
%{vwma | prices: prices, volumes: volumes}
else
vwma
end
numerator = vwma.prices
|> Enum.with_index
|> Enum.map(fn({price, index})->
price * Enum.at(vwma.volumes, index)
end)
|> Enum.sum
%{vwma | value: numerator/Enum.sum(vwma.volumes)}
end
end
@doc """
Calculates the simple moving average which is just the sum of the items passed in divided by the number of items
"""
@spec simple(integer, [float]) :: float
def simple(items) when is_list(items), do: Enum.sum(items) / Enum.count(items)
def simple(0, _), do: 0
def simple(n, items) when is_list(items) do
sum = items |> Enum.take(n) |> Enum.sum
(sum / n)
end
@spec exponential(integer, [float]) :: float
def exponential(_, []), do: 0
def exponential(n, prices) do
[head | _] = exp_calc(n, prices)
head
end
def exponential(n, price, previous_ema) do
exp_calc(n, [price], [previous_ema]) |> Enum.at(0)
end
@spec double_ema(integer, [float]) :: float
def double_ema(_, []), do: 0
def double_ema(n, prices) do
emas = exp_calc(n, prices)
ema_2 = exp_calc(n, emas |> Enum.reverse) |> Enum.at(0)
ema = emas |> Enum.at(0)
%DoubleEma{ema: ema, ema_2: ema_2, value: 2 * ema - ema_2}
end
def double_ema(n, price, %DoubleEma{ema: previous_ema, ema_2: previous_ema_2}) do
ema = exp_calc(n, [price], [previous_ema]) |> Enum.at(0)
ema_2 = exp_calc(n, [ema], [previous_ema_2]) |> Enum.at(0)
%DoubleEma{ema: ema, ema_2: ema_2, value: 2 * ema - ema_2}
end
@spec triple_ema(integer, [float]) :: float
def triple_ema(_, []), do: 0
def triple_ema(n, prices) do
emas = exp_calc(n, prices)
ema_2s = exp_calc(n, emas |> Enum.reverse)
ema_3 = exp_calc(n, ema_2s |> Enum.reverse) |> Enum.at(0)
ema = emas |> Enum.at(0)
ema_2 = ema_2s |> Enum.at(0)
%TripleEma{ema: ema, ema_2: ema_2, ema_3: ema_3, value: (3 * ema - 3 * ema_2) + ema_3}
end
def triple_ema(n, price, %TripleEma{ema: previous_ema, ema_2: previous_ema_2, ema_3: previous_ema_3}) do
ema = exp_calc(n, [price], [previous_ema]) |> Enum.at(0)
ema_2 = exp_calc(n, [ema], [previous_ema_2]) |> Enum.at(0)
ema_3 = exp_calc(n, [ema_2], [previous_ema_3]) |> Enum.at(0)
%TripleEma{ema: ema, ema_2: ema_2, ema_3: ema_3, value: (3 * ema - 3 * ema_2) + ema_3}
end
@spec weighting_multiplier(integer) :: float
# Standard EMA smoothing factor: k = 2 / (n + 1).
defp weighting_multiplier(n) do
  2 / (n + 1)
end

@spec exp_calc(integer, [float], [float]) :: [float]
# Recursive EMA kernel. Walks the price list and prepends each new EMA to
# the accumulator, so the newest EMA ends up at the head of the result.
defp exp_calc(_, [], emas), do: emas
defp exp_calc(n, [p | tl], [ema_head | ema_tail]) do
  k = weighting_multiplier(n)
  # EMA(t) = price * k + EMA(t-1) * (1 - k)
  exp_calc(n, tl, [(p * k) + (ema_head * (1 - k))] ++ [ema_head] ++ ema_tail)
end
# Arity-2 entry point: seeds the accumulator with the first price.
defp exp_calc(k, [hd | tl]), do: exp_calc(k, tl, [hd])
@spec weighted([{float, float}]) :: float
# Sum of pairwise products (value * weight). Returns 0 for an empty list.
def weighted(pairs) do
  pairs
  |> Enum.map(fn {value, weight} -> value * weight end)
  |> Enum.sum()
end
end | 33.590476 | 115 | 0.623193 |
9e476faa91ecfbad85068ee8399d9448e7832c10 | 227 | exs | Elixir | test/handlers/rcpt_test.exs | maxneuvians/pique | 1f153d98e18cf58736abadf6efa73386eedb077c | [
"MIT"
] | 13 | 2020-03-02T03:30:04.000Z | 2022-02-21T10:29:59.000Z | test/handlers/rcpt_test.exs | maxneuvians/pique | 1f153d98e18cf58736abadf6efa73386eedb077c | [
"MIT"
] | null | null | null | test/handlers/rcpt_test.exs | maxneuvians/pique | 1f153d98e18cf58736abadf6efa73386eedb077c | [
"MIT"
] | null | null | null | defmodule Pique.Handlers.RCPTTest do
use ExUnit.Case
import Pique.Handlers.RCPT

describe "handle/1" do
  # RCPT handling is a pass-through: the state map is returned unchanged,
  # wrapped in an :ok tuple.
  test "returns {:ok, state} state is passed passed" do
    assert handle(%{}) == {:ok, %{}}
  end
end
end
| 17.461538 | 57 | 0.647577 |
9e478de2ac1ff11c54744bd630953686ef7d1deb | 6,222 | ex | Elixir | lib/iex/lib/iex/evaluator.ex | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | lib/iex/lib/iex/evaluator.ex | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | lib/iex/lib/iex/evaluator.ex | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | defmodule IEx.Evaluator do
@moduledoc false
@doc """
Eval loop for an IEx session. Its responsibilities include:
* loading of .iex files
* evaluating code
* trapping exceptions in the code being evaluated
* keeping expression history
"""
def start(server, leader) do
old_leader = Process.group_leader
Process.group_leader(self, leader)
try do
loop(server, IEx.History.init)
after
Process.group_leader(self, old_leader)
end
end
defp loop(server, history) do
receive do
{:eval, ^server, code, state} ->
{result, history} = eval(code, state, history)
send server, {:evaled, self, result}
loop(server, history)
{:done, ^server} ->
:ok
end
end
@doc """
Locates and loads an .iex.exs file from one of predefined locations.
Returns the new state.
"""
def load_dot_iex(state, path \\ nil) do
candidates = if path do
[path]
else
Enum.map [".iex.exs", "~/.iex.exs"], &Path.expand/1
end
path = Enum.find candidates, &File.regular?/1
if is_nil(path) do
state
else
eval_dot_iex(state, path)
end
end
# Evaluates the contents of a .iex.exs file, threading the resulting
# bindings and environment back into the evaluator state. Any failure is
# printed and then aborts the VM (System.halt/1), since a broken dot-iex
# file would leave the session in an inconsistent state.
defp eval_dot_iex(state, path) do
  try do
    code = File.read!(path)
    env = :elixir.env_for_eval(state.env, file: path, line: 1)
    # Evaluate the contents in the same environment server_loop will run in
    {_result, binding, env, _scope} =
      :elixir.eval(String.to_char_list(code), state.binding, env)
    %{state | binding: binding, env: :elixir.env_for_eval(env, file: "iex", line: 1)}
  catch
    kind, error ->
      io_result "Error while evaluating: #{path}"
      print_error(kind, error, System.stacktrace)
      System.halt(1)
  end
end
# Instead of doing just :elixir.eval, we first parse the expression to see
# if it's well formed. If parsing succeeds, we evaluate the AST as usual.
#
# If parsing fails, this might be a TokenMissingError which we treat in
# a special way (to allow for continuation of an expression on the next
# line in IEx). In case of any other error, we let :elixir_translator
# to re-raise it.
#
# Returns updated state.
#
# The first two clauses provide support for the break-trigger allowing to
# break out from a pending incomplete expression. See
# https://github.com/elixir-lang/elixir/issues/1089 for discussion.
# Typing this alone on a line aborts a pending multi-line expression.
@break_trigger '#iex:break\n'

# Top-level evaluation entry: any error raised/thrown while evaluating is
# printed and the input cache is cleared so the next prompt starts fresh.
defp eval(code, state, history) do
  try do
    do_eval(String.to_char_list(code), state, history)
  catch
    kind, error ->
      print_error(kind, error, System.stacktrace)
      {%{state | cache: ''}, history}
  end
end

# Break trigger with no pending input: nothing to abort, state unchanged.
defp do_eval(@break_trigger, %IEx.State{cache: ''} = state, history) do
  {state, history}
end

# Break trigger with pending input: raise an "incomplete expression" parse
# error, which eval/3 catches and turns into a cleared cache.
defp do_eval(@break_trigger, state, _history) do
  :elixir_errors.parse_error(state.counter, "iex", "incomplete expression", "")
end

defp do_eval(latest_input, state, history) do
  # Prepend any cached (unfinished) input from previous lines.
  code = state.cache ++ latest_input
  line = state.counter
  # Expose history via the process dictionary for the duration of this
  # evaluation only; cleaned up in the `after` clause below.
  Process.put(:iex_history, history)
  handle_eval(Code.string_to_quoted(code, [line: line, file: "iex"]), code, line, state, history)
after
  Process.delete(:iex_history)
end

# Parse succeeded: evaluate the forms, display the result (unless it is the
# special "don't display" marker) and roll state/history forward.
defp handle_eval({:ok, forms}, code, line, state, history) do
  {result, binding, env, scope} =
    :elixir.eval_forms(forms, state.binding, state.env, state.scope)
  unless result == IEx.dont_display_result, do: io_inspect(result)
  state =
    %{state | env: env, cache: '',
      scope: scope, binding: binding,
      counter: state.counter + 1}
  {state, update_history(history, line, code, result)}
end

# An error with an empty token signals an incomplete expression that
# continues on the next line (see module comment about TokenMissingError).
defp handle_eval({:error, {_, _, ""}}, code, _line, state, history) do
  # Update state.cache so that IEx continues to add new input to
  # the unfinished expression in "code"
  {%{state | cache: code}, history}
end

defp handle_eval({:error, {line, error, token}}, _code, _line, _state, _) do
  # Encountered malformed expression
  :elixir_errors.parse_error(line, "iex", error, token)
end
# Appends an entry to the history, trimmed to the configured size.
defp update_history(history, counter, cache, result) do
  IEx.History.append(history, {counter, cache, result}, IEx.Config.history_size)
end

defp io_inspect(result) do
  io_result inspect(result, IEx.inspect_opts)
end

# Prints to :stdio using the :eval_result color profile.
defp io_result(result) do
  IO.puts :stdio, IEx.color(:eval_result, result)
end

# Prints to :stdio using the :eval_error color profile.
defp io_error(result) do
  IO.puts :stdio, IEx.color(:eval_error, result)
end

## Error handling

# Prints the exception banner followed by a pruned, formatted stacktrace.
defp print_error(kind, reason, stacktrace) do
  Exception.format_banner(kind, reason, stacktrace) |> io_error
  stacktrace |> prune_stacktrace |> format_stacktrace |> io_error
end

# Compiler/interpreter modules that are noise in user-facing stacktraces.
@elixir_internals [:elixir, :elixir_exp, :elixir_compiler, :elixir_module, :elixir_clauses,
                   :elixir_translator, :elixir_expand, :elixir_lexical, :elixir_exp_clauses,
                   :elixir_def]

defp prune_stacktrace(stacktrace) do
  # The order in which each drop_while is listed is important.
  # For example, the user my call Code.eval_string/2 in IEx
  # and if there is an error we should not remove erl_eval
  # and eval_bits information from the user stacktrace.
  stacktrace
  |> Enum.reverse()
  |> Enum.drop_while(&(elem(&1, 0) == __MODULE__))
  |> Enum.drop_while(&(elem(&1, 0) == :elixir))
  |> Enum.drop_while(&(elem(&1, 0) in [:erl_eval, :eval_bits]))
  |> Enum.reverse()
  |> Enum.reject(&(elem(&1, 0) in @elixir_internals))
end

@doc false
def format_stacktrace(trace) do
  entries =
    for entry <- trace do
      split_entry(Exception.format_stacktrace_entry(entry))
    end
  # Width of the longest app prefix, used to right-align the entries.
  width = Enum.reduce entries, 0, fn {app, _}, acc ->
    max(String.length(app), acc)
  end
  " " <> Enum.map_join(entries, "\n ", &format_entry(&1, width))
end
# Splits a formatted stacktrace entry into an {app_prefix, rest} pair.
# Entries of the form "(app vsn) file:line: M.f/a" yield the parenthesized
# prefix separately; anything else yields an empty prefix.
defp split_entry("(" <> _ = entry) do
  case :binary.split(entry, ") ") do
    [prefix, rest] -> {prefix <> ") ", rest}
    _ -> {"", entry}
  end
end

defp split_entry(entry), do: {"", entry}
# Right-justifies the app prefix to the common width and colors both parts.
# NOTE(review): String.rjust/2 is deprecated in later Elixir releases in
# favor of String.pad_leading/2; kept here to match this codebase's era.
defp format_entry({app, info}, width) do
  app = String.rjust(app, width)
  IEx.color(:stack_app, app) <> IEx.color(:stack_info, info)
end
end
| 29.770335 | 99 | 0.64738 |
9e479eef9f1d72c5f9bffde29ba38d24122f43eb | 398 | exs | Elixir | test/discount_test.exs | goravbhootra/braintree-elixir | 53c10216125c13c05a19edf1dc1c98a818c46e09 | [
"MIT"
] | null | null | null | test/discount_test.exs | goravbhootra/braintree-elixir | 53c10216125c13c05a19edf1dc1c98a818c46e09 | [
"MIT"
] | null | null | null | test/discount_test.exs | goravbhootra/braintree-elixir | 53c10216125c13c05a19edf1dc1c98a818c46e09 | [
"MIT"
] | null | null | null | defmodule Braintree.DiscountTest do
use ExUnit.Case, async: true

alias Braintree.Discount

describe "new/1" do
  # The assertions pin both a copied field (:id) and the defaults expected
  # for fields absent from the input map.
  test "builds a sane struct" do
    discount = Discount.new(%{
      "id" => "asdf1234",
      "amount" => "25.00"
    })

    assert discount.id == "asdf1234"
    refute discount.never_expires?
    assert discount.number_of_billing_cycles == 0
  end
end
end
| 20.947368 | 51 | 0.630653 |
9e47a612cb04873ac1bd0b2a6454f9c909813021 | 1,236 | ex | Elixir | test/event/support/upcast/events.ex | Cantido/commanded | e8058c4381e16547e6960eaf9f38619c6a43b18f | [
"MIT"
] | 1,220 | 2017-10-31T10:56:40.000Z | 2022-03-31T17:40:19.000Z | test/event/support/upcast/events.ex | Cantido/commanded | e8058c4381e16547e6960eaf9f38619c6a43b18f | [
"MIT"
] | 294 | 2017-11-03T10:33:41.000Z | 2022-03-24T08:36:42.000Z | test/event/support/upcast/events.ex | Cantido/commanded | e8058c4381e16547e6960eaf9f38619c6a43b18f | [
"MIT"
] | 208 | 2017-11-03T10:56:47.000Z | 2022-03-14T05:49:38.000Z | defmodule Commanded.Event.Upcast.Events do
alias Commanded.Event.Upcaster
# Versioned test events used to exercise event upcasting.

# EventOne: no inline Upcaster implementation defined here.
defmodule EventOne do
  @derive Jason.Encoder
  defstruct [:version, :reply_to, :process_id]
end

# EventTwo upcasts in place: its Upcaster impl bumps :version to 2.
defmodule EventTwo do
  @derive Jason.Encoder
  defstruct [:version, :reply_to, :process_id]

  defimpl Upcaster do
    def upcast(%EventTwo{} = event, _metadata) do
      %EventTwo{event | version: 2}
    end
  end
end

# EventThree is upcast into EventFour by a standalone defimpl below.
defmodule EventThree do
  @derive Jason.Encoder
  defstruct [:version, :reply_to, :process_id]
end

# EventFour: the target shape for upcast EventThree structs (adds :name).
defmodule EventFour do
  @derive Jason.Encoder
  defstruct [:version, :name, :reply_to, :process_id]
end

# EventFive records the metadata passed to the upcaster while bumping the
# version, so tests can assert on what the upcaster received.
defmodule EventFive do
  @derive Jason.Encoder
  defstruct [:version, :reply_to, :process_id, :event_metadata]

  defimpl Upcaster do
    def upcast(%EventFive{} = event, metadata) do
      %EventFive{event | version: 2, event_metadata: metadata}
    end
  end
end
# Upcasts an EventThree into the newer EventFour shape, defaulting the new
# :name field and bumping the version.
defimpl Upcaster, for: EventThree do
  def upcast(%EventThree{} = event, _metadata) do
    fields =
      event
      |> Map.from_struct()
      |> Map.merge(%{name: "Chris", version: 2})

    struct(EventFour, fields)
  end
end
# Plain marker event; the name suggests it signals the end of a test
# stream, but its consumer is not visible in this file.
defmodule Stop do
  @derive Jason.Encoder
  defstruct [:process_id]
end
end
| 22.888889 | 86 | 0.673948 |
9e48269bbdfa1114b368f3ae87e5db4badd94e81 | 363 | ex | Elixir | lib/slurp/iex/commands/help.ex | AwaitFuture/slurp | f1d0fe5feb21f3135727c1516908e89130ea5801 | [
"MIT"
] | 20 | 2021-01-02T07:45:04.000Z | 2022-03-29T07:34:42.000Z | lib/slurp/iex/commands/help.ex | AwaitFuture/slurp | f1d0fe5feb21f3135727c1516908e89130ea5801 | [
"MIT"
] | 11 | 2021-01-25T13:10:34.000Z | 2021-09-23T05:34:08.000Z | lib/slurp/iex/commands/help.ex | AwaitFuture/slurp | f1d0fe5feb21f3135727c1516908e89130ea5801 | [
"MIT"
] | 1 | 2021-03-12T07:27:05.000Z | 2021-03-12T07:27:05.000Z | defmodule Slurp.IEx.Commands.Help do
@doc """
Prints the list of commands available in the Slurp IEx session.
"""
def help do
  IO.puts("""
  * help
  * blockchains [where: [...], order: [...]]
  * start_blockchains [where: [...]]
  * stop_blockchains [where: [...]]
  * new_head_subscriptions [where: [...], order: [...]]
  * log_subscriptions [where: [...], order: [...]]
  """)

  # Suppress the :ok return value in the IEx prompt.
  IEx.dont_display_result()
end
end
| 24.2 | 57 | 0.553719 |
9e4827f24e3ef7bc222e9c00b0857fd7fd460c4f | 2,000 | exs | Elixir | config/dev.exs | dreamingechoes/multi_tenancex | cfe3feb6b7eb25559f9abaa4da89e4aafc9ad2ec | [
"MIT"
] | 30 | 2018-06-27T17:51:53.000Z | 2021-04-24T03:17:55.000Z | config/dev.exs | dreamingechoes/multi_tenancex | cfe3feb6b7eb25559f9abaa4da89e4aafc9ad2ec | [
"MIT"
] | null | null | null | config/dev.exs | dreamingechoes/multi_tenancex | cfe3feb6b7eb25559f9abaa4da89e4aafc9ad2ec | [
"MIT"
] | 7 | 2018-07-24T17:56:14.000Z | 2019-12-31T02:10:13.000Z | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :multi_tenancex, MultiTenancexWeb.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [
    node: [
      "node_modules/brunch/bin/brunch",
      "watch",
      "--stdin",
      cd: Path.expand("../assets", __DIR__)
    ]
  ]

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
#     openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
#     https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Watch static and templates for browser reloading.
config :multi_tenancex, MultiTenancexWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{lib/multi_tenancex_web/views/.*(ex)$},
      ~r{lib/multi_tenancex_web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Configure your database. These credentials are development-only
# ("db" is the docker-compose style hostname).
config :multi_tenancex, MultiTenancex.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "multi_tenancex_dev",
  hostname: "db",
  pool_size: 10
| 30.769231 | 170 | 0.7045 |
9e482b6c594272326de85fdd4ddb1d5c6ec7d3ce | 1,284 | exs | Elixir | test/auth/bcrypt_test.exs | Fibrasek/doorman | 1b32fc3d3dd39d077d69ad1d3ef62f80f3af59f4 | [
"MIT"
] | null | null | null | test/auth/bcrypt_test.exs | Fibrasek/doorman | 1b32fc3d3dd39d077d69ad1d3ef62f80f3af59f4 | [
"MIT"
] | null | null | null | test/auth/bcrypt_test.exs | Fibrasek/doorman | 1b32fc3d3dd39d077d69ad1d3ef62f80f3af59f4 | [
"MIT"
] | null | null | null | defmodule Doorman.Auth.BcryptTest do
use Doorman.ConnCase
alias Doorman.Auth.Bcrypt
# Minimal schema used only by these tests; :password is virtual so only the
# hashed value could ever be persisted.
defmodule FakeUser do
  use Ecto.Schema
  import Ecto.Changeset

  schema "fake_users" do
    field :hashed_password
    field :password, :string, virtual: true
  end

  # Casts :password and lets Doorman hash it into :hashed_password.
  def create_changeset(changes) do
    %__MODULE__{}
    |> cast(changes, ~w(password))
    |> Doorman.Auth.Bcrypt.hash_password
  end
end
test "hash_password sets encrypted password on changeset when virtual field is present" do
changeset = FakeUser.create_changeset(%{password: "foobar"})
assert changeset.changes[:hashed_password]
end
test "hash_password does not set encrypted password on changeset when virtual field is not present" do
changeset = FakeUser.create_changeset(%{})
refute changeset.changes[:hashed_password]
end
test "authenticate returns true when password matches" do
password = "secure"
user = %FakeUser{hashed_password: Comeonin.Bcrypt.hashpwsalt(password)}
assert Bcrypt.authenticate(user, password)
end
test "authenticate returns false when password does not match" do
password = "secure"
user = %FakeUser{hashed_password: Comeonin.Bcrypt.hashpwsalt(password)}
refute Bcrypt.authenticate(user, "wrong")
end
end
| 26.75 | 104 | 0.728193 |
9e483a5a06291033b6094dd48da1e592f3e4e515 | 1,070 | ex | Elixir | priv/example-projects/foo_bar_umbrella/apps/foo_bar_web/lib/foo_bar_web/router.ex | c4710n/phx_custom | 19080ed8896be9ae846d12b2f631a1bc4aef5c78 | [
"MIT"
] | null | null | null | priv/example-projects/foo_bar_umbrella/apps/foo_bar_web/lib/foo_bar_web/router.ex | c4710n/phx_custom | 19080ed8896be9ae846d12b2f631a1bc4aef5c78 | [
"MIT"
] | null | null | null | priv/example-projects/foo_bar_umbrella/apps/foo_bar_web/lib/foo_bar_web/router.ex | c4710n/phx_custom | 19080ed8896be9ae846d12b2f631a1bc4aef5c78 | [
"MIT"
] | null | null | null | defmodule FooBarWeb.Router do
use FooBarWeb, :router
# Browser pipeline: HTML requests get a session, flash, CSRF protection and
# secure default response headers.
pipeline :browser do
  plug :accepts, ["html"]
  plug :fetch_session
  plug :fetch_flash
  plug :protect_from_forgery
  plug :put_secure_browser_headers
end

# API pipeline: JSON only; no session/CSRF plugs.
pipeline :api do
  plug :accepts, ["json"]
end

scope "/", FooBarWeb do
  pipe_through :browser

  get "/", PageController, :index
end

# Other scopes may use custom stacks.
# scope "/api", FooBarWeb do
#   pipe_through :api
# end

# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
  import Phoenix.LiveDashboard.Router

  scope "/" do
    pipe_through :browser
    live_dashboard "/dashboard", metrics: FooBarWeb.Telemetry
  end
end
end
| 24.883721 | 70 | 0.697196 |
9e486adf6353834d7300f8e748aa4c6234ab3ee0 | 674 | exs | Elixir | test/web/controller/admin/skill_controller_test.exs | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | test/web/controller/admin/skill_controller_test.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | test/web/controller/admin/skill_controller_test.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Web.Admin.SkillControllerTest do
use Web.AuthConnCase
test "create a skill", %{conn: conn} do
params = %{
name: "Slash",
command: "slash",
description: "Slash at the target",
level: "1",
user_text: "You slash at your {target}",
usee_text: "You are slashed at by {who}",
points: 3,
effects: "[]",
}
conn = post conn, skill_path(conn, :create), skill: params
assert html_response(conn, 302)
end
test "update a skill", %{conn: conn} do
skill = create_skill()
conn = put conn, skill_path(conn, :update, skill.id), skill: %{name: "Dodge"}
assert html_response(conn, 302)
end
end
| 24.962963 | 81 | 0.61276 |
9e48a74d4e2d4be1e63f4ffb67614a7e30a21301 | 5,476 | ex | Elixir | lib/ex_csv/parser.ex | CargoSense/ex_csv | 247df4f6d244b5fdbad52dd27385716a6bd48825 | [
"MIT",
"Unlicense"
] | 46 | 2015-07-27T15:11:46.000Z | 2022-03-18T04:36:25.000Z | lib/ex_csv/parser.ex | CargoSense/ex_csv | 247df4f6d244b5fdbad52dd27385716a6bd48825 | [
"MIT",
"Unlicense"
] | 7 | 2015-06-23T17:55:27.000Z | 2016-08-03T20:51:23.000Z | lib/ex_csv/parser.ex | CargoSense/ex_csv | 247df4f6d244b5fdbad52dd27385716a6bd48825 | [
"MIT",
"Unlicense"
] | 12 | 2015-04-11T05:26:40.000Z | 2018-09-25T15:00:12.000Z | defmodule ExCsv.Parser do
# Parser configuration. delimiter/return/newline/quote are code points
# (44 = comma, 13 = CR, 10 = LF, 34 = double quote); :headings controls
# whether the first row is split off; the remaining keys track in-flight
# quoting state while build/3 walks the input.
defstruct delimiter: 44, return: 13, newline: 10, quote: 34, headings: false, quoting: false, quote_at: nil, eat_next_quote: true
# Same as `parse/2`, but raises `ArgumentError` instead of returning an
# error tuple.
def parse!(text, opts \\ []) do
  text |> parse(opts) |> unwrap_or_raise()
end

# Unwraps a parse result, raising on error.
defp unwrap_or_raise({:ok, table}), do: table
defp unwrap_or_raise({:error, err}), do: raise(ArgumentError, err)
# Parses CSV text (binary or iodata) into an ExCsv.Table result tuple.
def parse(text, opts \\ []) do
  do_parse(text, opts |> configure)
end

# Accept iodata by flattening it to a binary first.
defp do_parse(iodata, config) when is_list(iodata) do
  iodata |> IO.iodata_to_binary |> do_parse(config)
end

defp do_parse(string, config) when is_binary(string) do
  # build/3 accumulates rows and fields in reverse order.
  {result, state} = string |> skip_dotall |> build([[""]], config)
  if state.quoting do
    # Input ended inside an open quote: report where the quote started.
    info = result |> hd |> hd |> String.slice(0, 10)
    {:error, "quote meets end of file; started near: #{info}"}
  else
    # Undo the reverse accumulation, optionally splitting off a heading row.
    [head | tail] = result |> rstrip |> Enum.reverse |> Enum.map(&(Enum.reverse(&1)))
    case config.headings do
      true -> {:ok, %ExCsv.Table{headings: head, body: tail}}
      false -> {:ok, %ExCsv.Table{body: [head | tail]}}
    end
  end
end

# Folds user options into the default %ExCsv.Parser{} configuration.
defp configure(settings) do
  settings |> configure(%ExCsv.Parser{})
end
defp configure([], config), do: config
defp configure([head | tail], config) do
  tail |> configure(config |> Map.merge(head |> setting))
end

# The delimiter, newline, and quote settings need to be integers
# @spec setting({atom, char_list}) :: %{atom => integer}
defp setting({key, value}) when key in [:delimiter, :newline, :quote] do
  # These options arrive as charlists (e.g. ';'); keep the first code point.
  [{key, value |> hd}] |> Enum.into(%{})
end
defp setting({key, value}), do: [{key, value}] |> Enum.into(%{})
# Character-at-a-time state machine. Rows/fields are accumulated in
# reverse; clause order encodes precedence (delimiter, quote, newline,
# then plain characters).

# DELIMITER
# At the beginning of a row (empty current row): open two fresh fields.
defp build(<<char>> <> rest, [[] | previous_rows], %{delimiter: char, quoting: false} = config) do
  current_row = [new_field, new_field]
  rows = [current_row | previous_rows]
  rest |> skip_whitespace |> build(rows, config)
end
# After the beginning of a row: close (rstrip) the current field and open
# a new one.
defp build(<<char>> <> rest, [[current_field | previous_fields] | previous_rows], %{delimiter: char, quoting: false} = config) do
  current_row = [new_field | [current_field |> String.rstrip | previous_fields]]
  rows = [current_row | previous_rows]
  rest |> skip_whitespace |> build(rows, config)
end

# QUOTE
# Start quote at the beginning of a field (don't retain this quote pair)
defp build(<<char>> <> rest, [["" | _previous_fields] | _previous_rows] = rows, %{quote: char, quoting: false} = config) do
  rest |> build(rows, %{ config | quoting: true, eat_next_quote: true })
end
# Start quote in the middle of a field (retain this quote pair)
defp build(<<char>> <> rest, [[current_field | previous_fields] | previous_rows], %{quote: char, quoting: false} = config) do
  current_row = [current_field <> <<char::utf8>> | previous_fields]
  rows = [current_row | previous_rows]
  rest |> build(rows, %{ config | quoting: true, eat_next_quote: false })
end
# End quote and don't retain the quote character (full-field quoting)
defp build(<<char>> <> rest, rows, %{quote: char, quoting: true, eat_next_quote: true} = config) do
  rest |> skip_whitespace |> build(rows, %{ config | quoting: false })
end
# End quote and retain the quote character (partial field quoting)
defp build(<<char>> <> rest, [[current_field | previous_fields] | previous_rows], %{quote: char, quoting: true, eat_next_quote: false} = config) do
  current_row = [current_field <> <<char::utf8>> | previous_fields]
  rows = [current_row | previous_rows]
  rest |> build(rows, %{ config | quoting: false })
end

# NEWLINE
# Accepts CRLF, lone CR, or lone LF; all delegate to build_newline/5.
defp build(<<rt,nl>> <> rest, [[current_field | previous_fields] | previous_rows], %{return: rt, newline: nl, quoting: false} = config) do
  build_newline(rest, current_field, previous_fields, previous_rows, config)
end
defp build(<<rt>> <> rest, [[current_field | previous_fields] | previous_rows], %{return: rt, quoting: false} = config) do
  build_newline(rest, current_field, previous_fields, previous_rows, config)
end
defp build(<<nl>> <> rest, [[current_field | previous_fields] | previous_rows], %{newline: nl, quoting: false} = config) do
  build_newline(rest, current_field, previous_fields, previous_rows, config)
end

# NORMAL CHARACTER
# Starting the first field in the current row
defp build(<<char>> <> rest, [[] | previous_rows], config) do
  current_row = [<<char>>]
  rows = [current_row | previous_rows]
  rest |> build(rows, config)
end
# Adding to the last field in the current row
defp build(<<char>> <> rest, [[current_field | previous_fields] | previous_rows], config) do
  current_row = [current_field <> <<char>> | previous_fields]
  rows = [current_row | previous_rows]
  rest |> build(rows, config)
end

# EOF
defp build("", rows, config), do: {rows, config}

# Closes the current row (rstripping the last field) and opens a new one.
defp build_newline(rest, current_field, previous_fields, previous_rows, config) do
  current_row = [current_field |> String.rstrip | previous_fields]
  rows = [new_row | [current_row | previous_rows]]
  rest |> skip_whitespace |> build(rows, config)
end
defp rstrip([[""] | rows]), do: rows
defp rstrip(rows), do: rows
defp skip_whitespace(<<char>> <> rest) when char in '\s\r' do
skip_whitespace(rest)
end
defp skip_whitespace(string), do: string
defp skip_dotall(<<char>> <> rest) when char in '\s\r\n\t' do
skip_dotall(rest)
end
defp skip_dotall(string), do: string
defp new_field, do: ""
defp new_row, do: [new_field]
end
| 40.562963 | 149 | 0.660884 |
9e48e3353e81e3f8e0e4a41674189c083bb2acc7 | 2,940 | ex | Elixir | lib/repg2/worker.ex | paulobezerr/repg2 | 824734df292dadf1705cf1709550e01e13170b5a | [
"Apache-2.0"
] | 25 | 2018-02-17T16:15:36.000Z | 2021-05-28T01:36:39.000Z | lib/repg2/worker.ex | paulobezerr/repg2 | 824734df292dadf1705cf1709550e01e13170b5a | [
"Apache-2.0"
] | null | null | null | lib/repg2/worker.ex | paulobezerr/repg2 | 824734df292dadf1705cf1709550e01e13170b5a | [
"Apache-2.0"
] | 4 | 2018-09-27T08:54:07.000Z | 2020-11-09T08:54:25.000Z | defmodule RePG2.Worker do
@moduledoc false
use GenServer
require Logger
alias RePG2.Impl
# Starts the worker registered under the module name (one per node).
def start_link(), do: GenServer.start_link(__MODULE__, [], name: __MODULE__)

@doc """
Make a globally locked multi call to all `RePG2.Worker`s in the cluster.

This function acquires a cluster-wide lock on the group `name`, ensuring
that only one node can update the group at a time. Then, a
`GenServer.multi_call` is made to all `RePG2.Worker`s with the given
`message`.
"""
def globally_locked_multi_call(name, message) do
  :global.trans {{__MODULE__, name}, self()}, fn ->
    all_nodes = Node.list([:visible, :this])
    GenServer.multi_call(all_nodes, RePG2.Worker, message)
  end
end

def init([]) do
  nodes = Node.list()
  # Subscribe to :nodeup/:nodedown notifications so membership stays in
  # sync with nodes joining later.
  :ok = :net_kernel.monitor_nodes(true)
  for new_node <- nodes do
    # Announce ourselves to every known node and treat each of them as if
    # it had just come up from our point of view (triggers an exchange).
    send worker_for(new_node), {:new_repg2, Node.self()}
    send self(), {:nodeup, new_node}
  end
  :ok = Impl.init()
  {:ok, %{}}
end
# Creates the group if it does not already exist.
def handle_call({:create, name}, _from, state) do
  Impl.assure_group(name)
  {:reply, :ok, state}
end

# Joining/leaving is a no-op when the group does not exist.
def handle_call({:join, name, pid}, _from, state) do
  if Impl.group_exists?(name), do: Impl.join_group(name, pid)
  {:reply, :ok, state}
end

def handle_call({:leave, name, pid}, _from, state) do
  if Impl.group_exists?(name), do: Impl.leave_group(name, pid)
  {:reply, :ok, state}
end

def handle_call({:delete, name}, _from, state) do
  Impl.delete_group(name)
  {:reply, :ok, state}
end

# Catch-all: log unexpected calls instead of crashing the worker.
def handle_call(message, from, state) do
  _ = Logger.warn(
    """
    The RePG2 server received an unexpected message:
    handle_call(#{inspect message}, #{inspect from}, #{inspect state})
    """
  )
  {:noreply, state}
end

# Merges a remote node's membership table: every member we do not already
# know about is joined locally.
def handle_cast({:exchange, _node, all_memberships}, state) do
  for {name, members} <- all_memberships,
      Impl.assure_group(name),
      member <- members -- Impl.group_members(name),
      do: Impl.join_group(name, member)
  {:noreply, state}
end

def handle_cast(_, state), do: {:noreply, state}

# A monitored member died: remove each of its memberships from each group.
def handle_info({:DOWN, _ref, :process, pid, _info}, state) do
  for name <- Impl.member_groups(pid),
      membership <- Impl.memberships_in_group(pid, name),
      do: Impl.leave_group(name, membership)
  {:noreply, state}
end

# A node came up (or announced itself): push our memberships to it.
def handle_info({:nodeup, new_node}, state) do
  exchange_all_memberships(new_node)
  {:noreply, state}
end

def handle_info({:new_repg2, new_node}, state) do
  exchange_all_memberships(new_node)
  {:noreply, state}
end

def handle_info(_, state), do: {:noreply, state}
# Pushes this node's full group-membership table to the worker running on
# `node_name`.
defp exchange_all_memberships(node_name) do
  memberships =
    Impl.all_groups()
    |> Enum.map(fn group -> {group, Impl.group_members(group)} end)

  GenServer.cast(worker_for(node_name), {:exchange, Node.self(), memberships})
end

# Remote workers are addressed by registered name on the target node.
defp worker_for(node_name) do
  {__MODULE__, node_name}
end
end
| 23.149606 | 78 | 0.655102 |
9e48edfcfb1e98b17284be531af7be09d744c5ff | 526 | exs | Elixir | priv/repo/migrations/20211125042259_create_meetings.exs | ashton314/rostrum2 | e392190b27f7dae4cc2de3668c1f4fea5cca63c1 | [
"MIT"
] | null | null | null | priv/repo/migrations/20211125042259_create_meetings.exs | ashton314/rostrum2 | e392190b27f7dae4cc2de3668c1f4fea5cca63c1 | [
"MIT"
] | 3 | 2021-11-25T05:44:03.000Z | 2021-11-26T06:33:53.000Z | priv/repo/migrations/20211125042259_create_meetings.exs | ashton314/rostrum2 | e392190b27f7dae4cc2de3668c1f4fea5cca63c1 | [
"MIT"
] | null | null | null | defmodule Rostrum.Repo.Migrations.CreateMeetings do
use Ecto.Migration
def change do
  # Meetings use binary (UUID) primary keys, matching the referenced
  # units/users tables.
  create table(:meetings, primary_key: false) do
    add :id, :binary_id, primary_key: true
    add :date, :naive_datetime
    add :title, :string
    add :unit_id, references(:units, on_delete: :nothing, type: :binary_id)
    add :creator_id, references(:users, on_delete: :nothing, type: :binary_id)

    timestamps()
  end

  # Index the foreign keys used to look meetings up by unit/creator.
  create index(:meetings, [:unit_id])
  create index(:meetings, [:creator_id])
end
end
| 27.684211 | 80 | 0.68251 |
9e49306b35bb17163ccab92acf6a38b5a4781ff2 | 1,061 | exs | Elixir | config/prod.exs | ruhrjs/ruhrjs-karaoke | 93c6bee18873ead2854e1bbf835a665cc782f7da | [
"Unlicense"
] | null | null | null | config/prod.exs | ruhrjs/ruhrjs-karaoke | 93c6bee18873ead2854e1bbf835a665cc782f7da | [
"Unlicense"
] | null | null | null | config/prod.exs | ruhrjs/ruhrjs-karaoke | 93c6bee18873ead2854e1bbf835a665cc782f7da | [
"Unlicense"
] | null | null | null | use Mix.Config
require Logger

# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :platform, PlatformWeb.Endpoint,
  http: [:inet6, port: System.get_env("PORT") || 4000],
  url: [
    scheme: "https",
    host: System.get_env("PLATFORM_HOST") || "example.com",
    port: 443
  ],
  server: true,
  # force_ssl: [rewrite_on: [:x_forwarded_proto]],
  # Fail fast at boot when the secret is missing. The previous fallback,
  # `Logger.info(...)`, returned `:ok`, which silently configured an
  # invalid secret_key_base and deferred the failure to request time.
  secret_key_base:
    System.get_env("SECRET_KEY_BASE") ||
      raise("environment variable SECRET_KEY_BASE is missing"),
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info

config :platform, Platform.Repo,
  adapter: Ecto.Adapters.Postgres,
  url: System.get_env("DATABASE_URL") || ""
| 34.225806 | 111 | 0.739868 |
9e497879d6e9723fad70b63eff6658f46eaea921 | 365 | ex | Elixir | lib/cforum/jobs/appsignal.ex | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | 16 | 2019-04-04T06:33:33.000Z | 2021-08-16T19:34:31.000Z | lib/cforum/jobs/appsignal.ex | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | 294 | 2019-02-10T11:10:27.000Z | 2022-03-30T04:52:53.000Z | lib/cforum/jobs/appsignal.ex | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | 10 | 2019-02-10T10:39:24.000Z | 2021-07-06T11:46:05.000Z | defmodule Cforum.Jobs.Appsignal do
require Logger
# Telemetry handler for Oban job exceptions: always log the failure, and
# report to Appsignal only once the job has exhausted its retry attempts.
def handle_event([:oban, :job, :exception], _measurement, meta, _config) do
  Logger.error("Error executing job: #{meta.kind} (#{inspect(meta.error)})\n\n#{inspect(meta.stacktrace)}")

  if meta.attempt >= meta.max_attempts do
    Appsignal.send_error(meta[:kind], meta[:error], meta[:stacktrace])
  end
end
end
| 33.181818 | 109 | 0.69863 |
9e4979c7e30e66aeef4f4f67258a1881baf84280 | 3,259 | ex | Elixir | lib/aba_cli/score.ex | jscheel42/aba_cli | c66c4921f970dbc8acea4f273d9802c978d2f123 | [
"MIT"
] | null | null | null | lib/aba_cli/score.ex | jscheel42/aba_cli | c66c4921f970dbc8acea4f273d9802c978d2f123 | [
"MIT"
] | null | null | null | lib/aba_cli/score.ex | jscheel42/aba_cli | c66c4921f970dbc8acea4f273d9802c978d2f123 | [
"MIT"
] | null | null | null | defmodule AbaCLI.Score do
# Numeric fields copied verbatim from the raw score map; the incoming map
# uses string keys with the same names as the schema's atom fields.
@score_fields [
  :level, :kills, :assists, :takedowns, :deaths, :highest_kill_streak,
  :hero_damage, :siege_damage, :structure_damage, :minion_damage,
  :creep_damage, :summon_damage, :time_cc_enemy_heroes, :healing,
  :self_healing, :damage_taken, :experience_contribution, :town_kills,
  :time_spent_dead, :merc_camp_captures, :watch_tower_captures,
  :meta_experience
]

@doc """
Inserts or updates the score row belonging to `player_db` from the raw
(string-keyed) `score` map, associating it with the player.

Raises a `MatchError` if the insert/update fails (the original behavior).
"""
def db_update_score(score, player_db) do
  attrs = score_attrs(score)

  # Reuse the existing row for this player when there is one; otherwise
  # start from a new struct pre-filled with the same attributes.
  record =
    case AbaModel.Repo.get_by(AbaModel.Score, player_id: player_db.id) do
      nil -> struct(AbaModel.Score, attrs)
      existing_score -> existing_score
    end

  {:ok, _} =
    record
    |> AbaModel.Repo.preload(:player)
    |> AbaModel.Score.changeset(attrs)
    |> Ecto.Changeset.put_assoc(:player, player_db)
    |> AbaModel.Repo.insert_or_update()
end

# Builds the atom-keyed attrs map once, replacing the three hand-maintained
# copies of the same 22-field list in the original implementation.
defp score_attrs(score) do
  Map.new(@score_fields, fn field -> {field, Map.get(score, Atom.to_string(field))} end)
end
end | 39.26506 | 77 | 0.670451 |
9e499ce88000f34ae886c3636218ad709d2502e2 | 501 | ex | Elixir | lib/phx_crud_users_web/views/error_view.ex | LeonardoSSev/phx-crud-users | 52dabaae0c81adeee39afa48eb17331de261d3c4 | [
"MIT"
] | null | null | null | lib/phx_crud_users_web/views/error_view.ex | LeonardoSSev/phx-crud-users | 52dabaae0c81adeee39afa48eb17331de261d3c4 | [
"MIT"
] | null | null | null | lib/phx_crud_users_web/views/error_view.ex | LeonardoSSev/phx-crud-users | 52dabaae0c81adeee39afa48eb17331de261d3c4 | [
"MIT"
] | null | null | null | defmodule PhxCrudUsersWeb.ErrorView do
use PhxCrudUsersWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
# By default the status message is derived from the template name,
# e.g. "404.html" renders as "Not Found".
def template_not_found(template, _assigns),
  do: Phoenix.Controller.status_message_from_template(template)
end
| 29.470588 | 61 | 0.740519 |
9e49a138b9d3d95b2a95452232176fd59cef17d9 | 1,949 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta3_disable_processor_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta3_disable_processor_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta3_disable_processor_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE(review): auto-generated Google API model (see header note) — the
# field/2 macro below comes from GoogleApi.Gax.ModelBase and defines the
# struct field plus its (de)serialization mapping.
defmodule GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta3DisableProcessorMetadata do
@moduledoc """
The long running operation metadata for disable processor method.
## Attributes
* `commonMetadata` (*type:* `GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta3CommonOperationMetadata.t`, *default:* `nil`) - The basic metadata of the long running operation.
"""
use GoogleApi.Gax.ModelBase
# Single field: shared long-running-operation metadata, or nil when absent.
@type t :: %__MODULE__{
:commonMetadata =>
GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta3CommonOperationMetadata.t()
| nil
}
field(:commonMetadata,
as: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta3CommonOperationMetadata
)
end
defimpl Poison.Decoder,
  for: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta3DisableProcessorMetadata do
  # Delegate to the generated decode/2 so nested model structs are rebuilt.
  def decode(value, options) do
    GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta3DisableProcessorMetadata.decode(value, options)
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta3DisableProcessorMetadata do
  # Encoding is handled generically by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.803571 | 193 | 0.770139 |
9e49ccb9010c10ede4338701301a5fd307365530 | 370 | exs | Elixir | test/day3_ch1.exs | AJPcodes/advent_of_code_2018 | 1c24bd41cd3b8e556e91e7d1e8ff4bbb1edf5235 | [
"MIT"
] | 1 | 2018-12-04T19:54:13.000Z | 2018-12-04T19:54:13.000Z | test/day3_ch1.exs | AJPcodes/advent_of_code_2018 | 1c24bd41cd3b8e556e91e7d1e8ff4bbb1edf5235 | [
"MIT"
] | null | null | null | test/day3_ch1.exs | AJPcodes/advent_of_code_2018 | 1c24bd41cd3b8e556e91e7d1e8ff4bbb1edf5235 | [
"MIT"
] | null | null | null | defmodule Test.D3.Ch1 do
use ExUnit.Case
# 1288 @ 130,449: 22x26
# 1289 @ 310,489: 14x27
# 1290 @ 528,540: 20x11
test "Parses lines into data structs" do
  # Input format: "#id @ left,top: widthxheight"
  expected = %{id: 1288, left: 130, top: 449, width: 22, height: 26}

  assert D3.Ch1.parse_line("#1288 @ 130,449: 22x26") == expected
end
end
| 18.5 | 60 | 0.502703 |
9e49d8d00d4e980b99e67c19f051cdb8c7f72f04 | 143 | ex | Elixir | chapter_8/todo_supervised/lib/todo/cache/client.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T11:55:58.000Z | 2021-08-22T13:19:56.000Z | chapter_8/todo_supervised/lib/todo/cache/client.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | null | null | null | chapter_8/todo_supervised/lib/todo/cache/client.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T21:19:45.000Z | 2021-08-22T13:20:03.000Z | defmodule Todo.Cache.Client do
# Resolves the to-do server process registered for `todo_list_name`.
# Synchronous: blocks until the Todo.Cache process replies with the pid.
def server_process(todo_list_name) do
  GenServer.call(Todo.Cache, {:server_process, todo_list_name})
end
end
| 28.6 | 69 | 0.783217 |
9e49dacd2c73a660be960a18f35bfe5357719bb2 | 550 | exs | Elixir | apps/location_service/config/config.exs | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | apps/location_service/config/config.exs | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | apps/location_service/config/config.exs | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | use Mix.Config
# HTTP connection pool used for Google location requests.
config :location_service, :http_pool, :google_http_pool
config :location_service,
# Google Maps credentials; client id / signing key fall back to "" when unset.
google_api_key: System.get_env("GOOGLE_API_KEY"),
google_client_id: System.get_env("GOOGLE_MAPS_CLIENT_ID") || "",
google_signing_key: System.get_env("GOOGLE_MAPS_SIGNING_KEY") || "",
# Backend per operation: {:system, env_var, default} — the LOCATION_SERVICE
# environment variable overrides the :aws default at runtime.
geocode: {:system, "LOCATION_SERVICE", :aws},
reverse_geocode: {:system, "LOCATION_SERVICE", :aws},
autocomplete: {:system, "LOCATION_SERVICE", :aws},
aws_index_prefix: {:system, "AWS_PLACE_INDEX_PREFIX", "dotcom-dev"}
# Layer in environment-specific overrides (dev.exs / test.exs / prod.exs).
import_config "#{Mix.env()}.exs"
| 36.666667 | 70 | 0.745455 |
9e4a06c21ce36becf146be2f44b6af5262a4d8bf | 120 | ex | Elixir | apps/peedy_f/lib/behaviours/watermarker.ex | poteto/peedy | df9d5ee7fcbceb30b5939b36224a257249a180ea | [
"Apache-2.0"
] | 34 | 2017-05-07T08:50:59.000Z | 2021-11-25T00:27:11.000Z | apps/peedy_f/lib/behaviours/watermarker.ex | poteto/peedy | df9d5ee7fcbceb30b5939b36224a257249a180ea | [
"Apache-2.0"
] | null | null | null | apps/peedy_f/lib/behaviours/watermarker.ex | poteto/peedy | df9d5ee7fcbceb30b5939b36224a257249a180ea | [
"Apache-2.0"
] | 7 | 2017-05-10T12:42:30.000Z | 2021-11-03T01:21:02.000Z | defmodule PeedyF.WatermarkerBehaviour do
alias PeedyF.Watermark
@callback new(text :: String.t) :: Watermark.t
end
| 20 | 48 | 0.766667 |
9e4a070e2e0350be9312ba937e0071119f1f8846 | 902 | ex | Elixir | clients/admin/lib/google_api/admin/datatransfer_v1/metadata.ex | dsdshcym/elixir-google-api | 2d9eef7207bb422d7ecfc1ec780721c6abd0ac81 | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/datatransfer_v1/metadata.ex | dsdshcym/elixir-google-api | 2d9eef7207bb422d7ecfc1ec780721c6abd0ac81 | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/datatransfer_v1/metadata.ex | dsdshcym/elixir-google-api | 2d9eef7207bb422d7ecfc1ec780721c6abd0ac81 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Admin.Datatransfer_v1 do
  @moduledoc """
  API client metadata for GoogleApi.Admin.Datatransfer_v1.
  """

  # Revision date of the discovery document this client was generated from.
  @discovery_revision "20210420"

  @doc "Returns the discovery-document revision baked into this client."
  def discovery_revision, do: @discovery_revision
end
| 33.407407 | 74 | 0.763858 |
9e4a088a1e45ba14328e93e975ea6d615725716c | 2,584 | ex | Elixir | clients/analytics_reporting/lib/google_api/analytics_reporting/v4/model/segment_metric_filter.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/analytics_reporting/lib/google_api/analytics_reporting/v4/model/segment_metric_filter.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/analytics_reporting/lib/google_api/analytics_reporting/v4/model/segment_metric_filter.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AnalyticsReporting.V4.Model.SegmentMetricFilter do
@moduledoc """
Metric filter to be used in a segment filter clause.
## Attributes
* `comparisonValue` (*type:* `String.t`, *default:* `nil`) - The value to compare against. If the operator is `BETWEEN`, this value is
treated as minimum comparison value.
* `maxComparisonValue` (*type:* `String.t`, *default:* `nil`) - Max comparison value is only used for `BETWEEN` operator.
* `metricName` (*type:* `String.t`, *default:* `nil`) - The metric that will be filtered on. A `metricFilter` must contain a
metric name.
* `operator` (*type:* `String.t`, *default:* `nil`) - Specifies is the operation to perform to compare the metric. The default
is `EQUAL`.
* `scope` (*type:* `String.t`, *default:* `nil`) - Scope for a metric defines the level at which that metric is defined. The
specified metric scope must be equal to or greater than its primary scope
as defined in the data model. The primary scope is defined by if the
segment is selecting users or sessions.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:comparisonValue => String.t(),
:maxComparisonValue => String.t(),
:metricName => String.t(),
:operator => String.t(),
:scope => String.t()
}
field(:comparisonValue)
field(:maxComparisonValue)
field(:metricName)
field(:operator)
field(:scope)
end
defimpl Poison.Decoder, for: GoogleApi.AnalyticsReporting.V4.Model.SegmentMetricFilter do
def decode(value, options) do
GoogleApi.AnalyticsReporting.V4.Model.SegmentMetricFilter.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AnalyticsReporting.V4.Model.SegmentMetricFilter do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.753846 | 138 | 0.709752 |
9e4a0a8bf15a56378a84f7deae301899131b5ddf | 795 | ex | Elixir | lib/nostrum/cache/guild/guild_register.ex | jos-b/nostrum | baf5c9de9f17c3bd99c5c06a454e03cc448aad1c | [
"MIT"
] | 1 | 2020-12-08T23:47:17.000Z | 2020-12-08T23:47:17.000Z | lib/nostrum/cache/guild/guild_register.ex | jos-b/nostrum | baf5c9de9f17c3bd99c5c06a454e03cc448aad1c | [
"MIT"
] | null | null | null | lib/nostrum/cache/guild/guild_register.ex | jos-b/nostrum | baf5c9de9f17c3bd99c5c06a454e03cc448aad1c | [
"MIT"
] | 1 | 2020-09-03T13:55:55.000Z | 2020-09-03T13:55:55.000Z | defmodule Nostrum.Cache.Guild.GuildRegister do
@moduledoc false
alias Nostrum.Cache.Guild.GuildSupervisor
alias Nostrum.Cache.GuildCache
# Looks up the guild process pid registered under `id` in GuildRegistry.
# Returns {:ok, pid} when registered, {:error, :id_not_found_on_guild_lookup}
# otherwise.
def lookup(id) do
  with [{pid, _value}] <- Registry.lookup(GuildRegistry, id) do
    {:ok, pid}
  else
    [] -> {:error, :id_not_found_on_guild_lookup}
  end
end
# Like lookup/1 but returns the bare pid, raising Nostrum.Error.CacheError
# when no process is registered under `id`.
def lookup!(id) do
  case Registry.lookup(GuildRegistry, id) do
    [{pid, _value}] ->
      pid

    [] ->
      raise(Nostrum.Error.CacheError, "No entry in guild registry for id #{id}")
  end
end
# Starts a guild process for `id` under the guild supervisor, seeded with
# `guild`. Returns {:ok, guild} on success; when a process is already
# registered for the id, falls back to the cached guild via GuildCache.get/1;
# any other supervisor result is passed through unchanged.
def create_guild_process(id, guild) do
case GuildSupervisor.start_child(id, guild) do
{:ok, _pid} ->
{:ok, guild}
{:error, {:already_registered, _pid}} ->
GuildCache.get(id)
other ->
other
end
end
end
| 19.875 | 82 | 0.6 |
9e4a43f812b0cca111375c6de92895b510cd2494 | 1,924 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_update_deidentify_template_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_update_deidentify_template_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_update_deidentify_template_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
# NOTE(review): auto-generated Google API model — the field/1,2 macros come
# from GoogleApi.Gax.ModelBase and define struct fields plus JSON mapping.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2UpdateDeidentifyTemplateRequest do
@moduledoc """
Request message for UpdateDeidentifyTemplate.
## Attributes
* `deidentifyTemplate` (*type:* `GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DeidentifyTemplate.t`, *default:* `nil`) - New DeidentifyTemplate value.
* `updateMask` (*type:* `String.t`, *default:* `nil`) - Mask to control which fields get updated.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:deidentifyTemplate => GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DeidentifyTemplate.t(),
:updateMask => String.t()
}
field(:deidentifyTemplate, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DeidentifyTemplate)
field(:updateMask)
end
defimpl Poison.Decoder,
  for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2UpdateDeidentifyTemplateRequest do
  # Delegate to the generated decode/2 so nested model structs are rebuilt.
  def decode(value, options) do
    GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2UpdateDeidentifyTemplateRequest.decode(value, options)
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2UpdateDeidentifyTemplateRequest do
  # Encoding is handled generically by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.981818 | 150 | 0.755198 |
9e4a9d8f04fdbb9b7b229d8cc38f76cbf483ffb4 | 539 | ex | Elixir | lib/hl7/2.4/segments/om3.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/om3.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/om3.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_4.Segments.OM3 do
@moduledoc false
require Logger
# Composite HL7 v2.4 data types; Ce is the coded-element type used below.
alias HL7.V2_4.{DataTypes}
# NOTE(review): HL7.Segment presumably generates the OM3 struct from this
# field list, with nil meaning a plain value and DataTypes.Ce a parsed
# coded element — confirm against the HL7.Segment macro.
use HL7.Segment,
fields: [
segment: nil,
sequence_number_test_observation_master_file: nil,
preferred_coding_system: DataTypes.Ce,
valid_coded_answers_: DataTypes.Ce,
normal_text_codes_for_categorical_observations: DataTypes.Ce,
abnormal_text_codes_for_categorical_observations: DataTypes.Ce,
critical_text_codes_for_categorical_observations: DataTypes.Ce,
value_type: nil
]
end
| 28.368421 | 69 | 0.758813 |
9e4aa509a5fa167c1a37c513616d15b49f70ffc5 | 3,396 | ex | Elixir | lib/tesla/adapter/httpc.ex | hasclass/tesla | c1ee0e3417ea269124869ca1ff33fd2364391154 | [
"MIT"
] | 1 | 2020-12-21T03:45:23.000Z | 2020-12-21T03:45:23.000Z | lib/tesla/adapter/httpc.ex | hasclass/tesla | c1ee0e3417ea269124869ca1ff33fd2364391154 | [
"MIT"
] | null | null | null | lib/tesla/adapter/httpc.ex | hasclass/tesla | c1ee0e3417ea269124869ca1ff33fd2364391154 | [
"MIT"
] | null | null | null | defmodule Tesla.Adapter.Httpc do
@moduledoc """
Adapter for [httpc](http://erlang.org/doc/man/httpc.html)
This is the default adapter.
**NOTE** Tesla overrides default autoredirect value with false to ensure
consistency between adapters
"""
@behaviour Tesla.Adapter
import Tesla.Adapter.Shared, only: [stream_to_fun: 1, next_chunk: 1]
alias Tesla.Multipart
@override_defaults autoredirect: false
@http_opts ~w(timeout connect_timeout ssl essl autoredirect proxy_auth version relaxed url_encode)a
# Tesla.Adapter callback: performs `env`'s request through :httpc.
# Returns {:ok, env} with status/headers/body filled in, or the error
# produced by request/2 unchanged.
def call(env, opts) do
  opts = Tesla.Adapter.opts(@override_defaults, env, opts)

  case request(env, opts) do
    {:ok, {status, headers, body}} -> {:ok, format_response(env, status, headers, body)}
    error -> error
  end
end
defp format_response(env, {_, status, _}, headers, body) do
%{env | status: status, headers: format_headers(headers), body: format_body(body)}
end
# Per http://erlang.org/doc/man/httpc.html headers come back as
# [{field(), value()}] with field()/value() as Erlang strings (charlists).
# Normalize to Tesla's shape: downcased binary names, binary values.
defp format_headers(headers) do
  Enum.map(headers, fn {key, value} ->
    {key |> to_string() |> String.downcase(), to_string(value)}
  end)
end
# Per http://erlang.org/doc/man/httpc.html the body is string() | binary(),
# where string() is a list of ASCII characters; normalize to a binary.
defp format_body(data) when is_binary(data), do: data
defp format_body(data) when is_list(data), do: IO.iodata_to_binary(data)
# Assembles the :httpc call from the Tesla env: charlist URL (query merged
# in), charlist header pairs, charlist content type, and opts split into
# httpc-recognized options (@http_opts) vs. the remaining options.
# The raw httpc result is normalized by handle/1.
defp request(env, opts) do
content_type = to_charlist(Tesla.get_header(env, "content-type") || "")
handle(
request(
env.method,
Tesla.build_url(env.url, env.query) |> to_charlist,
Enum.map(env.headers, fn {k, v} -> {to_charlist(k), to_charlist(v)} end),
content_type,
env.body,
Keyword.split(opts, @http_opts)
)
)
end
# DELETE with a nil body: substitute an empty binary body so httpc gets the
# 4-tuple request form — see https://github.com/teamon/tesla/issues/147
defp request(:delete, url, headers, content_type, nil, {http_opts, opts}) do
request(:delete, url, headers, content_type, "", {http_opts, opts})
end
# No body: use httpc's bodyless {url, headers} request form.
defp request(method, url, headers, _content_type, nil, {http_opts, opts}) do
:httpc.request(method, {url, headers}, http_opts, opts)
end
# Multipart body: merge in the multipart headers, pull the content-type out
# of the header list (defaulting to 'text/plain'), and stream the encoded
# parts as the body.
defp request(method, url, headers, _content_type, %Multipart{} = mp, opts) do
headers = headers ++ Multipart.headers(mp)
headers = for {key, value} <- headers, do: {to_charlist(key), to_charlist(value)}
{content_type, headers} =
case List.keytake(headers, 'content-type', 0) do
nil -> {'text/plain', headers}
{{_, ct}, headers} -> {ct, headers}
end
body = stream_to_fun(Multipart.body(mp))
request(method, url, headers, to_charlist(content_type), body, opts)
end
# Stream body: convert to a pull function and re-dispatch.
defp request(method, url, headers, content_type, %Stream{} = body, opts) do
fun = stream_to_fun(body)
request(method, url, headers, content_type, fun, opts)
end
# Function body: wrap in httpc's {:chunkify, next_fun, acc} self-chunking
# body form, then re-dispatch to the final clause.
defp request(method, url, headers, content_type, body, opts) when is_function(body) do
body = {:chunkify, &next_chunk/1, body}
request(method, url, headers, content_type, body, opts)
end
# Binary / iodata / chunkify-tuple body: full 4-tuple httpc request.
defp request(method, url, headers, content_type, body, {http_opts, opts}) do
:httpc.request(method, {url, headers, content_type, body}, http_opts, opts)
end
# Map connection failures onto Tesla's :econnrefused error; every other
# result (including successes) passes through untouched.
defp handle({:error, {:failed_connect, _details}}), do: {:error, :econnrefused}
defp handle(other), do: other
end
| 32.970874 | 101 | 0.669317 |
9e4aa7ee4bab05a2c4c2044f4ce0b312cdde550d | 19,885 | ex | Elixir | lib/ecto/association.ex | rbishop/ecto | a8a3215c9e2e35f7556f54c8d47d78a3670796d8 | [
"Apache-2.0"
] | null | null | null | lib/ecto/association.ex | rbishop/ecto | a8a3215c9e2e35f7556f54c8d47d78a3670796d8 | [
"Apache-2.0"
] | null | null | null | lib/ecto/association.ex | rbishop/ecto | a8a3215c9e2e35f7556f54c8d47d78a3670796d8 | [
"Apache-2.0"
] | null | null | null | import Ecto.Query, only: [from: 2, join: 4, distinct: 3, select: 3]
defmodule Ecto.Association.NotLoaded do
  @moduledoc """
  Struct returned by one to one associations when they are not loaded.

  The fields are:

    * `__field__` - the association field in `owner`
    * `__owner__` - the model that owns the association
    * `__cardinality__` - the cardinality of the association
  """
  defstruct [:__field__, :__owner__, :__cardinality__]

  defimpl Inspect do
    # Render a compact placeholder instead of the raw struct so that
    # inspecting a model makes the unloaded association obvious.
    def inspect(%{__field__: field}, _opts) do
      "#Ecto.Association.NotLoaded<association #{inspect field} is not loaded>"
    end
  end
end
defmodule Ecto.Association do
@moduledoc false
@type t :: %{__struct__: atom, cardinality: :one | :many,
field: atom, owner_key: atom, owner: atom}
use Behaviour
@doc """
Builds the association struct.
The struct must be defined in the module that implements the
callback and it must contain at least the following keys:
* `:cardinality` - tells if the association is one to one
or one/many to many
* `:field` - tells the field in the owner struct where the
association should be stored
* `:owner` - the owner module of the association
* `:owner_key` - the key in the owner with the association value
"""
defcallback struct(module, field :: atom, opts :: Keyword.t) :: t
@doc """
Builds a model for the given association.
The struct to build from is given as argument in case default values
should be set in the struct.
Invoked by `Ecto.Model.build/3`.
"""
defcallback build(t, Ecto.Model.t, %{atom => term} | [Keyword.t]) :: Ecto.Model.t
@doc """
Returns an association join query.
This callback receives the association struct and it must return
a query that retrieves all associated entries using joins up to
the owner association.
For example, a `has_many :comments` inside a `Post` module would
return:
from c in Comment, join: p in Post, on: c.post_id == p.id
Note all the logic must be expressed inside joins, as fields like
`where` and `order_by` won't be used by the caller.
This callback is invoked when `join: assoc(p, :comments)` is used
inside queries.
"""
defcallback joins_query(t) :: Ecto.Query.t
@doc """
Returns the association query.
This callback receives the association struct and it must return
a query that retrieves all associated entries with the given
values for the owner key.
This callback is used by `Ecto.Model.assoc/2`.
"""
defcallback assoc_query(t, values :: [term]) :: Ecto.Query.t
@doc """
Returns the association query on top of the given query.
This callback receives the association struct and it must return
a query that retrieves all associated entries with the given
values for the owner key.
This callback is used by preloading.
"""
defcallback assoc_query(t, Ecto.Query.t, values :: [term]) :: Ecto.Query.t
@doc """
Returns information used by the preloader.
"""
defcallback preload_info(t) ::
{:assoc, t, atom} | {:through, t, [atom]}
@doc """
Retrieves the association from the given model.
"""
def association_from_model!(model, assoc) do
# __schema__/2 returns the association reflection or nil; `||` promotes the
# nil into a descriptive ArgumentError.
model.__schema__(:association, assoc) ||
raise ArgumentError, "model #{inspect model} does not have association #{inspect assoc}"
end
@doc """
Checks if an association is loaded.
## Examples
post = Repo.get(Post, 1)
Ecto.Association.loaded?(post.comments) # false
post = post |> Repo.preload(:comments)
Ecto.Association.loaded?(post.comments) # true
"""
# An association is "not loaded" exactly when it still holds the
# NotLoaded placeholder struct; any other value counts as loaded.
def loaded?(%Ecto.Association.NotLoaded{}), do: false
def loaded?(_association), do: true
@doc """
Returns the association key for the given module with the given suffix.
## Examples
iex> Ecto.Association.association_key(Hello.World, :id)
:world_id
iex> Ecto.Association.association_key(Hello.HTTP, :id)
:http_id
iex> Ecto.Association.association_key(Hello.HTTPServer, :id)
:http_server_id
"""
def association_key(module, suffix) do
  prefix =
    module
    |> Module.split()
    |> List.last()
    |> underscore()

  :"#{prefix}_#{suffix}"
end

# Minimal CamelCase -> snake_case conversion ("-" -> "_", "." -> "/",
# ".." kept verbatim); clause order below is significant.
defp underscore(""), do: ""

defp underscore(<<char, rest :: binary>>) do
  <<to_lower_char(char)>> <> do_underscore(rest, char)
end

# Upper-case char followed by a non-upper char ends an acronym run,
# e.g. "HTTPServer" -> "http_server".
defp do_underscore(<<char, next, rest :: binary>>, _prev) when char in ?A..?Z and not next in ?A..?Z do
  <<?_, to_lower_char(char), next>> <> do_underscore(rest, next)
end

# Upper-case char after a non-upper char starts a new word.
defp do_underscore(<<char, rest :: binary>>, prev) when char in ?A..?Z and not prev in ?A..?Z do
  <<?_, to_lower_char(char)>> <> do_underscore(rest, char)
end

defp do_underscore(<<?-, rest :: binary>>, _prev) do
  <<?_>> <> do_underscore(rest, ?-)
end

defp do_underscore(<< "..", rest :: binary>>, _prev) do
  <<"..">> <> underscore(rest)
end

defp do_underscore(<<?.>>, _prev), do: <<?.>>

defp do_underscore(<<?., rest :: binary>>, _prev) do
  <<?/>> <> underscore(rest)
end

defp do_underscore(<<char, rest :: binary>>, _prev) do
  <<to_lower_char(char)>> <> do_underscore(rest, char)
end

defp do_underscore(<<>>, _prev), do: <<>>

# ASCII-only lowercasing; non-upper characters pass through.
defp to_lower_char(char) when char in ?A..?Z, do: char + 32
defp to_lower_char(char), do: char
@doc """
Retrieves related module from queryable.
## Examples
iex> Ecto.Association.related_from_query({"custom_source", Model})
Model
iex> Ecto.Association.related_from_query(Model)
Model
iex> Ecto.Association.related_from_query("wrong")
** (ArgumentError) association queryable must be a model or {source, model}, got: "wrong"
"""
# A bare module atom is already the related model; a {source, model}
# tuple carries a custom source string alongside the model.
def related_from_query(module) when is_atom(module), do: module
def related_from_query({source, module}) when is_binary(source) and is_atom(module), do: module

def related_from_query(other) do
  raise ArgumentError, "association queryable must be a model " <>
                       "or {source, model}, got: #{inspect other}"
end
@doc """
Merges source from query into to the given model.
In case the query does not have a source, returns
the model unchanged.
"""
def merge_source(model, query)

# A {source, model} queryable overrides the struct's source.
def merge_source(struct, {source, _model}), do: Ecto.Model.put_source(struct, source)

# Any other query shape leaves the struct untouched.
def merge_source(struct, _query), do: struct
end
defmodule Ecto.Association.Has do
@moduledoc """
The association struct for `has_one` and `has_many` associations.
Its fields are:
* `cardinality` - The association cardinality
* `field` - The name of the association field on the model
* `owner` - The model where the association was defined
* `related` - The model that is associated
* `owner_key` - The key on the `owner` model used for the association
* `related_key` - The key on the `related` model used for the association
* `queryable` - The real query to use for querying association
* `on_delete` - The action taken on associations when model is deleted
* `on_replace` - The action taken on associations when model is replaced
* `on_cast` - The changeset function to call during casting
* `defaults` - Default fields used when building the association
"""
@behaviour Ecto.Association
@on_delete_opts [:nothing, :fetch_and_delete, :nilify_all, :delete_all]
@on_replace_opts [:delete, :nilify]
defstruct [:cardinality, :field, :owner, :related, :owner_key, :related_key,
:queryable, :on_delete, :on_replace, :on_cast, defaults: []]
@doc false
# Builds the Has reflection at compile time, validating options:
#   * :references (or the owner's @primary_key) must name an existing field
#   * :queryable is required; a :through option is rejected here
#   * :on_delete / :on_replace must be one of the allowed strategies
def struct(module, name, opts) do
ref =
cond do
ref = opts[:references] ->
ref
primary_key = Module.get_attribute(module, :primary_key) ->
elem(primary_key, 0)
true ->
raise ArgumentError, "need to set :references option for " <>
"association #{inspect name} when model has no primary key"
end
unless Module.get_attribute(module, :ecto_fields)[ref] do
raise ArgumentError, "model does not have the field #{inspect ref} used by " <>
"association #{inspect name}, please set the :references option accordingly"
end
queryable = Keyword.fetch!(opts, :queryable)
related = Ecto.Association.related_from_query(queryable)
if opts[:through] do
raise ArgumentError, "invalid association #{inspect name}. When using the :through " <>
"option, the model should not be passed as second argument"
end
on_delete = Keyword.get(opts, :on_delete, :nothing)
on_replace = Keyword.get(opts, :on_replace, :delete)
on_cast = Keyword.get(opts, :on_cast, :changeset)
unless on_delete in @on_delete_opts do
raise ArgumentError, "invalid :on_delete option for #{inspect name}. " <>
"The only valid options are: " <>
Enum.map_join(@on_delete_opts, ", ", &"`#{inspect &1}`")
end
unless on_replace in @on_replace_opts do
raise ArgumentError, "invalid `:on_replace` option for #{inspect name}. " <>
"The only valid options are: " <>
Enum.map_join(@on_replace_opts, ", ", &"`#{inspect &1}`")
end
# Default foreign key is "<owner>_<ref>" via association_key/2, e.g. post_id.
%__MODULE__{
field: name,
cardinality: Keyword.fetch!(opts, :cardinality),
owner: module,
related: related,
owner_key: ref,
related_key: opts[:foreign_key] || Ecto.Association.association_key(module, ref),
queryable: queryable,
on_delete: on_delete,
on_replace: on_replace,
on_cast: on_cast,
defaults: opts[:defaults] || []
}
end
@doc false
# Builds a related struct for the given owner `struct`: applies the
# association defaults, then the caller-supplied attributes, copies the
# owner's key value into the related key, and merges any custom source
# from the queryable.
# NOTE: `struct/2` in the pipeline is Kernel.struct/2 — the parameter named
# `struct` does not shadow it because parenthesized calls resolve to functions.
def build(%{related: related, owner_key: owner_key, related_key: related_key,
queryable: queryable, defaults: defaults}, struct, attributes) do
related
|> struct(defaults)
|> struct(attributes)
|> Map.put(related_key, Map.get(struct, owner_key))
|> Ecto.Association.merge_source(queryable)
end
@doc false
# Joins the owner to the related queryable on related_key == owner_key,
# mirroring e.g. `from c in Comment, join: p in Post, on: c.post_id == p.id`.
def joins_query(refl) do
from o in refl.owner,
join: q in ^refl.queryable,
on: field(q, ^refl.related_key) == field(o, ^refl.owner_key)
end
@doc false
def assoc_query(refl, values) do
assoc_query(refl, refl.queryable, values)
end
@doc false
# Filters `query` down to rows whose related_key is among `values`
# (the owner key values of the structs being loaded).
def assoc_query(refl, query, values) do
from x in query,
where: field(x, ^refl.related_key) in ^values
end
@doc false
# Preloader contract: a direct :assoc association keyed by the related key.
def preload_info(refl), do: {:assoc, refl, refl.related_key}
@behaviour Ecto.Changeset.Relation
@doc false
# Replacing with :on_replace == :delete removes the old record outright.
def on_replace(%{on_replace: :delete}, changeset), do: {:delete, changeset}

# Replacing with :on_replace == :nilify keeps the record but clears its
# foreign key, detaching it from the owner.
def on_replace(%{on_replace: :nilify, related_key: related_key}, changeset) do
  {:update, update_in(changeset.changes, &Map.put(&1, related_key, nil))}
end
@doc false
# TODO: This should be spec'ed somewhere
# Applies `changeset`'s action (insert/update/delete) for this child on
# `repo`, injecting the parent's key into the child first. Returns
# {:ok, model} ({:ok, nil} after a delete) or {:error, changeset} with the
# parent-key change rolled back to the caller's original value.
def on_repo_action(assoc, changeset, parent, _adapter, repo, repo_action, opts) do
%{action: action, changes: changes} = changeset
check_action!(action, repo_action, assoc)
{key, value} = parent_key(assoc, parent)
changeset = update_parent_key(changeset, action, key, value)
case apply(repo, action, [changeset, opts]) do
{:ok, _} = ok ->
maybe_replace_one!(assoc, changeset, parent, repo, opts)
if action == :delete, do: {:ok, nil}, else: ok
{:error, changeset} ->
# Restore the caller's original parent-key change so the returned
# changeset reflects their input rather than the injected value.
original = Map.get(changes, key)
{:error, update_in(changeset.changes, &Map.put(&1, key, original))}
end
end
# Deletes don't need the parent key injected; inserts/updates do.
defp update_parent_key(changeset, :delete, _key, _value),
do: changeset
defp update_parent_key(changeset, _action, key, value),
do: Ecto.Changeset.put_change(changeset, key, value)
# {related_key, owner's current key value}: ties the child to its parent.
defp parent_key(%{owner_key: owner_key, related_key: related_key}, owner) do
{related_key, Map.get(owner, owner_key)}
end
# A child marked :delete while the parent itself is being inserted is a
# caller error — there is nothing persisted to delete yet.
defp check_action!(:delete, :insert, %{related: model}),
do: raise(ArgumentError, "got action :delete in changeset for associated #{inspect model} while inserting")
defp check_action!(_, _, _), do: :ok
# When inserting a new child into a has_one association, the previously
# loaded child (if any) must be replaced per the :on_replace strategy;
# raises Ecto.InvalidChangesetError if that replacement itself fails.
defp maybe_replace_one!(%{cardinality: :one, field: field} = assoc,
%{action: :insert}, parent, repo, opts) do
case Map.get(parent, field) do
%Ecto.Association.NotLoaded{} ->
:ok
nil ->
:ok
previous ->
{action, changeset} = on_replace(assoc, Ecto.Changeset.change(previous))
case apply(repo, action, [changeset, opts]) do
{:ok, _} ->
:ok
{:error, changeset} ->
raise Ecto.InvalidChangesetError, action: action, changeset: changeset
end
end
end
# has_many associations and non-insert actions never replace anything.
defp maybe_replace_one!(_, _, _, _, _), do: :ok
end
defmodule Ecto.Association.HasThrough do
@moduledoc """
The association struct for `has_one` and `has_many` through associations.
Its fields are:
* `cardinality` - The association cardinality
* `field` - The name of the association field on the model
* `owner` - The model where the association was defined
* `owner_key` - The key on the `owner` model used for the association
* `through` - The through associations
"""
alias Ecto.Query.JoinExpr
@behaviour Ecto.Association
defstruct [:cardinality, :field, :owner, :owner_key, :through]
@doc false
# Builds the HasThrough reflection: validates that :through lists at least
# two steps and that the first step is an association already defined on
# this module (its owner_key seeds ours).
def struct(module, name, opts) do
through = Keyword.fetch!(opts, :through)
refl =
case through do
[h,_|_] ->
Module.get_attribute(module, :ecto_assocs)[h]
_ ->
raise ArgumentError, ":through expects a list with at least two entries: " <>
"the association in the current module and one step through, got: #{inspect through}"
end
unless refl do
raise ArgumentError, "model does not have the association #{inspect hd(through)} " <>
"used by association #{inspect name}, please ensure the association exists and " <>
"is defined before the :through one"
end
%__MODULE__{
field: name,
cardinality: Keyword.fetch!(opts, :cardinality),
through: through,
owner: module,
owner_key: refl.owner_key,
}
end
@doc false
def build(%{field: name}, %{__struct__: struct}, _attributes) do
raise ArgumentError,
"cannot build through association #{inspect name} for #{inspect struct}. " <>
"Instead build the intermediate steps explicitly."
end
@doc false
def preload_info(refl) do
{:through, refl, refl.through}
end
@doc false
def joins_query(%{owner: owner, through: through}) do
joins_query(owner, through, 0)
end
defp joins_query(query, through, counter) do
Enum.reduce(through, {query, counter}, fn current, {acc, counter} ->
{join(acc, :inner, [x: counter], assoc(x, ^current)), counter + 1}
end) |> elem(0)
end
@doc false
def assoc_query(refl, values) do
assoc_query(refl, %Ecto.Query{from: {"join expression", nil}}, values)
end
@doc false
def assoc_query(%{owner: owner, through: [h|t]}, %Ecto.Query{} = query, values) do
refl = owner.__schema__(:association, h)
# Find the position for upcoming joins
position = length(query.joins) + 1
# The first association must become a join,
# so we convert its where (that comes from assoc_query)
# to a join expression.
#
# Note we are being restrictive on the format
# expected from assoc_query.
join = assoc_to_join(refl.__struct__.assoc_query(refl, values), position)
# Add the new join to the query and traverse the remaining
# joins that will start counting from the added join position.
query =
%{query | joins: query.joins ++ [join]}
|> joins_query(t, position)
|> Ecto.Query.Planner.prepare_sources()
# Our source is going to be the last join after
# traversing them all.
{joins, [assoc]} = Enum.split(query.joins, -1)
# Update the mapping and start rewriting expressions
# to make the last join point to the new from source.
mapping = Map.put(%{}, length(joins) + 1, 0)
assoc_on = rewrite_expr(assoc.on, mapping)
%{query | wheres: [assoc_on|query.wheres], joins: joins,
from: merge_from(query.from, assoc.source), sources: nil}
|> distinct([x], true)
end
defp assoc_to_join(%{from: from, wheres: [on], order_bys: [], joins: []}, position) do
%JoinExpr{ix: position, qual: :inner, source: from,
on: rewrite_expr(on, %{0 => position}),
file: on.file, line: on.line}
end
defp merge_from({"join expression", _}, assoc_source), do: assoc_source
defp merge_from(from, _assoc_source), do: from
defp rewrite_expr(%{expr: expr, params: params} = part, mapping) do
expr =
Macro.prewalk expr, fn
{:&, meta, [ix]} ->
{:&, meta, [Map.get(mapping, ix, ix)]}
other ->
other
end
params =
Enum.map params, fn
{val, {composite, {ix, field}}} when is_integer(ix) ->
{val, {composite, {Map.get(mapping, ix, ix), field}}}
{val, {ix, field}} when is_integer(ix) ->
{val, {Map.get(mapping, ix, ix), field}}
val ->
val
end
%{part | expr: expr, params: params}
end
end
defmodule Ecto.Association.BelongsTo do
  @moduledoc """
  The association struct for a `belongs_to` association.

  Its fields are:

    * `cardinality` - The association cardinality
    * `field` - The name of the association field on the model
    * `owner` - The model where the association was defined
    * `related` - The model that is associated
    * `owner_key` - The key on the `owner` model used for the association
    * `related_key` - The key on the `related` model used for the association
    * `queryable` - The real query to use for querying association
    * `defaults` - Default fields used when building the association
  """

  @behaviour Ecto.Association
  defstruct [:cardinality, :field, :owner, :related, :owner_key, :related_key, :queryable, defaults: []]

  @doc false
  def struct(module, name, opts) do
    # Resolve the key on the related model: an explicit :references option
    # wins; otherwise fall back to the owner's primary key (warning when it
    # is custom, since the guess may not match the related model), and
    # finally default to :id.
    ref =
      cond do
        ref = opts[:references] ->
          ref
        primary_key = Module.get_attribute(module, :primary_key) ->
          case elem(primary_key, 0) do
            :id -> :id
            key ->
              IO.puts :stderr, "warning: #{inspect module} has a custom primary key and " <>
                "invoked belongs_to(#{inspect name}). To avoid ambiguity, " <>
                "please also specify the :references option in belongs_to " <>
                "with the primary key name of the associated model, currently " <>
                "it defaults to #{inspect key}\n#{Exception.format_stacktrace}"
              key
          end
        true ->
          :id
      end

    queryable = Keyword.fetch!(opts, :queryable)
    related = Ecto.Association.related_from_query(queryable)

    unless is_atom(related) do
      raise ArgumentError, "association queryable must be a model, got: #{inspect related}"
    end

    %__MODULE__{
      field: name,
      cardinality: :one,
      owner: module,
      related: related,
      owner_key: Keyword.fetch!(opts, :foreign_key),
      related_key: ref,
      queryable: queryable
    }
  end

  @doc false
  def build(%{field: name}, %{__struct__: struct}, _attributes) do
    # belongs_to is the inverse side; children are built from the has_one /
    # has_many side instead. (Typo fix: "opposide" -> "opposite".)
    raise ArgumentError,
      "cannot build belongs_to association #{inspect name} for #{inspect struct}. " <>
      "Belongs to associations cannot be built with build/3, only the opposite side (has_one/has_many)"
  end

  @doc false
  def joins_query(refl) do
    # Join owner rows to their parent rows on related_key == owner_key.
    from o in refl.owner,
      join: q in ^refl.queryable,
      on: field(q, ^refl.related_key) == field(o, ^refl.owner_key)
  end

  @doc false
  def assoc_query(refl, values) do
    assoc_query(refl, refl.queryable, values)
  end

  @doc false
  def assoc_query(refl, query, values) do
    # Fetch all parents whose related key is in the given foreign-key values.
    from x in query,
      where: field(x, ^refl.related_key) in ^values
  end

  @doc false
  def preload_info(refl) do
    {:assoc, refl, refl.related_key}
  end
end
| 30.877329 | 111 | 0.64873 |
9e4aa9a5ab1c0ad7cbf9bb1f5609eff7d2daca0b | 2,691 | ex | Elixir | clients/display_video/lib/google_api/display_video/v1/model/maximize_spend_bid_strategy.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/display_video/lib/google_api/display_video/v1/model/maximize_spend_bid_strategy.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/display_video/lib/google_api/display_video/v1/model/maximize_spend_bid_strategy.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.MaximizeSpendBidStrategy do
  @moduledoc """
  A strategy that automatically adjusts the bid to optimize a specified performance goal while spending the full budget.

  ## Attributes

  * `customBiddingAlgorithmId` (*type:* `String.t`, *default:* `nil`) - The ID of the Custom Bidding Algorithm used by this strategy. Only applicable when performance_goal_type is set to `BIDDING_STRATEGY_PERFORMANCE_GOAL_TYPE_CUSTOM_ALGO`.
  * `maxAverageCpmBidAmountMicros` (*type:* `String.t`, *default:* `nil`) - The maximum average CPM that may be bid, in micros of the advertiser's currency. Must be greater than or equal to a billable unit of the given currency. For example, 1500000 represents 1.5 standard units of the currency.
  * `performanceGoalType` (*type:* `String.t`, *default:* `nil`) - Required. The type of the performance goal that the bidding strategy tries to minimize while spending the full budget. `BIDDING_STRATEGY_PERFORMANCE_GOAL_TYPE_VIEWABLE_CPM` is not supported for this strategy.
  * `raiseBidForDeals` (*type:* `boolean()`, *default:* `nil`) - Controls whether the strategy takes deal floor prices into account.
  """

  use GoogleApi.Gax.ModelBase

  # Generated model: the typespec and the field/1 declarations below must
  # stay in sync with each other and with the API schema.
  @type t :: %__MODULE__{
          :customBiddingAlgorithmId => String.t() | nil,
          :maxAverageCpmBidAmountMicros => String.t() | nil,
          :performanceGoalType => String.t() | nil,
          :raiseBidForDeals => boolean() | nil
        }

  field(:customBiddingAlgorithmId)
  field(:maxAverageCpmBidAmountMicros)
  field(:performanceGoalType)
  field(:raiseBidForDeals)
end
defimpl Poison.Decoder, for: GoogleApi.DisplayVideo.V1.Model.MaximizeSpendBidStrategy do
  alias GoogleApi.DisplayVideo.V1.Model.MaximizeSpendBidStrategy

  # Decoding is delegated to the model's own generated decode/2.
  def decode(value, options) do
    MaximizeSpendBidStrategy.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.DisplayVideo.V1.Model.MaximizeSpendBidStrategy do
  alias GoogleApi.Gax.ModelBase

  # All generated models share ModelBase's generic encoding.
  def encode(value, options) do
    ModelBase.encode(value, options)
  end
end
| 48.053571 | 298 | 0.753623 |
9e4aacdc4e887043312afbe50d9ba6cc08effb47 | 1,355 | exs | Elixir | test/oauth_xyz/model/interact_test.exs | ritou/elixir-oauth-xyz | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | 2 | 2020-04-22T13:22:25.000Z | 2020-12-01T12:01:30.000Z | test/oauth_xyz/model/interact_test.exs | ritou/elixir-oauth-xyz | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | 3 | 2019-12-05T01:32:09.000Z | 2019-12-09T01:15:32.000Z | test/oauth_xyz/model/interact_test.exs | ritou/elixir-oauth-xyz-web | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | null | null | null | defmodule OAuthXYZ.Model.InteractTest do
use OAuthXYZ.DataCase
alias OAuthXYZ.Model.Interact
test "constructor" do
  callback_uri = "https://client.example.net/return/123455"
  callback_nonce = "LKLTI25DK82FX4T4QFZC"

  # Redirect interaction carrying a callback descriptor.
  redirect =
    Interact.parse(%{
      "redirect" => true,
      "callback" => %{"uri" => callback_uri, "nonce" => callback_nonce}
    })

  assert redirect.can_redirect
  assert redirect.callback == %{"uri" => callback_uri, "nonce" => callback_nonce}
  refute redirect.can_user_code
  refute redirect.didcomm
  refute redirect.didcomm_query

  # User-code interaction: only the user-code capability is enabled.
  user_code = Interact.parse(%{"user_code" => true})

  refute user_code.can_redirect
  refute user_code.callback
  assert user_code.can_user_code == true
  refute user_code.didcomm
  refute user_code.didcomm_query

  # DIDComm interaction: only the didcomm flag is set.
  didcomm = Interact.parse(%{"didcomm" => true})

  refute didcomm.can_redirect
  refute didcomm.callback
  refute didcomm.can_user_code
  assert didcomm.didcomm == true
  refute didcomm.didcomm_query

  # DIDComm query interaction: only the didcomm_query flag is set.
  didcomm_query = Interact.parse(%{"didcomm_query" => true})

  refute didcomm_query.can_redirect
  refute didcomm_query.callback
  refute didcomm_query.can_user_code
  refute didcomm_query.didcomm
  assert didcomm_query.didcomm_query == true
end
end
| 21.171875 | 52 | 0.624354 |
9e4ad5e221dbfb105bc16839d7985b55770938b1 | 1,799 | ex | Elixir | chromoid_web/lib/chromoid/application.ex | ConnorRigby/chromoid | 6424a9234227d18d7c287ded869caeb31511bb97 | [
"Apache-2.0"
] | 7 | 2020-11-18T11:29:20.000Z | 2022-01-16T03:16:14.000Z | chromoid_web/lib/chromoid/application.ex | ConnorRigby/chromoid | 6424a9234227d18d7c287ded869caeb31511bb97 | [
"Apache-2.0"
] | null | null | null | chromoid_web/lib/chromoid/application.ex | ConnorRigby/chromoid | 6424a9234227d18d7c287ded869caeb31511bb97 | [
"Apache-2.0"
] | 1 | 2021-01-06T15:40:46.000Z | 2021-01-06T15:40:46.000Z | defmodule Chromoid.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
# Application entry point: builds the top-level supervision tree.
# Children start in the listed order under a :one_for_one supervisor,
# so keep dependencies (repos, PubSub) before the processes that use them.
def start(_type, _args) do
  children = [
    # Start the Ecto repositories
    Chromoid.Repo,
    Chromoid.TimescaleRepo,
    # Start the Telemetry supervisor
    ChromoidWeb.Telemetry,
    # Start the PubSub system
    {Phoenix.PubSub, name: Chromoid.PubSub},
    # Start the Discord bot
    ChromoidDiscord.Supervisor,
    # Start the Device Name registry
    Chromoid.Devices.DeviceRegistry,
    # Start the Device BLE Supervisor
    Chromoid.Devices.BLESupervisor,
    # Start the Device Relay Supervisor
    Chromoid.Devices.RelaySupervisor,
    # Start the Device Presence
    Chromoid.Devices.Presence,
    # Start the Device MQTT gateway
    Chromoid.Devices.MQTTGateway,
    Chromoid.Schedule.Presence,
    Chromoid.Schedule.Registry,
    # Start the schedule handler supervisor (currently disabled)
    # Chromoid.ScheduleSupervisor,
    # Start the Runner checkup process (currently disabled)
    # Chromoid.Schedule.Runner,
    # Start the Endpoint (http/https)
    ChromoidWeb.Endpoint,
    # Start the NFC/RFID WebHook processor
    Chromoid.Devices.NFC.WebHookProcessor,
    # Start the NFC/RFID Action processor
    Chromoid.Devices.NFC.ActionProcessor
  ]

  # See https://hexdocs.pm/elixir/Supervisor.html
  # for other strategies and supported options
  opts = [strategy: :one_for_one, name: Chromoid.Supervisor]
  Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
# Phoenix hook: propagate runtime configuration changes (e.g. from a hot
# upgrade) to the endpoint, then report success.
def config_change(changed, _new, removed) do
  _ = ChromoidWeb.Endpoint.config_change(changed, removed)
  :ok
end
end
| 32.125 | 62 | 0.704836 |
9e4ada21a613c291bcc7190b70099701b6d8215b | 808 | exs | Elixir | test/tacit_test.exs | J3RN/tacit | 2578342d500c135218b0c2dcc0b801c313f3564d | [
"MIT"
] | null | null | null | test/tacit_test.exs | J3RN/tacit | 2578342d500c135218b0c2dcc0b801c313f3564d | [
"MIT"
] | null | null | null | test/tacit_test.exs | J3RN/tacit | 2578342d500c135218b0c2dcc0b801c313f3564d | [
"MIT"
] | null | null | null | defmodule TacitTest do
use ExUnit.Case
# Fixture module exercising Tacit's `let` macro with each supported
# point-free style: remote calls, anonymous functions, imported functions
# and local private functions.
defmodule Example do
  import Tacit
  import String, only: [capitalize: 1]

  # Remote (Module.fun) pipeline: split on whitespace, then reverse.
  let remote =
    String.split()
    |> Enum.reverse()

  # Anonymous-function step via then/1 capture, piped into a remote call.
  let anonymous = then(&String.split(&1, ",")) |> Enum.reverse()

  # Single imported function (String.capitalize/1).
  let imported = capitalize

  # Local private helpers chained together.
  let local = get_age |> add_one

  defp get_age(%{age: age}), do: age
  defp add_one(arg), do: arg + 1
end
# Each test feeds a sample input through the matching `let` pipeline and
# compares it against a hand-computed result (expected value written first).
test "handles remote functions" do
  assert ["3", "2", "1"] == Example.remote("1 2 3")
end

test "handles anonymous functions" do
  assert ["3", "2", "1"] == Example.anonymous("1,2,3")
end

test "handles imported functions" do
  assert "Hello" == Example.imported("hello")
end

test "handles local functions" do
  assert 31 == Example.local(%{name: "J3RN", age: 30})
end
end
| 22.444444 | 66 | 0.612624 |
9e4add105c083cb3470a5f370acf111112563732 | 512 | ex | Elixir | apps/dockup_ui/web/views/layout_view.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | 1 | 2019-08-20T07:45:10.000Z | 2019-08-20T07:45:10.000Z | apps/dockup_ui/web/views/layout_view.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | apps/dockup_ui/web/views/layout_view.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | 2 | 2019-03-08T10:51:34.000Z | 2019-08-20T07:45:13.000Z | defmodule DockupUi.LayoutView do
use DockupUi.Web, :view
# Returns the URL where Dockup's own logs can be inspected, depending on
# which deployment backend is configured: Stackdriver for the Helm backend,
# a log.io viewer on the base domain otherwise.
def dockup_log_url do
  backend = Application.fetch_env!(:dockup_ui, :backend_module)

  case backend do
    Dockup.Backends.Helm ->
      stackdriver_url = Application.fetch_env!(:dockup, :stackdriver_url)
      container_filter = "advancedFilter=resource.labels.container_name%3D%22dockup%22"
      "#{stackdriver_url}&#{container_filter}"

    _ ->
      base_domain = Application.fetch_env!(:dockup, :base_domain)
      "//logio." <> base_domain <> "/#?projectName=dockup"
  end
end
end
| 32 | 79 | 0.677734 |
9e4adf5e7613f73ee20cae4af670305d4027d084 | 541 | exs | Elixir | test/spandex_otlp/adapter_test.exs | fr0stbyte/spandex_otlp | 5c311939c4035ea094cb7c22c96522e2bf08ba42 | [
"MIT"
] | 2 | 2021-05-24T16:34:13.000Z | 2021-05-25T06:14:54.000Z | test/spandex_otlp/adapter_test.exs | fr0stbyte/spandex_otlp | 5c311939c4035ea094cb7c22c96522e2bf08ba42 | [
"MIT"
] | 5 | 2021-05-22T19:30:03.000Z | 2021-05-25T03:01:10.000Z | test/spandex_otlp/adapter_test.exs | fr0stbyte/spandex_otlp | 5c311939c4035ea094cb7c22c96522e2bf08ba42 | [
"MIT"
] | 3 | 2021-05-25T06:15:00.000Z | 2022-01-28T18:58:59.000Z | defmodule SpandexOTLP.AdapterTest do
use ExUnit.Case, async: true
alias SpandexOTLP.Adapter
describe "now/1" do
  test "returns the time in nanoseconds" do
    earlier = Adapter.now()
    later = Adapter.now()
    # Two consecutive readings must be non-decreasing.
    assert earlier <= later
  end
end

describe "span_id/1" do
  test "returns a unique span identifier" do
    # Two freshly generated ids must never collide.
    refute Adapter.span_id() == Adapter.span_id()
  end
end

describe "trace_id/1" do
  test "returns a unique trace identifier" do
    refute Adapter.trace_id() == Adapter.trace_id()
  end
end
end
| 20.037037 | 53 | 0.658041 |
9e4b02e244f48a3a1f816703004c0d5decc20cbf | 2,923 | exs | Elixir | test/blacklist_test.exs | prakriti07/reactive-interaction-gateway | c9d33064982b5ae12e9af7a300e90b3e7973d0b3 | [
"Apache-2.0"
] | null | null | null | test/blacklist_test.exs | prakriti07/reactive-interaction-gateway | c9d33064982b5ae12e9af7a300e90b3e7973d0b3 | [
"Apache-2.0"
] | 1 | 2021-02-23T18:08:32.000Z | 2021-02-23T18:08:32.000Z | test/blacklist_test.exs | prakriti07/reactive-interaction-gateway | c9d33064982b5ae12e9af7a300e90b3e7973d0b3 | [
"Apache-2.0"
] | 1 | 2020-07-17T05:17:32.000Z | 2020-07-17T05:17:32.000Z | defmodule BlacklistTest do
@moduledoc """
Blacklisting a session should terminate active connections and prevent new ones.
"""
use ExUnit.Case, async: true
alias RIG.JWT
@rig_api_url "http://localhost:4010/"
describe "After blacklisting a session," do
  test "the API reports the session to be blacklisted." do
    session_id = "some random string 90238490829084902342"
    blacklist(session_id)
    assert blacklisted?(session_id)
  end

  test "new connections using the same session are no longer allowed." do
    # blacklist a JWT
    session_id = "some random string 98908462643632748511213123"
    blacklist(session_id)
    # try to connect and verify it doesn't work
    # (the session id travels in the JWT's "jti" claim)
    jwt = JWT.encode(%{"jti" => session_id})
    assert {:error, %{code: 400}} = SseClient.try_connect_then_disconnect(jwt: jwt)
    assert {:error, _} = WsClient.try_connect_then_disconnect(jwt: jwt)
  end

  test "active connections related to that session are terminated." do
    # Connect to RIG using a JWT:
    session_id = "some random string 8902731973190231212"
    jwt = JWT.encode(%{"jti" => session_id})
    assert {:ok, sse} = SseClient.connect(jwt: jwt)
    {_, sse} = SseClient.read_welcome_event(sse)
    {_, sse} = SseClient.read_subscriptions_set_event(sse)
    assert {:ok, ws} = WsClient.connect(jwt: jwt)
    {_, ws} = WsClient.read_welcome_event(ws)
    {_, ws} = WsClient.read_subscriptions_set_event(ws)
    # Create an additional connection using a different JWT:
    other_session_id = "some random string 97123689684290890423312"
    other_jwt = JWT.encode(%{"jti" => other_session_id})
    assert {:ok, other_sse} = SseClient.connect(jwt: other_jwt)
    {_, other_sse} = SseClient.read_welcome_event(other_sse)
    {_, other_sse} = SseClient.read_subscriptions_set_event(other_sse)
    # Blacklist only the first JWT using RIG's HTTP API:
    blacklist(session_id)
    # Verify all connections but the last one have been dropped:
    # the killed session first receives a rig.session_killed event, then both
    # its SSE and WS connections close, while the unrelated session stays
    # open and receives nothing.
    assert {_event, sse} = SseClient.read_event(sse, "rig.session_killed")
    assert {:closed, sse} = SseClient.status(sse)
    assert {:closed, ws} = WsClient.status(ws)
    assert {:ok, other_sse} = SseClient.refute_receive(other_sse)
    assert {:open, other_sse} = SseClient.status(other_sse)
  end
end
# ---
# Blacklists `session_id` for 60 seconds via RIG's HTTP API and asserts
# the API acknowledged it with 201 Created.
defp blacklist(session_id) do
  payload = Jason.encode!(%{validityInSeconds: 60, sessionId: session_id})
  headers = [{"content-type", "application/json"}]

  {:ok, %HTTPoison.Response{status_code: 201}} =
    HTTPoison.post("#{@rig_api_url}/v2/session-blacklist", payload, headers)
end
# ---
# Checks whether the session id `jti` is currently blacklisted:
# 200 means blacklisted, 404 means not. Any other outcome crashes the test.
defp blacklisted?(jti) do
  url = "#{@rig_api_url}/v2/session-blacklist/#{URI.encode(jti)}"

  case HTTPoison.get(url) do
    {:ok, %HTTPoison.Response{status_code: 200}} -> true
    {:ok, %HTTPoison.Response{status_code: 404}} -> false
  end
end
end
| 32.477778 | 85 | 0.673623 |
9e4b4574037bb856d0d8485e33d9ba36ab137ef9 | 986 | exs | Elixir | test/processor_state_test.exs | pragdave/jeeves | a5f745463f70540de1b0bbea29e629adbf0c45e8 | [
"Apache-2.0"
] | 87 | 2017-03-03T05:53:04.000Z | 2020-07-30T02:41:50.000Z | test/processor_state_test.exs | pragdave/jeeves | a5f745463f70540de1b0bbea29e629adbf0c45e8 | [
"Apache-2.0"
] | 4 | 2017-03-03T18:44:55.000Z | 2017-08-11T16:25:32.000Z | test/processor_state_test.exs | pragdave/service | a5f745463f70540de1b0bbea29e629adbf0c45e8 | [
"Apache-2.0"
] | 6 | 2017-03-03T18:11:46.000Z | 2018-06-27T23:53:16.000Z | defmodule ProcessorStateTest do
use ExUnit.Case
alias Jeeves.Util.PreprocessorState, as: PS
@some_options %{ name: "Vince", status: "playa" }
@name PS.name_for(__MODULE__)
test "can be started and stopped" do
  # Nothing is registered under @name before start_link/2...
  assert Process.whereis(@name) == nil

  # ...a pid is registered while running...
  PS.start_link(__MODULE__, @some_options)
  assert @name |> Process.whereis() |> is_pid()

  # ...and the registration disappears again after stop/1.
  PS.stop(__MODULE__)
  assert Process.whereis(@name) == nil
end
describe "Once started" do
  setup do
    # Linked to the test process, so the server dies with the test —
    # no explicit stop needed.
    PS.start_link(__MODULE__, @some_options)
    :ok
  end

  test "maintains initial options" do
    assert PS.options(__MODULE__) == @some_options
  end

  # Test name fixed: was the garbled "maintains starts with no functions".
  test "starts with no functions" do
    assert PS.function_list(__MODULE__) == []
  end

  test "records functions" do
    # Functions are prepended, so the list comes back most-recent-first.
    PS.add_function(__MODULE__, :one)
    PS.add_function(__MODULE__, :two)
    assert PS.function_list(__MODULE__) == [:two, :one]
  end
end
end
| 24.65 | 92 | 0.669371 |
9e4b6a06b1326c2d8764b00d67a275ba70beeb14 | 2,994 | ex | Elixir | apps/astarte_appengine_api/lib/astarte_appengine_api/rpc/data_updater_plant.ex | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 4 | 2018-03-15T14:26:43.000Z | 2019-12-20T15:07:10.000Z | apps/astarte_appengine_api/lib/astarte_appengine_api/rpc/data_updater_plant.ex | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 27 | 2018-06-28T16:00:07.000Z | 2019-12-19T10:05:36.000Z | apps/astarte_appengine_api/lib/astarte_appengine_api/rpc/data_updater_plant.ex | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 2 | 2018-03-16T09:37:29.000Z | 2019-11-18T17:37:42.000Z | #
# This file is part of Astarte.
#
# Copyright 2018 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.AppEngine.API.RPC.DataUpdaterPlant do
  @moduledoc """
  This module sends RPC to DataUpdaterPlant
  """

  require Logger

  alias Astarte.RPC.Protocol.DataUpdaterPlant, as: Protocol

  alias Astarte.RPC.Protocol.DataUpdaterPlant.{
    Call,
    DeleteVolatileTrigger,
    GenericErrorReply,
    GenericOkReply,
    InstallVolatileTrigger,
    Reply
  }

  alias Astarte.AppEngine.API.Config
  alias Astarte.AppEngine.API.RPC.DataUpdaterPlant.VolatileTrigger

  # Resolved at compile time: the RPC client implementation comes from the
  # application config, the AMQP queue name from the protocol definition.
  @rpc_client Config.rpc_client()
  @destination Protocol.amqp_queue()

  @doc """
  Installs a volatile (non-persisted) trigger for `device_id` in
  `realm_name` by sending an `InstallVolatileTrigger` RPC to
  Data Updater Plant.

  Returns `:ok` on success, `{:error, reason}` otherwise.
  """
  def install_volatile_trigger(realm_name, device_id, %VolatileTrigger{} = volatile_trigger) do
    %VolatileTrigger{
      object_id: object_id,
      object_type: object_type,
      serialized_simple_trigger: serialized_simple_trigger,
      parent_id: parent_id,
      simple_trigger_id: simple_trigger_id,
      serialized_trigger_target: serialized_trigger_target
    } = volatile_trigger

    %InstallVolatileTrigger{
      realm_name: realm_name,
      device_id: device_id,
      object_id: object_id,
      object_type: object_type,
      parent_id: parent_id,
      simple_trigger: serialized_simple_trigger,
      simple_trigger_id: simple_trigger_id,
      trigger_target: serialized_trigger_target
    }
    |> encode_call(:install_volatile_trigger)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  @doc """
  Deletes the volatile trigger identified by `trigger_id` for `device_id`
  in `realm_name` via a `DeleteVolatileTrigger` RPC.

  Returns `:ok` on success, `{:error, reason}` otherwise.
  """
  def delete_volatile_trigger(realm_name, device_id, trigger_id) do
    %DeleteVolatileTrigger{
      realm_name: realm_name,
      device_id: device_id,
      trigger_id: trigger_id
    }
    |> encode_call(:delete_volatile_trigger)
    |> @rpc_client.rpc_call(@destination)
    |> decode_reply()
    |> extract_reply()
  end

  # Wraps a protocol sub-message into the top-level Call envelope and
  # serializes it to binary for transport.
  defp encode_call(call, callname) do
    %Call{call: {callname, call}}
    |> Call.encode()
  end

  # Successful transport: decode the Reply envelope and hand back its payload.
  defp decode_reply({:ok, encoded_reply}) when is_binary(encoded_reply) do
    %Reply{reply: reply} = Reply.decode(encoded_reply)

    reply
  end

  # Transport-level failure: log it and propagate the error tuple as-is.
  defp decode_reply({:error, reason}) do
    _ = Logger.warn("RPC error: #{inspect(reason)}.", tag: "rpc_remote_exception")
    {:error, reason}
  end

  # Map the decoded protocol reply onto this module's return contract.
  defp extract_reply({:generic_ok_reply, %GenericOkReply{}}) do
    :ok
  end

  defp extract_reply({:generic_error_reply, error_struct = %GenericErrorReply{}}) do
    error_map = Map.from_struct(error_struct)
    {:error, error_map}
  end

  # Bug fix: decode_reply/1 returns {:error, reason} on transport failures,
  # but no extract_reply clause matched it, so any RPC transport error
  # crashed with a FunctionClauseError. Pass the error tuple through instead.
  defp extract_reply({:error, _reason} = error) do
    error
  end
end
| 28.245283 | 95 | 0.727121 |
9e4b6da639dfed138e371f97c9ab9799a1daecd4 | 1,689 | ex | Elixir | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/create_folder_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/create_folder_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/create_folder_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudResourceManager.V1.Model.CreateFolderMetadata do
@moduledoc """
Metadata pertaining to the Folder creation process.
## Attributes
* `displayName` (*type:* `String.t`, *default:* `nil`) - The display name of the folder.
* `parent` (*type:* `String.t`, *default:* `nil`) - The resource name of the folder or organization we are creating the folder under.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:displayName => String.t() | nil,
:parent => String.t() | nil
}
field(:displayName)
field(:parent)
end
defimpl Poison.Decoder, for: GoogleApi.CloudResourceManager.V1.Model.CreateFolderMetadata do
def decode(value, options) do
GoogleApi.CloudResourceManager.V1.Model.CreateFolderMetadata.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudResourceManager.V1.Model.CreateFolderMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.78 | 137 | 0.734754 |
9e4b736bba986753a34b50d83529fbba17ccc3e4 | 3,924 | exs | Elixir | lib/ex_unit/test/ex_unit/supervised_test.exs | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 2 | 2020-06-02T18:00:28.000Z | 2021-12-10T03:21:42.000Z | lib/ex_unit/test/ex_unit/supervised_test.exs | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 1 | 2020-09-14T16:23:33.000Z | 2021-03-25T17:38:59.000Z | lib/ex_unit/test/ex_unit/supervised_test.exs | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 1 | 2018-01-09T20:10:59.000Z | 2018-01-09T20:10:59.000Z | Code.require_file("../test_helper.exs", __DIR__)
defmodule ExUnit.SupervisedTest do
use ExUnit.Case, async: true
defmodule MyAgent do
use Agent
def start_link(:error) do
{:error, "some error"}
end
def start_link(:exception) do
raise "some exception"
end
def start_link(arg) do
Agent.start_link(fn -> arg end, name: __MODULE__)
end
end
test "returns error if the supervised process returns an error tuple" do
{:error, error} = start_supervised({MyAgent, :error})
assert {"some error", _info} = error
message =
"failed to start child with the spec {ExUnit.SupervisedTest.MyAgent, :error}.\n" <>
"Reason: \"some error\""
assert_raise RuntimeError, message, fn ->
start_supervised!({MyAgent, :error})
end
end
test "returns error if the supervised process raises an exception" do
{:error, {{:EXIT, {exception, _}}, _}} = start_supervised({MyAgent, :exception})
assert exception == %RuntimeError{message: "some exception"}
message =
"failed to start child with the spec {ExUnit.SupervisedTest.MyAgent, :exception}.\n" <>
"Reason: an exception was raised:\n" <> " ** (RuntimeError) some exception"
exception =
assert_raise RuntimeError, fn ->
start_supervised!({MyAgent, :exception})
end
assert String.starts_with?(Exception.message(exception), message)
end
test "starts a supervised process that terminates before on_exit" do
{:ok, pid} = start_supervised(MyAgent)
assert Process.alive?(pid)
on_exit(fn -> refute Process.alive?(pid) end)
end
test "starts a supervised process that is permanent" do
{:ok, _} = start_supervised({MyAgent, 0})
Agent.update(MyAgent, &(&1 + 1))
assert Agent.get(MyAgent, & &1) == 1
Agent.stop(MyAgent)
wait_until_registered(MyAgent)
assert Agent.get(MyAgent, & &1) == 0
end
test "starts a supervised process that is temporary" do
{:ok, _} = start_supervised({MyAgent, 0}, restart: :temporary)
Agent.update(MyAgent, &(&1 + 1))
assert Agent.get(MyAgent, & &1) == 1
Agent.stop(MyAgent)
refute Process.whereis(MyAgent)
end
test "starts a supervised process with ID checks" do
{:ok, pid} = start_supervised({MyAgent, 0})
assert {:error, {:duplicate_child_name, ExUnit.SupervisedTest.MyAgent}} =
start_supervised({MyAgent, 0})
assert {:error, {{:already_started, ^pid}, _}} = start_supervised({MyAgent, 0}, id: :another)
assert_raise RuntimeError, ~r"Reason: bad child specification", fn ->
start_supervised!(%{id: 1, start: :oops})
end
assert_raise RuntimeError, ~r"Reason: already started", fn ->
start_supervised!({MyAgent, 0}, id: :another)
end
end
test "stops a supervised process" do
{:ok, pid} = start_supervised({MyAgent, 0})
assert stop_supervised(MyAgent) == :ok
refute Process.alive?(pid)
end
test "stops! a supervised process" do
{:ok, pid} = start_supervised({MyAgent, 0})
assert stop_supervised!(MyAgent) == :ok
refute Process.alive?(pid)
end
test "does not stop unknown processes" do
assert stop_supervised(:unknown) == {:error, :not_found}
{:ok, _} = start_supervised({MyAgent, 0})
assert stop_supervised(:unknown) == {:error, :not_found}
end
test "raises if starting or stopping outside of test process" do
Task.async(fn ->
message = "start_supervised/2 can only be invoked from the test process"
assert_raise ArgumentError, message, fn ->
start_supervised(MyAgent)
end
message = "stop_supervised/1 can only be invoked from the test process"
assert_raise ArgumentError, message, fn ->
stop_supervised(MyAgent)
end
end)
|> Task.await()
end
defp wait_until_registered(name) do
unless Process.whereis(name) do
wait_until_registered(name)
end
end
end
| 29.727273 | 97 | 0.664118 |
9e4b99e187e5d03fac7a453cf803f476a80e9594 | 3,605 | ex | Elixir | Chapter6/6.2/key_value_gen_server.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter6/6.2/key_value_gen_server.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter6/6.2/key_value_gen_server.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | # KeyValueStore
# Explore servers that utilize the GenServer wrapper of the OTP gen_server behaviour
defmodule KeyValueStore do
use GenServer
# ---------
# GenServer behaviour hook functions
# ---------
# hook into GenServer
# the @impl module attribute can perform compile-time checking of contracts between behaviours
# GenServer init callback: sets up periodic work and the initial state.
# @impl asks the compiler to verify this matches a GenServer callback.
@impl GenServer
def init(_) do
  # say there's a use case for periodic tasks such as server cleanup:
  # Erlang's :timer.send_interval/2 delivers :cleanup to this process
  # every 5000 ms, handled below in handle_info/2.
  :timer.send_interval(5000, :cleanup)
  {:ok, %{}}
  # we can also choose not to start the server, or to ignore the request:
  # using the form {:stop, reason}, the return signature of start/0 will be {:error, reason}
  # {:stop, reason} should be used when the server has encountered an error and cannot proceed
  # using the form :ignore, the return signature of start/0 will be :ignore
  # :ignore should be used when stopping the server is a normal course of action
end
# the handle_info GenServer hook can be used to listen to periodically emitted data
# these requests are neither calls nor casts
# the @impl module attribute can perform compile-time checking of contracts between behaviours
@impl GenServer
def handle_info(:cleanup, state) do
  # Placeholder cleanup work; a real server would prune stale entries here.
  IO.puts("Performing cleanup...")
  {:noreply, state}
end
# requests to stop the server can also be issued inside handle_* functions
# returning {:stop, reason, new_state} will cause GenServer to stop the running process
# :normal should be used as the stoppage reason if stopping the process is standard workflow
# the @impl module attribute can perform compile-time checking of contracts between behaviours
@impl GenServer
def handle_cast({:put, key, value}, state) do
{:noreply, Map.put(state, key, value)}
end
# if a handle_call/3 function returns a stoppage and also needs to respond to the caller,
# return {:stop, reason, rseponse, new_state}
# but why return a new_state if the process is terminating?
# in some cases, new_state may be necessary for any necessary cleanup
# the @impl module attribute can perform compile-time checking of contracts between behaviours
# handle_call functions must always be arity 3, never arity 2
# as GenServer requires arity 3, this will raise a compilation error if not arity 3
@impl GenServer
def handle_call({:get, key}, _, state) do
# _ is a tuple containing the request ID (created by GenServer internals) and the calling pid
# this example has no use for this information so we ignore it
{:reply, Map.get(state, key), state}
end
# ---------
# Interface functions
# ---------
def start do
# using the __MODULE__ special form,
# we can freely change the KeyValueStore name in one place and have it update everywhere
GenServer.start(
__MODULE__, # current module name (KeyValueStore)
nil, # initial state
name: __MODULE__ # local name that applies only to the currently running BEAM instance (KeyValueStore)
)
end
def stop(reason \\ :normal, timeout \\ :infinity) do
# the server can also be stopped using GenServer.stop/3
GenServer.stop(__MODULE__, reason, timeout)
end
def put(key, value) do
GenServer.cast(__MODULE__, {:put, key, value})
end
def get(key) do
# NOTE: GenServer.call/2 does not wait indefinitely for a response
# After a default of 5 seconds, the request times out
# GenServer.call/3 can be used to specify a longer timeout
GenServer.call(__MODULE__, {:get, key})
end
end | 39.184783 | 108 | 0.720388 |
9e4bbeb3aa04d35ae7f6a2de5292ecf0d39b31d8 | 192 | ex | Elixir | lib/mapi/response/json.ex | codedge-llc/mapi | 5e0db722658534bc06c7538b202083e6e7be240c | [
"MIT"
] | 3 | 2018-02-05T03:37:27.000Z | 2020-02-19T23:58:22.000Z | lib/mapi/response/json.ex | codedge-llc/mapi | 5e0db722658534bc06c7538b202083e6e7be240c | [
"MIT"
] | null | null | null | lib/mapi/response/json.ex | codedge-llc/mapi | 5e0db722658534bc06c7538b202083e6e7be240c | [
"MIT"
] | null | null | null | defmodule Mapi.Response.Json do
@moduledoc false
@behaviour Mapi.Response
def content_type, do: "application/json"
def format_result(result) do
Poison.encode!(result)
end
end
| 16 | 42 | 0.739583 |
9e4bc9fef2775d141a82a347446d516ccdeb6d9b | 356 | exs | Elixir | priv/repo/migrations/20141227132645_initial_ledgers_create.exs | walterbm/hyperledger-beta | 493cd02597775be917427d2d75ff293f53d3bc9d | [
"Apache-2.0"
] | 1 | 2017-06-14T07:04:47.000Z | 2017-06-14T07:04:47.000Z | priv/repo/migrations/20141227132645_initial_ledgers_create.exs | walterbm/hyperledger-beta | 493cd02597775be917427d2d75ff293f53d3bc9d | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20141227132645_initial_ledgers_create.exs | walterbm/hyperledger-beta | 493cd02597775be917427d2d75ff293f53d3bc9d | [
"Apache-2.0"
] | null | null | null | defmodule Hyperledger.Repo.Migrations.InitialLedgersCreate do
use Ecto.Migration
def up do
create table(:ledgers, primary_key: false) do
add :hash, :string, primary_key: true
add :public_key, :string
add :primary_account_public_key, :string
timestamps
end
end
def down do
drop table(:ledgers)
end
end
| 19.777778 | 61 | 0.685393 |
9e4bd7d44f84207d8650d687571672893e88001a | 261 | ex | Elixir | lib/transferdrop.ex | sheosi/transferdrop | 9021fd1c0d2d28dddc141264ce95a4e8d896c3ee | [
"MIT"
] | null | null | null | lib/transferdrop.ex | sheosi/transferdrop | 9021fd1c0d2d28dddc141264ce95a4e8d896c3ee | [
"MIT"
] | null | null | null | lib/transferdrop.ex | sheosi/transferdrop | 9021fd1c0d2d28dddc141264ce95a4e8d896c3ee | [
"MIT"
] | null | null | null | defmodule Transferdrop do
@moduledoc """
Transferdrop keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 26.1 | 66 | 0.762452 |
9e4bf7d3c1ce8387c767c805f470da2a3d75124f | 1,301 | ex | Elixir | test/support/conn_case.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | 1 | 2021-07-24T16:26:03.000Z | 2021-07-24T16:26:03.000Z | test/support/conn_case.ex | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | defmodule SanbaseWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import SanbaseWeb.Router.Helpers
# The default endpoint for testing
@endpoint SanbaseWeb.Endpoint
end
end
setup tags do
require Sanbase.CaseHelpers
SanbaseWeb.Graphql.Cache.clear_all()
Sanbase.CaseHelpers.checkout_shared(tags)
conn = Phoenix.ConnTest.build_conn()
product_and_plans = Sanbase.Billing.TestSeed.seed_products_and_plans()
{:ok,
conn: conn,
product: Map.get(product_and_plans, :product),
plans: Map.delete(product_and_plans, :product),
next_integer: fn -> :erlang.unique_integer([:monotonic, :positive]) end}
end
end
| 27.104167 | 77 | 0.727133 |
9e4bfa51e2e0664d9e977a0c330ca741cedba632 | 1,956 | exs | Elixir | config/dev.exs | niku/nigiwaiki | 52c37d2abb2d19084d6bda69c10773ed87701135 | [
"MIT"
] | 2 | 2017-12-07T10:57:47.000Z | 2018-02-04T09:01:05.000Z | config/dev.exs | niku/nigiwaiki | 52c37d2abb2d19084d6bda69c10773ed87701135 | [
"MIT"
] | null | null | null | config/dev.exs | niku/nigiwaiki | 52c37d2abb2d19084d6bda69c10773ed87701135 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :nigiwiki, NigiwikiWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/brunch/bin/brunch",
"watch",
"--stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :nigiwiki, NigiwikiWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/nigiwiki_web/views/.*(ex)$},
~r{lib/nigiwiki_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database
config :nigiwiki, Nigiwiki.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "nigiwiki_dev",
hostname: "localhost",
pool_size: 10
| 30.092308 | 170 | 0.697853 |
9e4c1a572808b5e8945d1d3d38a7e081cbab9203 | 3,490 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/line_properties.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/slides/lib/google_api/slides/v1/model/line_properties.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/line_properties.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Slides.V1.Model.LineProperties do
@moduledoc """
The properties of the Line.
When unset, these fields default to values that match the appearance of
new lines created in the Slides editor.
## Attributes
* `dashStyle` (*type:* `String.t`, *default:* `nil`) - The dash style of the line.
* `endArrow` (*type:* `String.t`, *default:* `nil`) - The style of the arrow at the end of the line.
* `endConnection` (*type:* `GoogleApi.Slides.V1.Model.LineConnection.t`, *default:* `nil`) - The connection at the end of the line. If unset, there is no connection.
Only lines with a Type indicating it is
a "connector" can have an `end_connection`.
* `lineFill` (*type:* `GoogleApi.Slides.V1.Model.LineFill.t`, *default:* `nil`) - The fill of the line. The default line fill matches the defaults for new
lines created in the Slides editor.
* `link` (*type:* `GoogleApi.Slides.V1.Model.Link.t`, *default:* `nil`) - The hyperlink destination of the line. If unset, there is no link.
* `startArrow` (*type:* `String.t`, *default:* `nil`) - The style of the arrow at the beginning of the line.
* `startConnection` (*type:* `GoogleApi.Slides.V1.Model.LineConnection.t`, *default:* `nil`) - The connection at the beginning of the line. If unset, there is no
connection.
Only lines with a Type indicating it is
a "connector" can have a `start_connection`.
* `weight` (*type:* `GoogleApi.Slides.V1.Model.Dimension.t`, *default:* `nil`) - The thickness of the line.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dashStyle => String.t(),
:endArrow => String.t(),
:endConnection => GoogleApi.Slides.V1.Model.LineConnection.t(),
:lineFill => GoogleApi.Slides.V1.Model.LineFill.t(),
:link => GoogleApi.Slides.V1.Model.Link.t(),
:startArrow => String.t(),
:startConnection => GoogleApi.Slides.V1.Model.LineConnection.t(),
:weight => GoogleApi.Slides.V1.Model.Dimension.t()
}
field(:dashStyle)
field(:endArrow)
field(:endConnection, as: GoogleApi.Slides.V1.Model.LineConnection)
field(:lineFill, as: GoogleApi.Slides.V1.Model.LineFill)
field(:link, as: GoogleApi.Slides.V1.Model.Link)
field(:startArrow)
field(:startConnection, as: GoogleApi.Slides.V1.Model.LineConnection)
field(:weight, as: GoogleApi.Slides.V1.Model.Dimension)
end
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.LineProperties do
def decode(value, options) do
GoogleApi.Slides.V1.Model.LineProperties.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.LineProperties do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 44.177215 | 169 | 0.701433 |
9e4c3249ca934de84db4d1417724de7cdf06e1ef | 7,809 | exs | Elixir | apps/omg_watcher/test/fixtures.exs | karmonezz/elixir-omg | 3b26fc072fa553992277e1b9c4bad37b3d61ec6a | [
"Apache-2.0"
] | 1 | 2020-05-01T12:30:09.000Z | 2020-05-01T12:30:09.000Z | apps/omg_watcher/test/fixtures.exs | karmonezz/elixir-omg | 3b26fc072fa553992277e1b9c4bad37b3d61ec6a | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/test/fixtures.exs | karmonezz/elixir-omg | 3b26fc072fa553992277e1b9c4bad37b3d61ec6a | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unfortunately something is wrong with the fixtures loading in `test_helper.exs` and the following needs to be done
Code.require_file("#{__DIR__}/../../omg_child_chain/test/omg_child_chain/integration/fixtures.exs")
defmodule OMG.Watcher.Fixtures do
use ExUnitFixtures.FixtureModule
use OMG.Eth.Fixtures
use OMG.DB.Fixtures
use OMG.ChildChain.Integration.Fixtures
use OMG.Utils.LoggerExt
alias Ecto.Adapters.SQL
alias FakeServer.Agents.EnvAgent
alias FakeServer.HTTP.Server
alias OMG.Crypto
alias OMG.Watcher
alias OMG.Watcher.DB
alias Watcher.TestHelper
@eth OMG.Eth.RootChain.eth_pseudo_address()
deffixture child_chain(contract, fee_file) do
config_file_path = Briefly.create!(extname: ".exs")
db_path = Briefly.create!(directory: true)
config_file_path
|> File.open!([:write])
|> IO.binwrite("""
#{OMG.Eth.DevHelpers.create_conf_file(contract)}
config :omg_db, path: "#{db_path}"
# this causes the inner test child chain server process to log debug. To see these logs adjust test's log level
config :logger, level: :debug
config :omg_child_chain, fee_specs_file_name: "#{fee_file}"
""")
|> File.close()
{:ok, config} = File.read(config_file_path)
Logger.debug(IO.ANSI.format([:blue, :bright, config], true))
Logger.debug("Starting db_init")
exexec_opts_for_mix = [
stdout: :stream,
cd: Application.fetch_env!(:omg_watcher, :umbrella_root_dir),
env: %{"MIX_ENV" => to_string(Mix.env())},
# group 0 will create a new process group, equal to the OS pid of that process
group: 0,
kill_group: true
]
{:ok, _db_proc, _ref, [{:stream, db_out, _stream_server}]} =
Exexec.run_link(
"mix run --no-start -e ':ok = OMG.DB.init()' --config #{config_file_path} 2>&1",
exexec_opts_for_mix
)
db_out |> Enum.each(&log_output("db_init", &1))
child_chain_mix_cmd = " mix xomg.child_chain.start --config #{config_file_path} 2>&1"
Logger.info("Starting child_chain")
{:ok, child_chain_proc, _ref, [{:stream, child_chain_out, _stream_server}]} =
Exexec.run_link(child_chain_mix_cmd, exexec_opts_for_mix)
fn ->
child_chain_out |> Enum.each(&log_output("child_chain", &1))
end
|> Task.async()
on_exit(fn ->
# NOTE see DevGeth.stop/1 for details
_ = Process.monitor(child_chain_proc)
:ok =
case Exexec.stop_and_wait(child_chain_proc) do
:normal ->
:ok
:shutdown ->
:ok
:noproc ->
:ok
other ->
_ = Logger.warn("Child chain stopped with an unexpected reason")
other
end
File.rm(config_file_path)
File.rm_rf(db_path)
end)
:ok
end
defp log_output(prefix, line) do
Logger.debug("#{prefix}: " <> line)
line
end
deffixture watcher(db_initialized, root_chain_contract_config) do
:ok = db_initialized
:ok = root_chain_contract_config
{:ok, started_apps} = Application.ensure_all_started(:omg_db)
{:ok, started_watcher} = Application.ensure_all_started(:omg_watcher)
{:ok, started_watcher_api} = Application.ensure_all_started(:omg_watcher_rpc)
[] = DB.Repo.all(DB.Block)
on_exit(fn ->
Application.put_env(:omg_db, :path, nil)
(started_apps ++ started_watcher ++ started_watcher_api)
|> Enum.reverse()
|> Enum.map(fn app -> :ok = Application.stop(app) end)
end)
end
deffixture web_endpoint do
Application.ensure_all_started(:spandex_ecto)
Application.ensure_all_started(:telemetry)
:telemetry.attach(
"spandex-query-tracer",
[:omg, :watcher, :db, :repo, :query],
&SpandexEcto.TelemetryAdapter.handle_event/4,
nil
)
{:ok, pid} =
Supervisor.start_link(
[
%{id: OMG.WatcherRPC.Web.Endpoint, start: {OMG.WatcherRPC.Web.Endpoint, :start_link, []}, type: :supervisor}
],
strategy: :one_for_one,
name: Watcher.Endpoint
)
_ = Application.load(:omg_watcher_rpc)
on_exit(fn ->
TestHelper.wait_for_process(pid)
:ok
end)
end
@doc "run only database in sandbox and endpoint to make request"
deffixture phoenix_ecto_sandbox(web_endpoint) do
:ok = web_endpoint
{:ok, pid} =
Supervisor.start_link(
[%{id: DB.Repo, start: {DB.Repo, :start_link, []}, type: :supervisor}],
strategy: :one_for_one,
name: Watcher.Supervisor
)
:ok = SQL.Sandbox.checkout(DB.Repo)
# setup and body test are performed in one process, `on_exit` is performed in another
on_exit(fn ->
TestHelper.wait_for_process(pid)
:ok
end)
end
deffixture initial_blocks(alice, bob, blocks_inserter, initial_deposits) do
:ok = initial_deposits
[
{1000,
[
OMG.TestHelper.create_recovered([{1, 0, 0, alice}], @eth, [{bob, 300}]),
OMG.TestHelper.create_recovered([{1000, 0, 0, bob}], @eth, [{alice, 100}, {bob, 200}])
]},
{2000,
[
OMG.TestHelper.create_recovered([{1000, 1, 0, alice}], @eth, [{bob, 99}, {alice, 1}], <<1337::256>>)
]},
{3000,
[
OMG.TestHelper.create_recovered([], @eth, [{alice, 150}]),
OMG.TestHelper.create_recovered([{1000, 1, 1, bob}], @eth, [{bob, 150}, {alice, 50}])
]}
]
|> blocks_inserter.()
end
deffixture initial_deposits(alice, bob, phoenix_ecto_sandbox) do
:ok = phoenix_ecto_sandbox
deposits = [
%{
root_chain_txhash: Crypto.hash(<<1000::256>>),
log_index: 0,
owner: alice.addr,
currency: @eth,
amount: 333,
blknum: 1
},
%{
root_chain_txhash: Crypto.hash(<<2000::256>>),
log_index: 0,
owner: bob.addr,
currency: @eth,
amount: 100,
blknum: 2
}
]
# Initial data depending tests can reuse
DB.EthEvent.insert_deposits!(deposits)
:ok
end
deffixture blocks_inserter(phoenix_ecto_sandbox) do
:ok = phoenix_ecto_sandbox
fn blocks -> blocks |> Enum.flat_map(&prepare_one_block/1) end
end
deffixture test_server do
{:ok, server_id, port} = Server.run()
env = FakeServer.Env.new(port)
EnvAgent.save_env(server_id, env)
real_addr = Application.fetch_env!(:omg_watcher, :child_chain_url)
old_client_env = Application.fetch_env!(:omg_watcher, :child_chain_url)
fake_addr = "http://#{env.ip}:#{env.port}"
on_exit(fn ->
Application.put_env(:omg_watcher, :child_chain_url, old_client_env)
Server.stop(server_id)
EnvAgent.delete_env(server_id)
end)
%{
real_addr: real_addr,
fake_addr: fake_addr,
server_id: server_id
}
end
defp prepare_one_block({blknum, recovered_txs}) do
{:ok, _} =
DB.Transaction.update_with(%{
transactions: recovered_txs,
blknum: blknum,
blkhash: "##{blknum}",
timestamp: 1_540_465_606,
eth_height: 1
})
recovered_txs
|> Enum.with_index()
|> Enum.map(fn {recovered_tx, txindex} -> {blknum, txindex, recovered_tx.tx_hash, recovered_tx} end)
end
end
| 28.191336 | 118 | 0.641952 |
9e4c5945dfd6b7b90b39a4d5433b04c0aabde897 | 2,519 | ex | Elixir | lib/deneb/chart.ex | tyrchen/deneb | 97759b57700816471feabf3fb97676d5e28e2261 | [
"MIT"
] | 4 | 2021-03-07T01:11:30.000Z | 2021-09-03T06:40:48.000Z | lib/deneb/chart.ex | tyrchen/deneb | 97759b57700816471feabf3fb97676d5e28e2261 | [
"MIT"
] | null | null | null | lib/deneb/chart.ex | tyrchen/deneb | 97759b57700816471feabf3fb97676d5e28e2261 | [
"MIT"
] | 1 | 2021-05-04T12:06:39.000Z | 2021-05-04T12:06:39.000Z | defmodule Deneb.Chart do
@moduledoc """
Vega-lite chart implementation
"""
use TypedStruct
alias Deneb.{Chart, Mark, Encoding, Projection, Selection, Transform, Utils}
typedstruct do
@typedoc "chart properties"
field :mark, Mark.t() | nil, default: nil
field :encoding, Encoding.t() | nil, default: nil
field :projection, Selection.t() | nil, default: nil
field :selection, Projection.t() | nil, default: nil
field :transform, Transform.t() | nil, default: nil
field :raw, String.t() | map() | nil, default: nil
end
def new(raw) when is_binary(raw) or is_map(raw) do
%Chart {
raw: raw
}
end
def new(mark, encoding, opts \\ []) do
selection = opts[:selection] || nil
transform = opts[:transform] || nil
projection = opts[:projection] || nil
%Chart {
mark: mark,
encoding: encoding,
selection: selection,
transform: transform,
projection: projection
}
end
def to_json(chart, base_chart_opts \\ [])
def to_json(%Chart{} = chart, base_chart_opts) do
result = chart
|> Utils.to_map()
|> Enum.reduce(%{}, fn {k, v}, acc ->
cond do
is_struct(v) -> Map.put(acc, k, apply(v.__struct__, :to_json, [v]))
is_nil(v) -> acc
true -> Map.put(acc, k, v)
end
end)
Map.merge(result, Utils.encode(base_chart_opts))
end
def to_json(_chart, _base_chart_opts), do: raise "Please provide an Chart object to this function"
def repeat(chart, repeat, base_chart_opts \\ []) do
data = Chart.to_json(chart, base_chart_opts)
Utils.encode(%{spec: data, repeat: repeat})
end
def layer(charts, base_chart_opts \\ []) do
data = Utils.encode(base_chart_opts)
Map.put(data, :layer, charts_to_json(charts))
end
def hconcat(charts, base_chart_opts \\ []) do
data = Utils.encode(base_chart_opts)
Map.put(data, :hconcat, charts_to_json(charts))
end
def vconcat(charts, base_chart_opts \\ []) do
data = Utils.encode(base_chart_opts)
Map.put(data, :vconcat, charts_to_json(charts))
end
def concat(charts, columns \\ 2, base_chart_opts \\ []) do
data = Utils.encode(base_chart_opts)
data
|> Map.put(:columns, columns)
|> Map.put(:concat, Enum.map(charts, &Chart.to_json/1))
end
# private functions
defp charts_to_json(charts) do
Enum.map(charts, fn chart ->
case is_struct(chart) do
true -> apply(chart.__struct__, :to_json, [chart])
_ -> chart
end
end)
end
end
| 27.681319 | 100 | 0.638349 |
9e4cad21ad0fe205c5235504af0d08232eaaf6c2 | 3,072 | exs | Elixir | test/story_test.exs | LeviSchuck/madness | 2914f012bd4f40cef2ad39a6d0344a97baf35c79 | [
"MIT"
] | null | null | null | test/story_test.exs | LeviSchuck/madness | 2914f012bd4f40cef2ad39a6d0344a97baf35c79 | [
"MIT"
] | null | null | null | test/story_test.exs | LeviSchuck/madness | 2914f012bd4f40cef2ad39a6d0344a97baf35c79 | [
"MIT"
] | null | null | null | defmodule StoryTest do
use ExUnit.Case
doctest Madness.Story
test "the truth" do
assert 1 + 1 == 2
end
test "Make a story" do
story_sup_pid = Madness.Stories.Supervisor.whereis
{:ok, story_uuid} = Madness.Stories.Supervisor.add_story story_sup_pid
story_pid = Madness.Story.whereis story_uuid
{:ok, area_uuid} = Madness.Story.add_area story_pid
area_pid = Madness.Story.Area.whereis area_uuid
Madness.Story.Area.add_say(area_pid, :plain, "You wander an aimless road.")
Madness.Story.Area.add_say(area_pid, :plain, "It smells like fish.")
Madness.Story.Area.add_say(area_pid, "B", :plain, "It does not seem to lead anywhere.", 1)
Madness.Story.Area.add_condition(area_pid, "B", :eq, {:b, true})
{:ok, says} = Madness.Story.Area.list_says(area_pid, %{})
assert says == [
{:plain, "You wander an aimless road."},
{:plain, "It smells like fish."}
]
{:ok, says} = Madness.Story.Area.list_says(area_pid, %{b: true})
assert says == [
{:plain, "You wander an aimless road."},
{:plain, "It does not seem to lead anywhere."},
{:plain, "It smells like fish."}
]
end
test "Multiple steps" do
story_sup_pid = Madness.Stories.Supervisor.whereis
{:ok, story_uuid} = Madness.Stories.Supervisor.add_story story_sup_pid
story_pid = Madness.Story.whereis story_uuid
{:ok, area_uuid} = Madness.Story.add_area story_pid
area_pid = Madness.Story.Area.whereis area_uuid
Madness.Story.Area.add_step(area_pid, "test1", "Test A")
Madness.Story.Area.add_step(area_pid, "B", "test2", "Test B")
Madness.Story.Area.add_step(area_pid, "test3", "Test C")
Madness.Story.Area.add_condition(area_pid, "B", :eq, {:b, true})
{:ok, steps} = Madness.Story.Area.list_steps(area_pid, %{})
assert steps == [
{"test1", "Test A"},
{"test3", "Test C"}
]
{:ok, steps} = Madness.Story.Area.list_steps(area_pid, %{b: true})
assert steps == [
{"test1", "Test A"},
{"test2", "Test B"},
{"test3", "Test C"}
]
end
test "Transitions" do
story_sup_pid = Madness.Stories.Supervisor.whereis
{:ok, story_uuid} = Madness.Stories.Supervisor.add_story story_sup_pid
story_pid = Madness.Story.whereis story_uuid
{:ok, area1_uuid} = Madness.Story.add_area story_pid
{:ok, area2_uuid} = Madness.Story.add_area story_pid
area1_pid = Madness.Story.Area.whereis area1_uuid
area2_pid = Madness.Story.Area.whereis area2_uuid
Madness.Story.Area.add_step(area1_pid, "test1", "Test A")
Madness.Story.Area.add_transition_command(area1_pid, "test1", area2_uuid)
Madness.Story.Area.add_step(area2_pid, "test2", "Test B")
Madness.Story.Area.add_transition_command(area2_pid, "test2", area1_uuid)
{:ok, comms} = Madness.Story.Area.list_step_commands(area1_pid, "test1")
assert comms == [
{:transition, area2_uuid}
]
{:ok, comms} = Madness.Story.Area.list_step_commands(area2_pid, "test2")
assert comms == [
{:transition, area1_uuid}
]
end
end
| 34.133333 | 94 | 0.667969 |
9e4ce042c395ee057e8421f8622aae8346faef34 | 6,444 | ex | Elixir | lib/finch/telemetry.ex | balexand/finch | 19ad9389e003f3d437580a8fab16f51e930e9589 | [
"MIT"
] | null | null | null | lib/finch/telemetry.ex | balexand/finch | 19ad9389e003f3d437580a8fab16f51e930e9589 | [
"MIT"
] | 1 | 2020-12-15T20:14:43.000Z | 2020-12-15T20:14:43.000Z | lib/finch/telemetry.ex | balexand/finch | 19ad9389e003f3d437580a8fab16f51e930e9589 | [
"MIT"
] | null | null | null | defmodule Finch.Telemetry do
@moduledoc """
Telemetry integration.
Unless specified, all time's are in `:native` units.
Finch executes the following events:
* `[:finch, :queue, :start]` - Executed before checking out a connection from the pool.
#### Measurements
* `:system_time` - The system time
#### Metadata:
* `:pool` - The pool's pid
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `[:finch, :queue, :stop]` - Executed after a connection is retrieved from the pool.
#### Measurements
* `:duration` - Duration to check out a pool connection.
* `:idle_time` - Elapsed time since the connection was last checked in or initialized.
#### Metadata
* `:pool` - The pool's pid
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `[:finch, :queue, :exception]` - Executed if checking out a connection throws an exception.
#### Measurements
* `:duration` - The time it took before raising an exception
#### Metadata
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `:kind` - The type of exception.
* `:error` - Error description or error data.
* `:stacktrace` - The stacktrace
* `[:finch, :connect, :start]` - Executed before opening a new connection.
If a connection is being re-used this event will *not* be executed.
#### Measurements
* `:system_time` - The system time
#### Metadata
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `[:finch, :connect, :stop]` - Executed after a connection is opened.
#### Measurements
* `:duration` - Duration to connect to the host.
#### Metadata:
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `:error` - This value is optional. It includes any errors that occured while opening the connection.
* `[:finch, :request, :start]` - Executed before sending a request.
#### Measurements:
* `:system_time` - The system time
* `:idle_time` - Elapsed time since the connection was last checked in or initialized.
#### Metadata:
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `:path` - The request path.
* `:method` - The request method.
* `[:finch, :request, :stop]` - Executed after a request is finished.
#### Measurements:
* `:duration` - Duration to make the request.
* `:idle_time` - Elapsed time since the connection was last checked in or initialized.
#### Metadata:
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `:path` - The request path.
* `:method` - The request method.
* `:error` - This value is optional. It includes any errors that occured while making the request.
* `[:finch, :response, :start]` - Executed before receiving the response.
#### Measurements:
* `:system_time` - The system time
* `:idle_time` - Elapsed time since the connection was last checked in or initialized.
#### Metadata:
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `:path` - The request path.
* `:method` - The request method.
* `[:finch, :response, :stop]` - Executed after a response has been fully received.
#### Measurements:
* `:duration` - Duration to receive the response.
* `:idle_time` - Elapsed time since the connection was last checked in or initialized.
#### Metadata:
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `:path` - The request path.
* `:method` - The request method.
* `:error` - This value is optional. It includes any errors that occured while receiving the response.
* `[:finch, :reused_connection]` - Executed if an existing connection is reused. There are no measurements provided with this event.
#### Metadata:
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
* `[:finch, :max_idle_time_exceeded]` - Executed if a connection was discarded because the max_idle_time had been reached.
#### Measurements:
* `:idle_time` - Elapsed time since the connection was last checked in or initialized.
#### Metadata
* `:scheme` - The scheme used in the connection. either `http` or `https`
* `:host` - The host address
* `:port` - the port to connect on.
"""
@doc false
# emits a `start` telemetry event and returns the the start time
def start(event, meta \\ %{}, extra_measurements \\ %{}) do
start_time = System.monotonic_time()
:telemetry.execute(
[:finch, event, :start],
Map.merge(extra_measurements, %{system_time: System.system_time()}),
meta
)
start_time
end
@doc false
# Emits a stop event.
def stop(event, start_time, meta \\ %{}, extra_measurements \\ %{}) do
end_time = System.monotonic_time()
measurements = Map.merge(extra_measurements, %{duration: end_time - start_time})
:telemetry.execute(
[:finch, event, :stop],
measurements,
meta
)
end
@doc false
def exception(event, start_time, kind, reason, stack, meta \\ %{}, extra_measurements \\ %{}) do
end_time = System.monotonic_time()
measurements = Map.merge(extra_measurements, %{duration: end_time - start_time})
meta =
meta
|> Map.put(:kind, kind)
|> Map.put(:error, reason)
|> Map.put(:stacktrace, stack)
:telemetry.execute([:finch, event, :exception], measurements, meta)
end
@doc false
# Used for reporting generic events
def event(event, measurements, meta) do
:telemetry.execute([:finch, event], measurements, meta)
end
end
| 33.216495 | 134 | 0.63594 |
9e4d037cedb85e05656108882a1bbe59eb8f4d36 | 744 | exs | Elixir | test/day01/inverse_captcha_test.exs | fboyer/advent_of_code_2017 | feb21ede4828865270a1f7d0d499ba21d1ed8f8b | [
"MIT"
] | null | null | null | test/day01/inverse_captcha_test.exs | fboyer/advent_of_code_2017 | feb21ede4828865270a1f7d0d499ba21d1ed8f8b | [
"MIT"
] | null | null | null | test/day01/inverse_captcha_test.exs | fboyer/advent_of_code_2017 | feb21ede4828865270a1f7d0d499ba21d1ed8f8b | [
"MIT"
] | null | null | null | defmodule Day01.InverseCaptchaTest do
use ExUnit.Case
import Day01.InverseCaptcha

describe "day 01 -" do
  # calc_captcha/1: sums every digit that equals the digit immediately after
  # it; the list is circular, so the last digit compares with the first
  # (e.g. "1122" -> 1 + 2 = 3, "1111" -> all four match -> 4).
  test "part 1 - can sum all of the digits that match the next digit in the list" do
    assert calc_captcha("1122") == 3
    assert calc_captcha("1111") == 4
    assert calc_captcha("1234") == 0
    assert calc_captcha("91212129") == 9
  end

  # calc_circular_captcha/1: same idea, but each digit is compared with the
  # digit halfway around the circular list instead of its neighbour
  # (e.g. "1212" -> every digit matches its opposite -> 1+2+1+2 = 6).
  test "part 2 - can sum all of the digits that match the next digit halfway around the circular list" do
    assert calc_circular_captcha("1212") == 6
    assert calc_circular_captcha("1221") == 0
    assert calc_circular_captcha("123425") == 4
    assert calc_circular_captcha("123123") == 12
    assert calc_circular_captcha("12131415") == 4
  end
end
end
| 32.347826 | 107 | 0.678763 |
9e4d1aef49ed4612814a70b3417c3bde5d10a495 | 2,080 | exs | Elixir | config/dev.exs | AminArria/reserva | 13faef72927aeb84d456f080d65249b8ef50180e | [
"MIT"
] | null | null | null | config/dev.exs | AminArria/reserva | 13faef72927aeb84d456f080d65249b8ef50180e | [
"MIT"
] | 2 | 2021-03-09T00:49:12.000Z | 2021-05-08T03:13:05.000Z | config/dev.exs | AminArria/reserva | 13faef72927aeb84d456f080d65249b8ef50180e | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :reserva, ReservaWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :reserva, ReservaWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/reserva_web/views/.*(ex)$},
~r{lib/reserva_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
# Configure your database
config :reserva, Reserva.Repo,
username: "postgres",
password: "postgres",
database: "reserva_dev",
hostname: "localhost",
pool_size: 10
| 27.368421 | 68 | 0.686538 |
9e4d4224cadea9ba797ac9b5a7db210edd0af967 | 349 | exs | Elixir | priv/repo/seeds.exs | knoebber/petaller | 3532db5a3688459127d2427af42e32ca0f494d44 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | knoebber/petaller | 3532db5a3688459127d2427af42e32ca0f494d44 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | knoebber/petaller | 3532db5a3688459127d2427af42e32ca0f494d44 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Purple.Repo.insert!(%Purple.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.083333 | 61 | 0.704871 |
9e4d544b128425f08a5c000e297925f4f9ff80d4 | 1,773 | exs | Elixir | test/model/serialization_test.exs | szTheory/neo4j_sips_models | 70c7ad45da4aec97dc56dcf951df878b9e801e47 | [
"MIT"
] | 8 | 2016-04-22T03:13:02.000Z | 2021-01-01T02:49:28.000Z | test/model/serialization_test.exs | szTheory/neo4j_sips_models | 70c7ad45da4aec97dc56dcf951df878b9e801e47 | [
"MIT"
] | 2 | 2016-10-06T07:20:19.000Z | 2020-03-05T02:04:52.000Z | test/model/serialization_test.exs | szTheory/neo4j_sips_models | 70c7ad45da4aec97dc56dcf951df878b9e801e47 | [
"MIT"
] | 5 | 2016-07-21T04:31:24.000Z | 2020-03-03T20:23:01.000Z | defmodule Model.SerializationTest do
use ExUnit.Case

# A single model serializes to a JSON object wrapped in a pluralized
# "people" array; unset fields come out as null and relationship lists as [].
test "serializes a model" do
  person = Person.build(name: "John DOE", email: "[email protected]", doe_family: true, age: 30)
  assert Person.to_json(person) == "{\"people\":[{\"updated_at\":null,\"neo4j_sips\":true,\"name\":\"John DOE\",\"married_to\":[],\"id\":null,\"friend_of\":[],\"errors\":null,\"email\":\"[email protected]\",\"doe_family\":true,\"created_at\":null,\"age\":30}]}"
end

# A list of models serializes into the same "people" wrapper, preserving order.
test "serializes an array of models" do
  people = [Person.build(id: 1, name: "John DOE"), Person.build(id: 2, name: "Jane DOE")]
  assert Person.to_json(people) == "{\"people\":[{\"updated_at\":null,\"neo4j_sips\":true,\"name\":\"John DOE\",\"married_to\":[],\"id\":1,\"friend_of\":[],\"errors\":null,\"email\":null,\"doe_family\":false,\"created_at\":null,\"age\":null},{\"updated_at\":null,\"neo4j_sips\":true,\"name\":\"Jane DOE\",\"married_to\":[],\"id\":2,\"friend_of\":[],\"errors\":null,\"email\":null,\"doe_family\":false,\"created_at\":null,\"age\":null}]}"
end

# Related models are side-loaded: the relationship field holds ids
# (married_to: [1]) while the related record is appended to the array.
test "serializes a model with relationships" do
  john = Person.build(id: 1, name: "John DOE", email: "[email protected]", age: 30)
  jane = Person.build(name: "Jane DOE", email: "[email protected]", age: 20, married_to: [john])
  assert Person.to_json(jane) == "{\"people\":[{\"updated_at\":null,\"neo4j_sips\":true,\"name\":\"Jane DOE\",\"married_to\":[1],\"id\":null,\"friend_of\":[],\"errors\":null,\"email\":\"[email protected]\",\"doe_family\":false,\"created_at\":null,\"age\":20},{\"updated_at\":null,\"neo4j_sips\":true,\"name\":\"John DOE\",\"married_to\":[],\"id\":1,\"friend_of\":[],\"errors\":null,\"email\":\"[email protected]\",\"doe_family\":false,\"created_at\":null,\"age\":30}]}"
end
end
| 88.65 | 477 | 0.624929 |
9e4d6c9a5fd3c69f334ff4630e844917a55733a1 | 568 | ex | Elixir | template/$PROJECT_NAME$/lib/$PROJECT_NAME$/accounts/accounts.ex | luke-hawk/degree | 9274fc979ea0715d3bdf65a0adb14fdd2419db9a | [
"MIT"
] | 3 | 2019-08-01T08:28:24.000Z | 2019-08-18T14:50:32.000Z | template/$PROJECT_NAME$/lib/$PROJECT_NAME$/accounts/accounts.ex | luke-hawk/degree | 9274fc979ea0715d3bdf65a0adb14fdd2419db9a | [
"MIT"
] | 3 | 2021-03-09T12:14:26.000Z | 2022-02-10T18:46:15.000Z | template/$PROJECT_NAME$/lib/$PROJECT_NAME$/accounts/accounts.ex | luke-hawk/degree | 9274fc979ea0715d3bdf65a0adb14fdd2419db9a | [
"MIT"
] | null | null | null | defmodule <%= @project_name_camel_case %>.Accounts do
@moduledoc """
The Accounts context.
"""

import Ecto.Query, warn: false
# EEx template placeholders: expanded to the generated project's module names.
alias <%= @project_name_camel_case %>.Repo
alias <%= @project_name_camel_case %>.Accounts.User

@doc """
Returns all users.
"""
def list_users do
  User
  |> Repo.all()
end

@doc """
Creates a user from `attrs`, validated through the registration changeset.

Returns `{:ok, user}` on success or `{:error, changeset}` on validation failure.
"""
def create_user(attrs \\ %{}) do
  %User{}
  |> User.registration_changeset(attrs)
  |> Repo.insert()
end

@doc """
Fetches a user by primary key, returning `nil` when no user exists.
"""
def get_user(id) do
  User
  |> Repo.get(id)
end

@doc """
Returns a registration changeset for tracking user changes (e.g. in forms).
"""
def change_user_registration(%User{} = user) do
  User.registration_changeset(user, %{})
end
end
| 18.322581 | 53 | 0.646127 |
9e4d6d6d7c80095a7d426a961b94d4b9dba15208 | 1,980 | ex | Elixir | lib/tilex_web/router.ex | dlederle/tilex | 63520eeb013a2a18174de0b6e41e8b99977dbc20 | [
"MIT"
] | null | null | null | lib/tilex_web/router.ex | dlederle/tilex | 63520eeb013a2a18174de0b6e41e8b99977dbc20 | [
"MIT"
] | null | null | null | lib/tilex_web/router.ex | dlederle/tilex | 63520eeb013a2a18174de0b6e41e8b99977dbc20 | [
"MIT"
] | null | null | null | defmodule TilexWeb.Router do
use TilexWeb, :router

# NOTE(review): reading Application env into a module attribute freezes the
# value at compile time; if :auth_controller should be runtime-configurable,
# resolve it inside a plug/controller instead.
@auth_controller Application.get_env(:tilex, :auth_controller)

# Standard browser pipeline plus app-specific basic auth and format injection.
pipeline :browser do
  plug(:accepts, ["html"])
  plug(:fetch_session)
  plug(:fetch_flash)
  plug(:protect_from_forgery)
  plug(:put_secure_browser_headers)
  plug(Tilex.Plug.BasicAuth)
  plug(Tilex.Plug.FormatInjector)
end

# Guardian session authentication; LoadResource tolerates anonymous visitors
# via allow_blank: true.
pipeline :browser_auth do
  plug(Guardian.Plug.Pipeline,
    module: Tilex.Auth.Guardian,
    error_handler: Tilex.Auth.ErrorHandler
  )

  plug(Guardian.Plug.VerifySession)
  plug(Guardian.Plug.LoadResource, allow_blank: true)
end

pipeline :api do
  plug(:accepts, ["json"])
end

# JSON API routes.
scope "/api", TilexWeb do
  pipe_through([:api])
  get("/developer_posts.json", Api.DeveloperPostController, :index)
end

# Routes outside any pipeline: feed and tracking pixel.
get("/rss", TilexWeb.FeedController, :index)
get("/pixel", TilexWeb.PixelController, :index)

# Main browser-facing routes (auth-aware).
scope "/", TilexWeb do
  pipe_through([:browser, :browser_auth])

  get("/admin", @auth_controller, :index)

  delete("/auth/logout", AuthController, :delete)
  get("/auth/:provider", AuthController, :request)
  get("/auth/:provider/callback", AuthController, :callback)
  post("/auth/:provider/callback", AuthController, :callback)

  get("/statistics", StatsController, :index)
  get("/developer/statistics", StatsController, :developer)
  get("/sitemap.xml", SitemapController, :index)
  get("/manifest.json", WebManifestController, :index)
  get("/random", PostController, :random)
  get("/authors/:name", DeveloperController, :show)
  get("/profile/edit", DeveloperController, :edit)
  put("/profile/edit", DeveloperController, :update)
  get("/", PostController, :index)
  resources("/posts", PostController, param: "titled_slug")
  post("/posts/:slug/like.json", PostController, :like)
  post("/posts/:slug/unlike.json", PostController, :unlike)

  # catch-any route should be last
  get("/:name", ChannelController, :show)
end
end
| 30 | 69 | 0.693434 |
9e4d71cdb3747fc124b6fcb6325c4948820ad4d1 | 1,006 | ex | Elixir | lib/challenge_gov/images.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 9 | 2020-02-26T20:24:38.000Z | 2022-03-22T21:14:52.000Z | lib/challenge_gov/images.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 15 | 2020-04-22T19:33:24.000Z | 2022-03-26T15:11:17.000Z | lib/challenge_gov/images.ex | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 4 | 2020-04-27T22:58:57.000Z | 2022-01-14T13:42:09.000Z | defmodule ChallengeGov.Images do
@moduledoc """
Common module for dealing with image conversion
"""

alias Stein.Storage.FileUpload
alias Stein.Storage.Temp

# Options accepted by `convert/2`, e.g. `extname:` and `thumbnail:`.
@type opts() :: Keyword.t()

@doc """
Convert an image file using image magick
"""
@spec convert(FileUpload.t(), opts()) :: {:ok, Path.t()} | {:error, :convert}
def convert(file, opts) do
  # Asserts temp-file creation succeeds; failure here is treated as a bug
  # and crashes rather than returning an error tuple.
  {:ok, temp_path} = Temp.create(extname: Keyword.get(opts, :extname))

  # Shells out to ImageMagick's `convert`; exit status 0 means the converted
  # image was written to temp_path.
  case Porcelain.exec("convert", convert_args(file.path, temp_path, opts)) do
    %{status: 0} ->
      {:ok, temp_path}

    _ ->
      {:error, :convert}
  end
end

# Builds the argv for `convert`: source path, option flags, destination path.
defp convert_args(file_path, temp_path, opts) do
  [file_path | opt_args(opts)] ++ [temp_path]
end
# Translates high-level options into the matching ImageMagick CLI flags,
# silently ignoring any option it does not recognize.
defp opt_args(opts) do
  Enum.flat_map(opts, fn
    {:thumbnail, size} ->
      # "^" fills the target box; gravity+extent then center-crops to `size`.
      ["-thumbnail", "#{size}^", "-auto-orient", "-gravity", "center", "-extent", size]

    _other ->
      []
  end)
end
end
| 22.863636 | 92 | 0.596421 |
9e4dd0892fb20c1f1c5ee48de85cf7a4adb263dc | 1,175 | ex | Elixir | lib/pixie/backends/ets/message_queue.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie/backends/ets/message_queue.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie/backends/ets/message_queue.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | defmodule Pixie.Backend.ETS.MessageQueue do
use GenServer

@moduledoc """
This process keeps track of any messages waiting to be delivered
to clients.
"""

# Starts the queue as a singleton registered under the module name.
def start_link do
  GenServer.start_link __MODULE__, [], name: __MODULE__
end

def init _options do
  # :bag table keyed by client_id, so many messages per client. NOTE(review):
  # a bag does not store duplicate identical objects — queueing the exact
  # same message twice for one client keeps only one copy; confirm intended.
  table = :ets.new __MODULE__, [:bag, :protected, :named_table, read_concurrency: true]
  Process.flag :trap_exit, true
  {:ok, table}
end

# Asynchronously appends `messages` to `client_id`'s queue.
def queue client_id, messages do
  GenServer.cast __MODULE__, {:queue, client_id, messages}
end

# Reads all pending messages for `client_id` in the caller's process, then
# asynchronously asks the server to delete them. NOTE(review): because the
# delete is a cast, two concurrent dequeues for the same client can both
# observe (and deliver) the same messages.
def dequeue client_id do
  messages = :ets.select __MODULE__, [{{client_id, :"$2"}, [], [:"$2"]}]

  case messages do
    [] ->
      []

    messages ->
      GenServer.cast __MODULE__, {:delete, client_id, messages}
      messages
  end
end

# Server-side insert of {client_id, message} tuples.
def handle_cast {:queue, client_id, messages}, table do
  tuples = Enum.map messages, fn(message)-> {client_id, message} end
  :ets.insert __MODULE__, tuples
  {:noreply, table}
end

# Server-side removal of exactly the objects a dequeue observed.
def handle_cast {:delete, client_id, messages}, table do
  Enum.each messages, fn(message)->
    :ets.delete_object __MODULE__, {client_id, message}
  end
  {:noreply, table}
end
end
| 25 | 89 | 0.662128 |
9e4df1e62b9810c318b431ae8b0c2a9415f28e35 | 2,509 | ex | Elixir | lib/guess_who/engine.ex | alanvoss/guess_who | 75227d97202283ffeeb6134fb8b2a591a9c49fbe | [
"MIT"
] | null | null | null | lib/guess_who/engine.ex | alanvoss/guess_who | 75227d97202283ffeeb6134fb8b2a591a9c49fbe | [
"MIT"
] | null | null | null | lib/guess_who/engine.ex | alanvoss/guess_who | 75227d97202283ffeeb6134fb8b2a591a9c49fbe | [
"MIT"
] | 3 | 2021-10-07T23:28:18.000Z | 2021-10-08T00:54:14.000Z | defmodule GuessWho.Engine do
alias GuessWho.{Contender, Attributes, Game, Turn}

# Hard cap on turns per game; reaching it without a correct name guess is a
# failure for the contender.
@max_turns 50

@doc """
Plays every registered contender module against every character and returns
`{contender_name, total_score}` pairs, where the score is the total number
of turns used (lower is better).
"""
@spec score_all_contenders() :: [{binary(), integer()}]
def score_all_contenders() do
  modules = Contender.all_player_modules()
  names = Enum.map(modules, & &1.name)

  scores =
    modules
    |> Enum.map(&score_contender/1)
    |> Enum.map(fn {_, _, total_score} -> total_score end)

  Enum.zip(names, scores)
end

@doc """
Scores a single contender against every character.

Returns `{game_logs, per_character_score, total_score}`, where each game's
score is the number of turns it took.
"""
@spec score_contender(Contender.t() | atom()) ::
        {[Game.t()], %{Attributes.character() => integer()}, integer()}
def score_contender(contender) do
  contender = get_contender(contender)
  game_logs = match_contender_against_all_characters(contender)

  per_character_score =
    game_logs
    |> Enum.map(&{&1.character, length(&1.turns)})
    |> Map.new()

  total_score =
    per_character_score
    |> Map.values()
    |> Enum.sum()

  {game_logs, per_character_score, total_score}
end

@doc "Runs one game per known character for the given contender."
@spec match_contender_against_all_characters(Contender.t() | atom()) :: [Game.t()]
def match_contender_against_all_characters(contender) do
  contender = get_contender(contender)
  Enum.map(Attributes.characters(), &start_game(contender, &1))
end

@doc "Plays a single game in which `contender` tries to guess `character`."
@spec start_game(Contender.t() | atom(), Attributes.character()) :: Game.t()
def start_game(contender, character) do
  contender = get_contender(contender)
  {outcome, turns} = do_turns(contender, character, [], 0)
  Game.new(contender, character, Enum.reverse(turns), outcome)
end

# Game over: the most recent turn guessed the name correctly.
defp do_turns(_, _, [%Turn{response: {:name_guessed?, true}} | _] = turns, _) do
  {:success, turns}
end

# Game over: turn budget exhausted.
defp do_turns(_, _, turns, @max_turns) do
  {:failure, turns}
end

defp do_turns(contender, character, turns, count) do
  # First turn gets nil response/state; later turns see the previous turn's.
  turn_arguments =
    case turns do
      [] -> [nil, nil]
      [%Turn{response: response, state: state} | _] -> [response, state]
    end

  {query, state} = apply(contender.module, :turn, turn_arguments)
  response = Attributes.character_matches?(character, query)
  do_turns(contender, character, [Turn.new(query, response, state) | turns], count + 1)
end

# Normalizes the various ways a contender can be referenced into a
# %Contender{} struct: struct, module atom, or (un)prefixed module name.
defp get_contender(%Contender{} = contender), do: contender
defp get_contender(contender) when is_atom(contender), do: Contender.new(contender)

# NOTE(review): String.to_atom/1 creates atoms at runtime; acceptable for a
# trusted contender list, unsafe if these strings ever come from user input.
defp get_contender("Elixir." <> _contender = module_string) do
  module_string |> String.to_atom() |> get_contender()
end

defp get_contender(contender), do: get_contender("Elixir." <> contender)
end
| 30.975309 | 89 | 0.673177 |
9e4e13db1df4c8ec7687f44ecb8e01f28f8d8bfe | 1,289 | ex | Elixir | lib/mpdex/playback.ex | bosko/mpdex | fe64ce453332770df99c12320bfeeb5ac8b801dc | [
"MIT"
] | null | null | null | lib/mpdex/playback.ex | bosko/mpdex | fe64ce453332770df99c12320bfeeb5ac8b801dc | [
"MIT"
] | null | null | null | lib/mpdex/playback.ex | bosko/mpdex | fe64ce453332770df99c12320bfeeb5ac8b801dc | [
"MIT"
] | null | null | null | defmodule Mpdex.Playback do
# Sets MPD crossfading between songs to `seconds`.
def crossfade(seconds) when is_integer(seconds) do
  client().send("crossfade #{seconds}")
end

# Skips to the next song in the playlist.
def next do
  client().send("next")
end

# Resumes playback ("pause 0" in the MPD protocol).
def resume do
  client().send("pause 0")
end

# Pauses playback ("pause 1" in the MPD protocol).
def pause do
  client().send("pause 1")
end
# Starts playback either at a playlist position (`:position`) or at a
# specific song id (`:id`). Any other argument shape yields
# `{:error, "Invalid options"}`.
def play(:position, position), do: client().send("play #{position}")
def play(:id, id), do: client().send("playid #{id}")
def play(_what, _pos), do: {:error, "Invalid options"}
# Skips back to the previous song in the playlist.
def previous do
  client().send("previous")
end

# Disables random (shuffled) playback.
def random_off do
  client().send("random 0")
end

# Enables random (shuffled) playback.
def random_on do
  client().send("random 1")
end

# Disables playlist repeat.
def repeat_off do
  client().send("repeat 0")
end

# Enables playlist repeat.
def repeat_on do
  client().send("repeat 1")
end
@doc """
Seeks to an absolute position of `time` seconds.

Accepts any number (the previous version only accepted floats, needlessly
rejecting whole-second seeks such as `seek(30)`); MPD accepts both integral
and fractional seconds.
"""
def seek(time) when is_number(time) do
  client().send("seek #{time}")
end

@doc "Seeks forward by `time` seconds relative to the current position."
def forward(time) when is_number(time) do
  client().send("seek +#{time}")
end

@doc "Seeks backward by `time` seconds relative to the current position."
def backward(time) when is_number(time) do
  client().send("seek -#{time}")
end
# Stops playback entirely.
def stop do
  client().send("stop")
end

# Sets the volume; MPD's setvol only accepts 0..100, enforced by the guard.
def volume(vol) when is_integer(vol) and vol >= 0 and vol <= 100 do
  client().send("setvol #{vol}")
end

# Resolves the MPD client module from application config at call time,
# which also allows swapping in a test double.
defp client() do
  Application.get_env(:mpdex, :mpd_client)
end
end
| 17.186667 | 69 | 0.597362 |
9e4e1744f889087d153cd4d0177796be147bcfe4 | 4,116 | exs | Elixir | test/transform_test.exs | lnr0626/surface | 11ae4a8cfa167fc11c8f960e9d5821a057c9b5bb | [
"MIT"
] | 1 | 2021-06-04T20:46:52.000Z | 2021-06-04T20:46:52.000Z | test/transform_test.exs | lnr0626/surface | 11ae4a8cfa167fc11c8f960e9d5821a057c9b5bb | [
"MIT"
] | null | null | null | test/transform_test.exs | lnr0626/surface | 11ae4a8cfa167fc11c8f960e9d5821a057c9b5bb | [
"MIT"
] | null | null | null | defmodule Surface.TransformTest do
use ExUnit.Case

# Plain fixture component: renders its default slot inside a <span>.
defmodule Span do
  use Surface.Component

  slot default

  def render(assigns) do
    ~F"""
    <span><#slot /></span>
    """
  end
end

# Fixture whose transform/1 swaps the node's module to Span and reports the
# call to the test process so tests can assert the transform ran.
defmodule DivToSpan do
  use Surface.Component

  slot default

  @impl true
  def render(assigns) do
    ~F"""
    <div><#slot /></div>
    """
  end

  @impl Surface.BaseComponent
  def transform(node) do
    send(self(), {DivToSpan, "transforming node"})
    %{node | module: Span}
  end
end
# LiveComponent fixture: transform/1 rewrites the node to the plain Span
# component (changing both module and type) and notifies the test process.
defmodule LiveDivToSpan do
  use Surface.LiveComponent

  slot default

  @impl true
  def render(assigns) do
    ~F"""
    <div><#slot /></div>
    """
  end

  @impl Surface.BaseComponent
  def transform(node) do
    send(self(), {LiveDivToSpan, "transforming node"})
    %{node | module: Span, type: Surface.Component}
  end
end

# LiveView fixture with the same transform behaviour as LiveDivToSpan.
defmodule LiveDivViewToSpan do
  use Surface.LiveView

  @impl true
  def render(assigns) do
    ~F"""
    <div></div>
    """
  end

  @impl Surface.BaseComponent
  def transform(node) do
    send(self(), {LiveDivViewToSpan, "transforming node"})
    %{node | module: Span, type: Surface.Component}
  end
end
# Macro component fixture; used to verify transform/1 is NOT invoked for
# macro components (its transform just reports and returns the node as-is).
defmodule MacroDivToSpan do
  use Surface.MacroComponent

  @impl true
  def expand(_, _, _) do
    Surface.Compiler.compile(
      """
      <span>This is a test component. Don't do this at home.</span>
      """,
      1,
      __ENV__
    )
  end

  @impl Surface.BaseComponent
  def transform(node) do
    send(self(), {MacroDivToSpan, "transforming node"})
    node
  end
end

# Fixture with a :list prop; used to verify transform/1 is skipped when
# compilation fails on an invalid property value.
defmodule ListProp do
  use Surface.Component

  prop prop, :list

  @impl true
  def render(assigns) do
    ~F"""
    <span></span>
    """
  end

  @impl Surface.BaseComponent
  def transform(node) do
    send(self(), {ListProp, "transforming node"})
    node
  end
end
# Compiling markup that uses DivToSpan must invoke its transform/1
# (observed via the message it sends) and yield a node pointing at Span.
test "transform is run on compile when defined for Surface.Component" do
  code = """
  <DivToSpan>Some content</DivToSpan>
  """

  [node | _] = Surface.Compiler.compile(code, 1, __ENV__)

  assert_receive {DivToSpan, "transforming node"}

  assert %Surface.AST.Component{
           module: Span
         } = node
end

# Same expectation for a LiveComponent-based fixture.
test "transform is run on compile when defined for Surface.LiveComponent" do
  code = """
  <LiveDivToSpan id="div">Some content</LiveDivToSpan>
  """

  [node | _] = Surface.Compiler.compile(code, 1, __ENV__)

  assert_receive {LiveDivToSpan, "transforming node"}

  assert %Surface.AST.Component{
           module: Span
         } = node
end

# Same expectation for a LiveView-based fixture.
test "transform is run on compile when defined for Surface.LiveView" do
  code = """
  <LiveDivViewToSpan id="view" />
  """

  [node | _] = Surface.Compiler.compile(code, 1, __ENV__)

  assert_receive {LiveDivViewToSpan, "transforming node"}

  assert %Surface.AST.Component{
           module: Span
         } = node
end
# Macro components are expanded, not transformed: no message is sent and the
# result is a Container node, not a Component.
test "transform is NOT run on compile when defined for Surface.MacroComponent" do
  code = """
  <#MacroDivToSpan>Some content</#MacroDivToSpan>
  """

  [node | _] = Surface.Compiler.compile(code, 1, __ENV__)

  refute_receive {MacroDivToSpan, "transforming node"}

  assert %Surface.AST.Container{} = node
end

# When parsing fails (unclosed tag) the compiler raises before any
# transform can run, so no transform message is received.
test "transform is not run on parse errors" do
  code = """
  <DivToSpan>Invalid syntax (missing end tag)
  """

  assert_raise(
    Surface.Compiler.ParseError,
    "nofile:1:2: expected closing node for <DivToSpan> defined on line 1, got EOF",
    fn ->
      Surface.Compiler.compile(code, 1, __ENV__)
    end
  )

  refute_receive {DivToSpan, "transforming node"}
end

# When property validation fails the compiler raises a CompileError and the
# component's transform is likewise skipped.
test "transform is not run on compile errors" do
  code = """
  <ListProp prop="string" />
  """

  assert_raise(
    CompileError,
    "nofile:1: invalid value for property \"prop\". Expected a :list, got: \"string\".",
    fn ->
      Surface.Compiler.compile(code, 1, __ENV__)
    end
  )

  refute_receive {ListProp, "transforming node"}
end
end
| 20.893401 | 90 | 0.607629 |
9e4e3782bfcba8a13b8ee4f820dedf6d52100702 | 1,067 | exs | Elixir | mix.exs | ankurp/chat | a9fced84898c9d47a17ea14fda6ed78e57ef04e8 | [
"MIT"
] | null | null | null | mix.exs | ankurp/chat | a9fced84898c9d47a17ea14fda6ed78e57ef04e8 | [
"MIT"
] | null | null | null | mix.exs | ankurp/chat | a9fced84898c9d47a17ea14fda6ed78e57ef04e8 | [
"MIT"
] | null | null | null | defmodule Chat.Mixfile do
use Mix.Project

# Project definition consumed by Mix.
# NOTE(review): bare `Mix.env` and `deps` (without parentheses) and
# `build_embedded` are deprecated in newer Elixir/Mix — fine for the
# `elixir: "~> 1.0"` era this file targets; confirm before upgrading.
def project do
  [app: :chat,
   version: "0.0.1",
   elixir: "~> 1.0",
   elixirc_paths: elixirc_paths(Mix.env),
   compilers: [:phoenix] ++ Mix.compilers,
   build_embedded: Mix.env == :prod,
   start_permanent: Mix.env == :prod,
   deps: deps]
end

# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
  [mod: {Chat, []},
   applications: [:phoenix, :phoenix_html, :cowboy, :logger,
                  :phoenix_ecto, :sqlite_ecto]]
end

# Specifies which paths to compile per environment
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]

# Specifies your project dependencies
#
# Type `mix help deps` for examples and options
defp deps do
  [{:phoenix, "~> 1.0.2"},
   {:phoenix_ecto, "~> 1.1"},
   {:sqlite_ecto, "~> 1.0.1"},
   {:phoenix_html, "~> 2.1"},
   {:phoenix_live_reload, "~> 1.0", only: :dev},
   {:cowboy, "~> 1.0"}]
end
end
| 26.675 | 63 | 0.596064 |
9e4e7c43eecd8b7dd7b78dd5a78ab5934469221b | 1,004 | ex | Elixir | elixir/lib/year_2015/day_02.ex | fdm1/advent_of_code | a1e91d847fd8fd9f6b2f48333203729b9d64fd80 | [
"MIT"
] | null | null | null | elixir/lib/year_2015/day_02.ex | fdm1/advent_of_code | a1e91d847fd8fd9f6b2f48333203729b9d64fd80 | [
"MIT"
] | null | null | null | elixir/lib/year_2015/day_02.ex | fdm1/advent_of_code | a1e91d847fd8fd9f6b2f48333203729b9d64fd80 | [
"MIT"
] | 1 | 2018-12-02T20:30:23.000Z | 2018-12-02T20:30:23.000Z | # 2015 - Day 02: http://adventofcode.com/2015/day/2
defmodule ElixirAdvent.Year2015.Day02 do
  @moduledoc """
  Solutions for Advent of Code 2015, day 2
  (http://adventofcode.com/2015/day/2).

  The puzzle input is one box per line, each formatted as `LxWxH`
  (e.g. `"2x3x4"`).
  """

  @doc """
  Part 1: total square feet of wrapping paper needed — each box needs its
  full surface area plus the area of its smallest side as slack.
  """
  def part1(input) do
    input
    |> input_to_dim_list()
    |> Enum.map(&paper_for_box/1)
    |> Enum.sum()
  end

  @doc """
  Part 2: total feet of ribbon needed — the smallest face's perimeter plus
  a bow equal to the box's volume.
  """
  def part2(input) do
    input
    |> input_to_dim_list()
    |> Enum.map(&ribbon_for_box/1)
    |> Enum.sum()
  end

  @doc """
  Parses the raw puzzle input into a list of integer dimension lists.

  Blank lines are ignored (`trim: true`), fixing a crash the previous
  version had on input ending with a trailing newline, where
  `String.to_integer("")` raised.
  """
  def input_to_dim_list(input) do
    input
    |> String.split("\n", trim: true)
    |> Enum.map(fn line ->
      line
      |> String.split("x")
      |> Enum.map(&String.to_integer/1)
    end)
  end

  @doc """
  Paper needed for one box: `2 * (sum of side areas) + smallest side area`.

  Pairs each dimension with the previous one cyclically (index -1 wraps to
  the last element), preserving the original's behaviour for any number of
  dimensions.
  """
  def paper_for_box(dim) do
    sides =
      0..(length(dim) - 1)
      |> Enum.map(fn i -> Enum.at(dim, i) * Enum.at(dim, i - 1) end)

    2 * Enum.sum(sides) + Enum.min(sides)
  end

  @doc """
  Ribbon needed for one box: the perimeter across all but the largest
  dimension (the smallest face) plus the box's volume for the bow.
  """
  def ribbon_for_box(dim) do
    perimeter =
      dim
      |> Enum.sort()
      |> Enum.drop(-1)
      |> Enum.map(&(2 * &1))
      |> Enum.sum()

    bow = Enum.reduce(dim, 1, &*/2)

    perimeter + bow
  end
end
| 26.421053 | 84 | 0.623506 |
9e4e814fd5ca221a6da90f6b32e44ec592ba5a12 | 186 | ex | Elixir | lib/bio/virus/generator.ex | xire28/bio | c3266b551f8ac855653d6de7f13fd566f2daa549 | [
"MIT"
] | null | null | null | lib/bio/virus/generator.ex | xire28/bio | c3266b551f8ac855653d6de7f13fd566f2daa549 | [
"MIT"
] | null | null | null | lib/bio/virus/generator.ex | xire28/bio | c3266b551f8ac855653d6de7f13fd566f2daa549 | [
"MIT"
] | null | null | null | defmodule Bio.Virus.Generator do
alias Bio.{Physic, Virus}
@min_mass 5
@max_mass 10
def create(), do: Virus.new(Physic.Generator.create(Enum.random(@min_mass..@max_mass)))
end
| 20.666667 | 89 | 0.725806 |
9e4ea8a9b1cb26ce2797e659db2bd0f9029a4ae5 | 3,231 | ex | Elixir | clients/fitness/lib/google_api/fitness/v1/model/session.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/fitness/lib/google_api/fitness/v1/model/session.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/fitness/lib/google_api/fitness/v1/model/session.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
# Auto-generated API model; field list mirrors the attributes documented in
# the moduledoc below. Edit the generator, not this file.
defmodule GoogleApi.Fitness.V1.Model.Session do
  @moduledoc """
  Sessions contain metadata, such as a user-friendly name and time interval information.

  ## Attributes

  *   `activeTimeMillis` (*type:* `String.t`, *default:* `nil`) - Session active time. While start_time_millis and end_time_millis define the full session time, the active time can be shorter and specified by active_time_millis. If the inactive time during the session is known, it should also be inserted via a com.google.activity.segment data point with a STILL activity value
  *   `activityType` (*type:* `integer()`, *default:* `nil`) - The type of activity this session represents.
  *   `application` (*type:* `GoogleApi.Fitness.V1.Model.Application.t`, *default:* `nil`) - The application that created the session.
  *   `description` (*type:* `String.t`, *default:* `nil`) - A description for this session.
  *   `endTimeMillis` (*type:* `String.t`, *default:* `nil`) - An end time, in milliseconds since epoch, inclusive.
  *   `id` (*type:* `String.t`, *default:* `nil`) - A client-generated identifier that is unique across all sessions owned by this particular user.
  *   `modifiedTimeMillis` (*type:* `String.t`, *default:* `nil`) - A timestamp that indicates when the session was last modified.
  *   `name` (*type:* `String.t`, *default:* `nil`) - A human readable name of the session.
  *   `startTimeMillis` (*type:* `String.t`, *default:* `nil`) - A start time, in milliseconds since epoch, inclusive.
  """

  # ModelBase supplies the `field/2` macro and JSON (de)serialization glue.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :activeTimeMillis => String.t(),
          :activityType => integer(),
          :application => GoogleApi.Fitness.V1.Model.Application.t(),
          :description => String.t(),
          :endTimeMillis => String.t(),
          :id => String.t(),
          :modifiedTimeMillis => String.t(),
          :name => String.t(),
          :startTimeMillis => String.t()
        }

  # Each `field` declares one JSON attribute; `as:` routes nested objects
  # through their own model module when decoding.
  field(:activeTimeMillis)
  field(:activityType)
  field(:application, as: GoogleApi.Fitness.V1.Model.Application)
  field(:description)
  field(:endTimeMillis)
  field(:id)
  field(:modifiedTimeMillis)
  field(:name)
  field(:startTimeMillis)
end
# Poison JSON decoding delegates to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.Fitness.V1.Model.Session do
  def decode(value, options) do
    GoogleApi.Fitness.V1.Model.Session.decode(value, options)
  end
end

# Poison JSON encoding delegates to the shared Gax ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Fitness.V1.Model.Session do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 45.507042 | 378 | 0.70195 |
9e4ec9ac13da5bc28e12e95ebb99d31d8a504444 | 637 | exs | Elixir | apps/man_api/priv/repo/migrations/20181222092056_create_reimbursement_contract_request_template.exs | edenlabllc/man.api.public | 010016c5ecc209413a56ee1f8e9e6fa31da8de18 | [
"MIT"
] | null | null | null | apps/man_api/priv/repo/migrations/20181222092056_create_reimbursement_contract_request_template.exs | edenlabllc/man.api.public | 010016c5ecc209413a56ee1f8e9e6fa31da8de18 | [
"MIT"
] | null | null | null | apps/man_api/priv/repo/migrations/20181222092056_create_reimbursement_contract_request_template.exs | edenlabllc/man.api.public | 010016c5ecc209413a56ee1f8e9e6fa31da8de18 | [
"MIT"
] | null | null | null | defmodule Man.Repo.Migrations.CreateReimbursementContractRequestTemplate do
use Ecto.Migration

import Ecto.Changeset

alias Man.Templates.Template
alias Man.Repo

# NOTE(review): this is a data migration — it seeds a Template row instead of
# altering schema, and it depends on the application's Template schema module,
# which is fragile if the schema changes after this migration is written.
# `Repo.insert!/1` also makes the migration effectively irreversible.
def change do
  %Template{}
  |> cast(
    # Attributes of the seeded "Reimbursement contract request printout form"
    # template; locales are cast separately as embeds below.
    %{
      "title" => "RCRPF",
      "description" => "Reimbursement contract request printout form",
      "syntax" => "iex",
      "locales" => [%{"code" => "uk_UA", "params" => %{}}],
      "body" =>
        "<!DOCTYPE html><html>Reimbursement contract request printout form</html>"
    },
    # Permitted fields ("id" is allowed but absent from the attrs above).
    ~w(id title description syntax body)a
  )
  |> cast_embed(:locales)
  |> Repo.insert!()
end
end
| 25.48 | 84 | 0.601256 |
9e4ee2ce4256ea921b3cda65772c99ae46d70c34 | 1,211 | ex | Elixir | lib/incident/event_store_supervisor.ex | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | lib/incident/event_store_supervisor.ex | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | lib/incident/event_store_supervisor.ex | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | defmodule Incident.EventStoreSupervisor do
@moduledoc false

use Supervisor

alias Incident.EventStore
alias Incident.EventStore.{InMemory, Postgres}

@type adapter :: InMemory.Adapter | Postgres.Adapter
@type lock_manager :: InMemory.LockManager | Postgres.LockManager

@doc """
Starts the Event Store Supervisor that monitors the Event Store and Lock Manager.
"""
@spec start_link(map()) :: Supervisor.on_start()
def start_link(config) do
  Supervisor.start_link(__MODULE__, config, name: __MODULE__)
end

@impl true
def init(%{adapter: adapter, options: options} = config) do
  # Inject the adapter-matched lock manager into the event store's options,
  # appending when the caller already supplied an :options keyword list.
  lock_manager = [lock_manager: lock_manager_for(adapter)]
  event_store_config = Map.update(config, :options, lock_manager, &(&1 ++ lock_manager))
  lock_manager_config = Keyword.get(options, :lock_manager_config, [])

  # Lock manager starts first so the event store can rely on it at startup.
  children = [
    {lock_manager_for(adapter), lock_manager_config},
    {EventStore, event_store_config}
  ]

  Supervisor.init(children, strategy: :one_for_one)
end

# Maps a storage adapter module to its companion lock-manager module.
@spec lock_manager_for(adapter()) :: lock_manager()
defp lock_manager_for(Postgres.Adapter), do: Postgres.LockManager
defp lock_manager_for(InMemory.Adapter), do: InMemory.LockManager
end
| 30.275 | 90 | 0.74071 |
9e4ef16ee53ca5ec7b9cbc02e70afbf400f4ed6e | 444 | exs | Elixir | test/contour_builder_test.exs | NAISorg/conrex | bc9c6297eec2f8e4f135c38773459ceeb97938c9 | [
"MIT"
] | 51 | 2020-11-19T00:41:53.000Z | 2022-03-15T13:50:54.000Z | test/contour_builder_test.exs | NAISorg/conrex | bc9c6297eec2f8e4f135c38773459ceeb97938c9 | [
"MIT"
] | 1 | 2020-11-12T22:35:08.000Z | 2020-11-16T21:48:34.000Z | test/contour_builder_test.exs | NAISorg/conrex | bc9c6297eec2f8e4f135c38773459ceeb97938c9 | [
"MIT"
] | 1 | 2022-01-11T21:58:23.000Z | 2022-01-11T21:58:23.000Z | defmodule Conrex.ContourBuilderTest do
use ExUnit.Case
alias Conrex.ContourBuilder
test "it builds rings from a segment list" do
rings = ContourBuilder.build_rings([
{{0, 0}, {0, 1}},
{{3, 6}, {2, 5}},
{{1, 1}, {0, 0}},
{{2, 5}, {2, 6}},
{{0, 1}, {1, 1}},
{{2, 6}, {3, 6}}
])
assert rings == [
[{3, 6}, {2, 5}, {2, 6}, {3, 6}],
[{1, 1}, {0, 0}, {0, 1}, {1, 1}]
]
end
end
| 22.2 | 47 | 0.439189 |
9e4efd4cc64ffb095712976ae505b789740ce906 | 19,776 | ex | Elixir | clients/content/lib/google_api/content/v2/api/datafeeds.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/api/datafeeds.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/api/datafeeds.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Api.Datafeeds do
  @moduledoc """
  API calls for all endpoints tagged `Datafeeds`.
  """

  # NOTE(review): this module is auto-generated (see the file header); do not
  # hand-edit it — regenerate from the API discovery document instead.

  alias GoogleApi.Content.V2.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Library version reported with each request, read from mix.exs at compile time.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Deletes, fetches, gets, inserts and updates multiple datafeeds in a single request.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:dryRun` (*type:* `boolean()`) - Flag to simulate a request like in a live environment. If set to true, dry-run mode checks the validity of the request and returns errors (if any).
      *   `:body` (*type:* `GoogleApi.Content.V2.Model.DatafeedsCustomBatchRequest.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V2.Model.DatafeedsCustomBatchResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeeds_custombatch(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.DatafeedsCustomBatchResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def content_datafeeds_custombatch(connection, optional_params \\ [], opts \\ []) do
    # Maps each recognized option to where it belongs in the request
    # (query string vs. request body).
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :dryRun => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/datafeeds/batch", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.Content.V2.Model.DatafeedsCustomBatchResponse{}]
    )
  end

  @doc """
  Deletes a datafeed configuration from your Merchant Center account.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account that manages the datafeed. This account cannot be a multi-client account.
  *   `datafeed_id` (*type:* `String.t`) - The ID of the datafeed.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:dryRun` (*type:* `boolean()`) - Flag to simulate a request like in a live environment. If set to true, dry-run mode checks the validity of the request and returns errors (if any).
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeeds_delete(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
          {:ok, nil} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()}
  def content_datafeeds_delete(
        connection,
        merchant_id,
        datafeed_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :dryRun => :query
    }

    request =
      Request.new()
      |> Request.method(:delete)
      |> Request.url("/{merchantId}/datafeeds/{datafeedId}", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1),
        "datafeedId" => URI.encode(datafeed_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    # DELETE returns an empty body, so skip struct decoding.
    |> Response.decode(opts ++ [decode: false])
  end

  @doc """
  Invokes a fetch for the datafeed in your Merchant Center account.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account that manages the datafeed. This account cannot be a multi-client account.
  *   `datafeed_id` (*type:* `String.t`) - The ID of the datafeed to be fetched.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:dryRun` (*type:* `boolean()`) - Flag to simulate a request like in a live environment. If set to true, dry-run mode checks the validity of the request and returns errors (if any).
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V2.Model.DatafeedsFetchNowResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeeds_fetchnow(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Content.V2.Model.DatafeedsFetchNowResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def content_datafeeds_fetchnow(
        connection,
        merchant_id,
        datafeed_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :dryRun => :query
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/{merchantId}/datafeeds/{datafeedId}/fetchNow", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1),
        "datafeedId" => URI.encode(datafeed_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Content.V2.Model.DatafeedsFetchNowResponse{}])
  end

  @doc """
  Retrieves a datafeed configuration from your Merchant Center account.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account that manages the datafeed. This account cannot be a multi-client account.
  *   `datafeed_id` (*type:* `String.t`) - The ID of the datafeed.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V2.Model.Datafeed{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeeds_get(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.Datafeed.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def content_datafeeds_get(
        connection,
        merchant_id,
        datafeed_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{merchantId}/datafeeds/{datafeedId}", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1),
        "datafeedId" => URI.encode(datafeed_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Content.V2.Model.Datafeed{}])
  end

  @doc """
  Registers a datafeed configuration with your Merchant Center account.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account that manages the datafeed. This account cannot be a multi-client account.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:dryRun` (*type:* `boolean()`) - Flag to simulate a request like in a live environment. If set to true, dry-run mode checks the validity of the request and returns errors (if any).
      *   `:body` (*type:* `GoogleApi.Content.V2.Model.Datafeed.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V2.Model.Datafeed{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeeds_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.Datafeed.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def content_datafeeds_insert(connection, merchant_id, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :dryRun => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/{merchantId}/datafeeds", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Content.V2.Model.Datafeed{}])
  end

  @doc """
  Lists the configurations for datafeeds in your Merchant Center account.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account that manages the datafeeds. This account cannot be a multi-client account.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:maxResults` (*type:* `integer()`) - The maximum number of products to return in the response, used for paging.
      *   `:pageToken` (*type:* `String.t`) - The token returned by the previous request.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V2.Model.DatafeedsListResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeeds_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.DatafeedsListResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def content_datafeeds_list(connection, merchant_id, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :maxResults => :query,
      :pageToken => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{merchantId}/datafeeds", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Content.V2.Model.DatafeedsListResponse{}])
  end

  @doc """
  Updates a datafeed configuration of your Merchant Center account.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account that manages the datafeed. This account cannot be a multi-client account.
  *   `datafeed_id` (*type:* `String.t`) - The ID of the datafeed.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:dryRun` (*type:* `boolean()`) - Flag to simulate a request like in a live environment. If set to true, dry-run mode checks the validity of the request and returns errors (if any).
      *   `:body` (*type:* `GoogleApi.Content.V2.Model.Datafeed.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V2.Model.Datafeed{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeeds_update(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.Datafeed.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def content_datafeeds_update(
        connection,
        merchant_id,
        datafeed_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :dryRun => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:put)
      |> Request.url("/{merchantId}/datafeeds/{datafeedId}", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1),
        "datafeedId" => URI.encode(datafeed_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Content.V2.Model.Datafeed{}])
  end
end
| 44.945455 | 191 | 0.631169 |
9e4efdb9bda558bec7ed9bebde2066eb13a9b436 | 5,231 | exs | Elixir | test/oidc/auth_test.exs | tanguilp/oidc | 3a06cc72c88e7690cfee1ba35501fb06d2009756 | [
"Apache-2.0"
] | null | null | null | test/oidc/auth_test.exs | tanguilp/oidc | 3a06cc72c88e7690cfee1ba35501fb06d2009756 | [
"Apache-2.0"
] | 5 | 2020-05-30T14:09:52.000Z | 2022-03-09T17:12:47.000Z | test/oidc/auth_test.exs | tanguilp/oidc | 3a06cc72c88e7690cfee1ba35501fb06d2009756 | [
"Apache-2.0"
] | 3 | 2021-09-03T07:58:07.000Z | 2021-09-21T12:27:29.000Z | defmodule OIDC.AuthTest do
use ExUnit.Case
alias OIDC.Auth
import OIDCTest.Support.Helpers
@nonce "a nonce"
setup_all do
client = client_conf()
op = op_metadata()
Tesla.Mock.mock_global(fn
%{method: :get, url: "https://example.com/.well-known/openid-configuration"} ->
%Tesla.Env{status: 404}
%{method: :post, url: "https://example.com/token"} ->
resp = %{
"access_token" => "an_access_token",
"token_type" => "Bearer",
"expires_in" => 600,
"refresh_token" => "a_refresh_token",
"id_token" => signed_id_token(op, client, nonce: @nonce)
}
resp_headers = [
{"Cache-Control", "no-cache, no-store, max-age=0, stale-while-revalidate=300"},
{"pragma", "no-cache"}
]
%Tesla.Env{status: 200, body: resp, headers: resp_headers}
end)
[client: client, op: op]
end
describe "verify_challenge/2" do
setup [:create_challenge]
test "valid response with response type code", %{challenge: challenge} do
challenge = Map.put(challenge, :response_type, "code") |> Auth.gen_challenge()
op_response = %{
"code" => "authz_code"
}
assert {:ok, _} = Auth.verify_response(op_response, challenge)
end
test "valid response with response type id_token", %{
client: client,
op: op,
challenge: challenge
} do
challenge =
Map.put(challenge, :response_type, "id_token")
|> Auth.gen_challenge()
|> Map.put(:nonce, @nonce)
op_response = %{
"id_token" => signed_id_token(op, client, nonce: @nonce)
}
assert {:ok, _} = Auth.verify_response(op_response, challenge)
end
test "valid response with response type id_token token", %{
client: client,
op: op,
challenge: challenge
} do
challenge =
Map.put(challenge, :response_type, "id_token token")
|> Auth.gen_challenge()
|> Map.put(:nonce, @nonce)
op_response = %{
"id_token" =>
signed_id_token(
op,
client,
nonce: @nonce,
at_hash: token_hash("abcdef", List.first(op["jwks"]["keys"]))
),
"access_token" => "abcdef",
"token_type" => "bearer"
}
assert {:ok, _} = Auth.verify_response(op_response, challenge)
end
test "valid response with response type code id_token", %{
client: client,
op: op,
challenge: challenge
} do
challenge =
Map.put(challenge, :response_type, "code id_token")
|> Auth.gen_challenge()
|> Map.put(:nonce, @nonce)
op_response = %{
"code" => "authz_code",
"id_token" =>
signed_id_token(
op,
client,
nonce: @nonce,
c_hash: token_hash("authz_code", List.first(op["jwks"]["keys"]))
)
}
assert {:ok, _} = Auth.verify_response(op_response, challenge)
end
test "valid response with response type code token", %{challenge: challenge} do
challenge =
Map.put(challenge, :response_type, "code token")
|> Auth.gen_challenge()
|> Map.put(:nonce, @nonce)
op_response = %{
"code" => "authz_code",
"access_token" => "abcdef",
"token_type" => "bearer"
}
assert {:ok, _} = Auth.verify_response(op_response, challenge)
end
test "valid response with response type code id_token token", %{
client: client,
op: op,
challenge: challenge
} do
challenge =
Map.put(challenge, :response_type, "code id_token token")
|> Auth.gen_challenge()
|> Map.put(:nonce, @nonce)
op_response = %{
"code" => "authz_code",
"access_token" => "abcdef",
"token_type" => "bearer",
"id_token" =>
signed_id_token(
op,
client,
nonce: @nonce,
at_hash: token_hash("abcdef", List.first(op["jwks"]["keys"])),
c_hash: token_hash("authz_code", List.first(op["jwks"]["keys"]))
)
}
assert {:ok, _} = Auth.verify_response(op_response, challenge)
end
end
describe "request_uri/2" do
setup [:create_challenge]
test "authorization endpoint retains its query parameters", %{challenge: challenge_opts} do
challenge = Map.put(challenge_opts, :response_type, "code") |> Auth.gen_challenge()
request_uri = Auth.request_uri(challenge, challenge_opts) |> URI.to_string()
assert request_uri =~ "a=1"
end
end
defp create_challenge(%{client: client, op: op} = context) do
challenge = %{
issuer: op["issuer"],
client_id: client["client_id"],
client_config: OIDCTest.Support.Helpers,
redirect_uri: "https://rp.com/redirect_uri",
server_metadata: op,
id_token_iat_max_time_gap: 5
}
Map.put(context, :challenge, challenge)
end
defp token_hash(token, jwk) do
hashed_token = :crypto.hash(JOSEUtils.JWK.sig_alg_digest(jwk), token)
hashed_token
|> binary_part(0, div(byte_size(hashed_token), 2))
|> Base.url_encode64(padding: false)
end
end
| 27.103627 | 95 | 0.57943 |
9e4f1e7838a861875a5c70a6ee3577e16d1e897a | 16,440 | exs | Elixir | test/elixir/test/partition_mango_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | null | null | null | test/elixir/test/partition_mango_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | null | null | null | test/elixir/test/partition_mango_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | null | null | null | defmodule PartitionMangoTest do
use CouchTestCase
import PartitionHelpers, except: [get_partitions: 1]
@moduledoc """
Test Partition functionality for mango
"""
def create_index(db_name, fields \\ ["some"], opts \\ %{}) do
default_index = %{
index: %{
fields: fields
}
}
index = Enum.into(opts, default_index)
resp = Couch.post("/#{db_name}/_index", body: index)
assert resp.status_code == 200
assert resp.body["result"] == "created"
assert resp.body["id"] != nil
assert resp.body["name"] != nil
# wait until the database reports the index as available
retry_until(fn ->
get_index(db_name, resp.body["id"], resp.body["name"]) != nil
end)
end
def list_indexes(db_name) do
resp = Couch.get("/#{db_name}/_index")
assert resp.status_code == 200
resp.body["indexes"]
end
def get_index(db_name, ddocid, name) do
indexes = list_indexes(db_name)
Enum.find(indexes, fn(index) ->
match?(%{"ddoc" => ^ddocid, "name" => ^name}, index)
end)
end
def get_partitions(resp) do
%{:body => %{"docs" => docs}} = resp
Enum.map(docs, fn doc ->
[partition, _] = String.split(doc["_id"], ":")
partition
end)
end
@tag :with_partitioned_db
test "query using _id and partition works", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name)
url = "/#{db_name}/_partition/foo/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
_id: %{
"$gt": "foo:"
}
},
limit: 20
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 20
assert_correct_partition(partitions, "foo")
url = "/#{db_name}/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
_id: %{
"$lt": "foo:"
}
},
limit: 20
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 20
assert_correct_partition(partitions, "bar")
end
@tag :with_partitioned_db
test "query using _id works for global and local query", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name)
url = "/#{db_name}/_partition/foo/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
_id: %{
"$gt": 0
}
},
limit: 20
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 20
assert_correct_partition(partitions, "foo")
url = "/#{db_name}/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
_id: %{
"$gt": 0
}
},
limit: 20
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 20
assert_correct_partition(partitions, "bar")
end
@tag :with_partitioned_db
test "query with partitioned:true using index and $eq", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name)
url = "/#{db_name}/_partition/foo/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
some: "field"
},
limit: 20
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 20
assert_correct_partition(partitions, "foo")
url = "/#{db_name}/_partition/bar/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
some: "field"
},
limit: 20
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 20
assert_correct_partition(partitions, "bar")
end
@tag :with_partitioned_db
test "partitioned query using _all_docs with $eq", context do
db_name = context[:db_name]
create_partition_docs(db_name)
url = "/#{db_name}/_partition/foo/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
some: "field"
},
limit: 20
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 20
assert_correct_partition(partitions, "foo")
url = "/#{db_name}/_partition/bar/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
some: "field"
},
limit: 20
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 20
assert_correct_partition(partitions, "bar")
end
@tag :with_db
test "non-partitioned query using _all_docs and $eq", context do
db_name = context[:db_name]
create_partition_docs(db_name)
url = "/#{db_name}/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
some: "field"
},
skip: 40,
limit: 5
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 5
assert partitions == ["bar", "bar", "bar", "bar", "bar"]
url = "/#{db_name}/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
some: "field"
},
skip: 50,
limit: 5
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 5
assert partitions == ["foo", "foo", "foo", "foo", "foo"]
end
@tag :with_partitioned_db
test "partitioned query using index and range scan", context do
db_name = context[:db_name]
create_partition_docs(db_name, "foo", "bar42")
create_index(db_name, ["value"])
url = "/#{db_name}/_partition/foo/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
}
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 5
assert_correct_partition(partitions, "foo")
url = "/#{db_name}/_partition/bar42/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
}
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 5
assert_correct_partition(partitions, "bar42")
end
@tag :with_partitioned_db
test "partitioned query using _all_docs and range scan", context do
db_name = context[:db_name]
create_partition_docs(db_name)
url = "/#{db_name}/_partition/foo/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
}
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 5
assert_correct_partition(partitions, "foo")
url = "/#{db_name}/_partition/bar/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
}
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 5
assert_correct_partition(partitions, "bar")
end
@tag :with_partitioned_db
test "partitioned query using _all_docs", context do
db_name = context[:db_name]
create_partition_docs(db_name, "foo", "bar42")
url = "/#{db_name}/_partition/foo/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
}
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 5
assert_correct_partition(partitions, "foo")
url = "/#{db_name}/_partition/bar42/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
}
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 5
assert_correct_partition(partitions, "bar42")
end
@tag :with_partitioned_db
test "explain works with partitions", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name, ["some"])
url = "/#{db_name}/_partition/foo/_explain"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
}
}
)
%{:body => body} = resp
assert body["index"]["name"] == "_all_docs"
assert body["mrargs"]["partition"] == "foo"
url = "/#{db_name}/_partition/bar/_explain"
resp =
Couch.post(
url,
body: %{
selector: %{
some: "field"
}
}
)
%{:body => body} = resp
assert body["index"]["def"] == %{"fields" => [%{"some" => "asc"}]}
assert body["mrargs"]["partition"] == "bar"
end
@tag :with_db
test "explain works with non partitioned db", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name, ["some"])
url = "/#{db_name}/_explain"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
}
}
)
%{:body => body} = resp
assert body["index"]["name"] == "_all_docs"
assert body["mrargs"]["partition"] == :null
resp =
Couch.post(
url,
body: %{
selector: %{
some: "field"
}
}
)
%{:body => body} = resp
assert body["index"]["def"] == %{"fields" => [%{"some" => "asc"}]}
assert body["mrargs"]["partition"] == :null
end
@tag :with_partitioned_db
test "partitioned query using bookmarks", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name, ["value"])
url = "/#{db_name}/_partition/foo/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
},
limit: 3
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 3
assert_correct_partition(partitions, "foo")
%{:body => %{"bookmark" => bookmark}} = resp
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
},
limit: 3,
bookmark: bookmark
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 2
assert_correct_partition(partitions, "foo")
end
@tag :with_partitioned_db
test "partitioned query with query server config set", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name, ["value"])
# this is to test that we bypass partition_query_limit for mango
set_config({"query_server_config", "partition_query_limit", "1"})
url = "/#{db_name}/_partition/foo/_find"
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
},
limit: 3
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 3
assert_correct_partition(partitions, "foo")
%{:body => %{"bookmark" => bookmark}} = resp
resp =
Couch.post(
url,
body: %{
selector: %{
value: %{
"$gte": 6,
"$lt": 16
}
},
limit: 3,
bookmark: bookmark
}
)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 2
assert_correct_partition(partitions, "foo")
end
@tag :with_partitioned_db
test "global query uses global index", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name, ["some"], %{partitioned: false})
url = "/#{db_name}/_explain"
selector = %{
selector: %{
some: "field"
},
limit: 100
}
resp = Couch.post(url, body: selector)
assert resp.status_code == 200
%{:body => body} = resp
assert body["index"]["def"] == %{"fields" => [%{"some" => "asc"}]}
url = "/#{db_name}/_find"
resp = Couch.post(url, body: selector)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 100
end
@tag :with_partitioned_db
test "global query does not use partition index", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name, ["some"])
url = "/#{db_name}/_explain"
selector = %{
selector: %{
some: "field"
},
limit: 100
}
resp = Couch.post(url, body: selector)
%{:body => body} = resp
assert body["index"]["name"] == "_all_docs"
url = "/#{db_name}/_find"
resp = Couch.post(url, body: selector)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 100
end
@tag :with_partitioned_db
test "partitioned query does not use global index", context do
db_name = context[:db_name]
create_partition_docs(db_name)
create_index(db_name, ["some"], %{partitioned: false})
url = "/#{db_name}/_partition/foo/_explain"
selector = %{
selector: %{
some: "field"
},
limit: 50
}
resp = Couch.post(url, body: selector)
assert resp.status_code == 200
%{:body => body} = resp
assert body["index"]["name"] == "_all_docs"
url = "/#{db_name}/_partition/foo/_find"
resp = Couch.post(url, body: selector)
assert resp.status_code == 200
partitions = get_partitions(resp)
assert length(partitions) == 50
assert_correct_partition(partitions, "foo")
end
@tag :with_partitioned_db
test "partitioned _find and _explain with missing partition returns 400", context do
db_name = context[:db_name]
selector = %{
selector: %{
some: "field"
}
}
resp = Couch.get("/#{db_name}/_partition/_find", body: selector)
validate_missing_partition(resp)
resp = Couch.get("/#{db_name}/_partition/_explain", body: selector)
validate_missing_partition(resp)
end
defp validate_missing_partition(resp) do
assert resp.status_code == 400
%{:body => %{"reason" => reason}} = resp
assert Regex.match?(~r/Partition must not start/, reason)
end
@tag :with_partitioned_db
test "partitioned query sends correct errors for sort errors", context do
db_name = context[:db_name]
create_partition_docs(db_name)
url = "/#{db_name}/_partition/foo/_find"
selector = %{
selector: %{
some: "field"
},
sort: ["some"],
limit: 50
}
resp = Couch.post(url, body: selector)
assert resp.status_code == 400
%{:body => %{"reason" => reason}} = resp
assert Regex.match?(~r/No partitioned index exists for this sort/, reason)
url = "/#{db_name}/_find"
resp = Couch.post(url, body: selector)
assert resp.status_code == 400
%{:body => %{"reason" => reason}} = resp
assert Regex.match?(~r/No global index exists for this sort/, reason)
end
end
| 22.306649 | 86 | 0.538504 |
9e4f47aaadf4a4611dd08dbc5daadd84ac532f84 | 358 | exs | Elixir | priv/repo/seeds.exs | mapmeld/superfund-me | 8bd1aeb78504e6ae068cf57dbefca05bebbb2b07 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | mapmeld/superfund-me | 8bd1aeb78504e6ae068cf57dbefca05bebbb2b07 | [
"MIT"
] | 3 | 2016-09-09T21:09:17.000Z | 2017-09-13T17:55:20.000Z | priv/repo/seeds.exs | Georeactor/superfund-me | 8bd1aeb78504e6ae068cf57dbefca05bebbb2b07 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Superfundme.Repo.insert!(%Superfundme.SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.833333 | 61 | 0.712291 |
9e4f575d7c896a59e6560d6863c8fabc32b4b5e3 | 3,622 | ex | Elixir | clients/dataproc/lib/google_api/dataproc/v1/model/cluster.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/cluster.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/cluster.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# NOTE: auto-generated model (see the header above); edits here are normally
# overwritten by the code generator.
defmodule GoogleApi.Dataproc.V1.Model.Cluster do
  @moduledoc """
  Describes the identifying information, config, and status of a cluster of Compute Engine instances.
  ## Attributes
  - clusterName (String.t): Required. The cluster name. Cluster names within a project must be unique. Names of deleted clusters can be reused. Defaults to: `null`.
  - clusterUuid (String.t): Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc generates this value when it creates the cluster. Defaults to: `null`.
  - config (ClusterConfig): Required. The cluster config. Note that Cloud Dataproc may set default values, and values may change when clusters are updated. Defaults to: `null`.
  - labels (%{optional(String.t) => String.t}): Optional. The labels to associate with this cluster. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a cluster. Defaults to: `null`.
  - metrics (ClusterMetrics): Contains cluster daemon metrics such as HDFS and YARN stats.Beta Feature: This report is available for testing purposes only. It may be changed before final release. Defaults to: `null`.
  - projectId (String.t): Required. The Google Cloud Platform project ID that the cluster belongs to. Defaults to: `null`.
  - status (ClusterStatus): Output only. Cluster status. Defaults to: `null`.
  - statusHistory ([ClusterStatus]): Output only. The previous cluster status. Defaults to: `null`.
  """
  use GoogleApi.Gax.ModelBase

  # Struct type mirrors the JSON resource; all keys default to nil ("unset").
  @type t :: %__MODULE__{
          :clusterName => any(),
          :clusterUuid => any(),
          :config => GoogleApi.Dataproc.V1.Model.ClusterConfig.t(),
          :labels => map(),
          :metrics => GoogleApi.Dataproc.V1.Model.ClusterMetrics.t(),
          :projectId => any(),
          :status => GoogleApi.Dataproc.V1.Model.ClusterStatus.t(),
          :statusHistory => list(GoogleApi.Dataproc.V1.Model.ClusterStatus.t())
        }

  # field/1-3 come from GoogleApi.Gax.ModelBase and declare how each JSON
  # attribute is (de)serialized; `as:` nests another model, `type: :list`
  # decodes a JSON array of that model, `type: :map` keeps a plain map.
  field(:clusterName)
  field(:clusterUuid)
  field(:config, as: GoogleApi.Dataproc.V1.Model.ClusterConfig)
  field(:labels, type: :map)
  field(:metrics, as: GoogleApi.Dataproc.V1.Model.ClusterMetrics)
  field(:projectId)
  field(:status, as: GoogleApi.Dataproc.V1.Model.ClusterStatus)
  field(:statusHistory, as: GoogleApi.Dataproc.V1.Model.ClusterStatus, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Dataproc.V1.Model.Cluster do
  # Delegate straight to the generated model's decode/2.
  def decode(value, options), do: GoogleApi.Dataproc.V1.Model.Cluster.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Dataproc.V1.Model.Cluster do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 52.492754 | 445 | 0.734953 |
9e4f7ddd0397f276e0216f606c5d813c41223bb7 | 15,250 | exs | Elixir | test/phoenix/integration/websocket_channels_test.exs | zorn/phoenix | ac88958550fbd861e2f1e1af6e3c6b787b1a202e | [
"MIT"
] | 1 | 2019-07-15T21:58:09.000Z | 2019-07-15T21:58:09.000Z | test/phoenix/integration/websocket_channels_test.exs | zorn/phoenix | ac88958550fbd861e2f1e1af6e3c6b787b1a202e | [
"MIT"
] | null | null | null | test/phoenix/integration/websocket_channels_test.exs | zorn/phoenix | ac88958550fbd861e2f1e1af6e3c6b787b1a202e | [
"MIT"
] | null | null | null | Code.require_file "../../support/websocket_client.exs", __DIR__
defmodule Phoenix.Integration.WebSocketChannelsTest do
use ExUnit.Case
import ExUnit.CaptureLog
alias Phoenix.Integration.WebsocketClient
alias Phoenix.Socket.{V1, V2, Message}
alias __MODULE__.Endpoint
@moduletag :capture_log
@port 5807
Application.put_env(:phoenix, Endpoint, [
https: false,
http: [port: @port],
debug_errors: false,
server: true,
pubsub: [adapter: Phoenix.PubSub.PG2, name: __MODULE__]
])
defp lobby do
"room:lobby#{System.unique_integer()}"
end
  defmodule RoomChannel do
    use Phoenix.Channel
    # Outgoing "new_msg" broadcasts are routed through handle_out/3 below.
    intercept ["new_msg"]

    def join(topic, message, socket) do
      # Register under the topic name so tests can look the channel pid up
      # by topic (see Process.whereis/1 in the test cases).
      Process.register(self(), String.to_atom(topic))
      send(self(), {:after_join, message})
      {:ok, socket}
    end

    # Deferred join work: announce the user and confirm the connection.
    def handle_info({:after_join, message}, socket) do
      broadcast socket, "user_entered", %{user: message["user"]}
      push socket, "joined", Map.merge(%{status: "connected"}, socket.assigns)
      {:noreply, socket}
    end

    def handle_in("new_msg", message, socket) do
      broadcast! socket, "new_msg", message
      {:reply, :ok, socket}
    end

    # Deliberate crash, used by the "phx_error" tests.
    def handle_in("boom", _message, _socket) do
      raise "boom"
    end

    # Intercepted broadcast: tag the payload with the transport so tests
    # can assert which transport delivered it.
    def handle_out("new_msg", payload, socket) do
      push socket, "new_msg", Map.put(payload, "transport", inspect(socket.transport))
      {:noreply, socket}
    end

    def terminate(_reason, socket) do
      push socket, "you_left", %{message: "bye!"}
      :ok
    end
  end
defmodule UserSocketConnectInfo do
use Phoenix.Socket
channel "room:*", RoomChannel
def connect(params, socket, connect_info) do
unless params["logging"] == "enabled", do: Logger.disable(self())
address = Tuple.to_list(connect_info.peer_data.address) |> Enum.join(".")
uri = Map.from_struct(connect_info.uri)
x_headers = Enum.into(connect_info.x_headers, %{})
connect_info =
connect_info
|> update_in([:peer_data], &Map.put(&1, :address, address))
|> Map.put(:uri, uri)
|> Map.put(:x_headers, x_headers)
socket =
socket
|> assign(:user_id, params["user_id"])
|> assign(:connect_info, connect_info)
{:ok, socket}
end
def id(socket) do
if id = socket.assigns.user_id, do: "user_sockets:#{id}"
end
end
defmodule UserSocket do
use Phoenix.Socket
channel "room:*", RoomChannel
def connect(%{"reject" => "true"}, _socket) do
:error
end
def connect(params, socket) do
unless params["logging"] == "enabled", do: Logger.disable(self())
{:ok, assign(socket, :user_id, params["user_id"])}
end
def id(socket) do
if id = socket.assigns.user_id, do: "user_sockets:#{id}"
end
end
  defmodule Endpoint do
    use Phoenix.Endpoint, otp_app: :phoenix

    # Default websocket mount used by most tests; short timeout so the
    # "client goes quiet" test finishes fast.
    socket "/ws", UserSocket,
      websocket: [
        check_origin: ["//example.com"],
        timeout: 200
      ]

    # Same socket on a nested path, to verify multi-segment mounts.
    socket "/ws/admin", UserSocket,
      websocket: [
        check_origin: ["//example.com"],
        timeout: 200
      ]

    # Socket that captures connect_info (x-* headers, peer data, URI).
    socket "/ws/connect_info", UserSocketConnectInfo,
      websocket: [
        check_origin: ["//example.com"],
        timeout: 200,
        connect_info: [:x_headers, :peer_data, :uri]
      ]
  end
  setup_all do
    # Start the endpoint once for the whole suite; capture_log swallows
    # the startup noise.
    capture_log fn -> Endpoint.start_link() end
    :ok
  end
for {serializer, vsn, join_ref} <- [{V1.JSONSerializer, "1.0.0", nil}, {V2.JSONSerializer, "2.0.0", "1"}] do
@serializer serializer
@vsn vsn
@vsn_path "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}"
@join_ref join_ref
describe "with #{vsn} serializer #{inspect serializer}" do
test "endpoint handles multiple mount segments" do
{:ok, sock} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/admin/websocket?vsn=#{@vsn}", @serializer)
WebsocketClient.join(sock, "room:admin-lobby1", %{})
assert_receive %Message{event: "phx_reply",
payload: %{"response" => %{}, "status" => "ok"},
join_ref: @join_ref,
ref: "1", topic: "room:admin-lobby1"}
end
test "join, leave, and event messages" do
{:ok, sock} = WebsocketClient.start_link(self(), @vsn_path, @serializer)
lobby = lobby()
WebsocketClient.join(sock, lobby, %{})
assert_receive %Message{event: "phx_reply",
join_ref: @join_ref,
payload: %{"response" => %{}, "status" => "ok"},
ref: "1", topic: ^lobby}
assert_receive %Message{event: "joined",
payload: %{"status" => "connected", "user_id" => nil}}
assert_receive %Message{event: "user_entered",
payload: %{"user" => nil},
ref: nil, topic: ^lobby}
channel_pid = Process.whereis(String.to_atom(lobby))
assert channel_pid
assert Process.alive?(channel_pid)
WebsocketClient.send_event(sock, lobby, "new_msg", %{body: "hi!"})
assert_receive %Message{event: "new_msg", payload: %{"transport" => ":websocket", "body" => "hi!"}}
WebsocketClient.leave(sock, lobby, %{})
assert_receive %Message{event: "you_left", payload: %{"message" => "bye!"}}
assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}
assert_receive %Message{event: "phx_close", payload: %{}}
refute Process.alive?(channel_pid)
WebsocketClient.send_event(sock, lobby, "new_msg", %{body: "Should ignore"})
refute_receive %Message{event: "new_msg"}
assert_receive %Message{event: "phx_reply", payload: %{"response" => %{"reason" => "unmatched topic"}}}
WebsocketClient.send_event(sock, lobby, "new_msg", %{body: "Should ignore"})
refute_receive %Message{event: "new_msg"}
end
test "transport x_headers are extracted to the socket connect_info" do
extra_headers = [{"x-application", "Phoenix"}]
{:ok, sock} =
WebsocketClient.start_link(
self(),
"ws://127.0.0.1:#{@port}/ws/connect_info/websocket?vsn=#{@vsn}",
@serializer,
extra_headers
)
WebsocketClient.join(sock, lobby(), %{})
assert_receive %Message{event: "joined",
payload: %{"connect_info" =>
%{"x_headers" =>
%{"x-application" => "Phoenix"}}}}
end
test "transport peer_data is extracted to the socket connect_info" do
{:ok, sock} =
WebsocketClient.start_link(
self(),
"ws://127.0.0.1:#{@port}/ws/connect_info/websocket?vsn=#{@vsn}",
@serializer
)
WebsocketClient.join(sock, lobby(), %{})
assert_receive %Message{event: "joined",
payload: %{"connect_info" =>
%{"peer_data" =>
%{"address" => "127.0.0.1",
"port" => _,
"ssl_cert" => nil}}}}
end
test "transport uri is extracted to the socket connect_info" do
{:ok, sock} =
WebsocketClient.start_link(
self(),
"ws://127.0.0.1:#{@port}/ws/connect_info/websocket?vsn=#{@vsn}",
@serializer
)
WebsocketClient.join(sock, lobby(), %{})
assert_receive %Message{event: "joined",
payload: %{"connect_info" =>
%{"uri" =>
%{"host" => "127.0.0.1",
"path" => "/ws/connect_info/websocket",
"query" => "vsn=#{@vsn}",
"scheme" => "http",
"port" => 80}}}}
end
test "logs and filter params on join and handle_in" do
topic = "room:admin-lobby2"
{:ok, sock} = WebsocketClient.start_link(self(), "#{@vsn_path}&logging=enabled", @serializer)
log = capture_log fn ->
WebsocketClient.join(sock, topic, %{"join" => "yes", "password" => "no"})
assert_receive %Message{event: "phx_reply",
join_ref: @join_ref,
payload: %{"response" => %{}, "status" => "ok"},
ref: "1", topic: "room:admin-lobby2"}
end
assert log =~ "Parameters: %{\"join\" => \"yes\", \"password\" => \"[FILTERED]\"}"
log = capture_log fn ->
WebsocketClient.send_event(sock, topic, "new_msg", %{"in" => "yes", "password" => "no"})
assert_receive %Message{event: "phx_reply", ref: "2"}
end
assert log =~ "Parameters: %{\"in\" => \"yes\", \"password\" => \"[FILTERED]\"}"
end
test "sends phx_error if a channel server abnormally exits" do
{:ok, sock} = WebsocketClient.start_link(self(), @vsn_path, @serializer)
lobby = lobby()
WebsocketClient.join(sock, lobby, %{})
assert_receive %Message{event: "phx_reply", ref: "1", payload: %{"response" => %{}, "status" => "ok"}}
assert_receive %Message{event: "joined"}
assert_receive %Message{event: "user_entered"}
capture_log fn ->
WebsocketClient.send_event(sock, lobby, "boom", %{})
assert_receive %Message{event: "phx_error", payload: %{}, topic: ^lobby}
end
end
test "channels are terminated if transport normally exits" do
{:ok, sock} = WebsocketClient.start_link(self(), @vsn_path, @serializer)
lobby = lobby()
WebsocketClient.join(sock, lobby, %{})
assert_receive %Message{event: "phx_reply", ref: "1", payload: %{"response" => %{}, "status" => "ok"}}
assert_receive %Message{event: "joined"}
channel = Process.whereis(String.to_atom(lobby))
assert channel
Process.monitor(channel)
WebsocketClient.close(sock)
assert_receive {:DOWN, _, :process, ^channel, {:shutdown, :closed}}
end
test "refuses websocket events that haven't joined" do
{:ok, sock} = WebsocketClient.start_link(self(), @vsn_path, @serializer)
WebsocketClient.send_event(sock, lobby(), "new_msg", %{body: "hi!"})
refute_receive %Message{event: "new_msg"}
assert_receive %Message{event: "phx_reply", payload: %{"response" => %{"reason" => "unmatched topic"}}}
WebsocketClient.send_event(sock, lobby(), "new_msg", %{body: "Should ignore"})
refute_receive %Message{event: "new_msg"}
end
test "refuses unallowed origins" do
capture_log fn ->
assert {:ok, _} =
WebsocketClient.start_link(self(), @vsn_path, @serializer,
[{"origin", "https://example.com"}])
assert {:error, {403, _}} =
WebsocketClient.start_link(self(), @vsn_path, @serializer,
[{"origin", "http://notallowed.com"}])
end
end
test "refuses connects that error with 403 response" do
assert WebsocketClient.start_link(self(), "#{@vsn_path}&reject=true", @serializer) ==
{:error, {403, "Forbidden"}}
end
test "shuts down when receiving disconnect broadcasts on socket's id" do
{:ok, sock} = WebsocketClient.start_link(self(), "#{@vsn_path}&user_id=1001", @serializer)
WebsocketClient.join(sock, "room:wsdisconnect1", %{})
assert_receive %Message{topic: "room:wsdisconnect1", event: "phx_reply",
ref: "1", payload: %{"response" => %{}, "status" => "ok"}}
WebsocketClient.join(sock, "room:wsdisconnect2", %{})
assert_receive %Message{topic: "room:wsdisconnect2", event: "phx_reply",
ref: "2", payload: %{"response" => %{}, "status" => "ok"}}
chan1 = Process.whereis(:"room:wsdisconnect1")
assert chan1
chan2 = Process.whereis(:"room:wsdisconnect2")
assert chan2
Process.monitor(sock)
Process.monitor(chan1)
Process.monitor(chan2)
Endpoint.broadcast("user_sockets:1001", "disconnect", %{})
assert_receive {:DOWN, _, :process, ^sock, :normal}
assert_receive {:DOWN, _, :process, ^chan1, shutdown}
#shutdown for cowboy, {:shutdown, :closed} for cowboy 2
assert shutdown in [:shutdown, {:shutdown, :closed}]
assert_receive {:DOWN, _, :process, ^chan2, shutdown}
assert shutdown in [:shutdown, {:shutdown, :closed}]
end
test "duplicate join event closes existing channel" do
{:ok, sock} = WebsocketClient.start_link(self(), "#{@vsn_path}&user_id=1001", @serializer)
WebsocketClient.join(sock, "room:joiner", %{})
assert_receive %Message{topic: "room:joiner", event: "phx_reply",
ref: "1", payload: %{"response" => %{}, "status" => "ok"}}
WebsocketClient.join(sock, "room:joiner", %{})
assert_receive %Message{topic: "room:joiner", event: "phx_reply",
ref: "2", payload: %{"response" => %{}, "status" => "ok"}}
assert_receive %Message{topic: "room:joiner", event: "phx_close",
ref: "1", payload: %{}}
end
test "returns 403 when versions to not match" do
assert capture_log(fn ->
url = "ws://127.0.0.1:#{@port}/ws/websocket?vsn=123.1.1"
assert WebsocketClient.start_link(self(), url, @serializer) ==
{:error, {403, "Forbidden"}}
end) =~ "The client's requested transport version \"123.1.1\" does not match server's version"
end
test "shuts down if client goes quiet" do
{:ok, socket} = WebsocketClient.start_link(self(), @vsn_path, @serializer)
Process.monitor(socket)
WebsocketClient.send_heartbeat(socket)
assert_receive %Message{event: "phx_reply",
payload: %{"response" => %{}, "status" => "ok"},
ref: "1", topic: "phoenix"}
assert_receive {:DOWN, _, :process, ^socket, :normal}, 400
end
test "warns for unmatched topic" do
{:ok, sock} = WebsocketClient.start_link(self(), "#{@vsn_path}&logging=enabled", @serializer)
log = capture_log(fn ->
WebsocketClient.join(sock, "unmatched-topic", %{})
assert_receive %Message{
event: "phx_reply",
ref: "1",
topic: "unmatched-topic",
join_ref: nil,
payload: %{
"status" => "error",
"response" => %{"reason" => "unmatched topic"}
}
}
end)
assert log =~ "[warn] Ignoring unmatched topic \"unmatched-topic\" in Phoenix.Integration.WebSocketChannelsTest.UserSocket"
end
end
end
end
| 37.561576 | 132 | 0.552721 |
9e4f8973947169f2f7970f7cc3b27e4657cb81db | 2,386 | exs | Elixir | playground/elixir-live-doom-fire/config/prod.exs | allmonty/doom-fire-algorithm | dac483b85973020c5f86962d3d9344018711043b | [
"MIT"
] | null | null | null | playground/elixir-live-doom-fire/config/prod.exs | allmonty/doom-fire-algorithm | dac483b85973020c5f86962d3d9344018711043b | [
"MIT"
] | null | null | null | playground/elixir-live-doom-fire/config/prod.exs | allmonty/doom-fire-algorithm | dac483b85973020c5f86962d3d9344018711043b | [
"MIT"
] | null | null | null | use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# LiveDoomFireWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
config :live_doom_fire, LiveDoomFireWeb.Endpoint,
  load_from_system_env: true,
  url: [scheme: "https", host: "elixir-doom-fire.herokuapp.com", port: 443],
  force_ssl: [rewrite_on: [:x_forwarded_proto]],
  cache_static_manifest: "priv/static/cache_manifest.json",
  server: true,
  root: ".",
  # Fix: the OTP app is :live_doom_fire, not :myapp (a leftover from the
  # Heroku deployment guide template). Application.spec(:myapp, :vsn)
  # always returned nil because no :myapp application exists.
  version: Application.spec(:live_doom_fire, :vsn)
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :live_doom_fire, LiveDoomFireWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :live_doom_fire, LiveDoomFireWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :live_doom_fire, LiveDoomFireWeb.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
# import_config "prod.secret.exs"
| 34.57971 | 76 | 0.724644 |
9e4f9ab8135bb157074c755c3e205d7ce2506729 | 1,819 | ex | Elixir | lib/wabanex_web/telemetry.ex | brunodantas/wabanex | 4d8e4258969203cf0adf5b6561fbd862085afed7 | [
"MIT"
] | 61 | 2021-06-22T00:15:59.000Z | 2022-01-31T15:13:51.000Z | lib/wabanex_web/telemetry.ex | brunodantas/wabanex | 4d8e4258969203cf0adf5b6561fbd862085afed7 | [
"MIT"
] | 1 | 2021-06-21T18:42:41.000Z | 2021-06-21T18:42:41.000Z | lib/wabanex_web/telemetry.ex | brunodantas/wabanex | 4d8e4258969203cf0adf5b6561fbd862085afed7 | [
"MIT"
] | 56 | 2021-06-21T17:17:36.000Z | 2022-03-15T02:48:59.000Z | defmodule WabanexWeb.Telemetry do
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# Database Metrics
summary("wabanex.repo.query.total_time", unit: {:native, :millisecond}),
summary("wabanex.repo.query.decode_time", unit: {:native, :millisecond}),
summary("wabanex.repo.query.query_time", unit: {:native, :millisecond}),
summary("wabanex.repo.query.queue_time", unit: {:native, :millisecond}),
summary("wabanex.repo.query.idle_time", unit: {:native, :millisecond}),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {WabanexWeb, :count_users, []}
]
end
end
| 32.482143 | 86 | 0.671798 |
9e4fa58fadf4fdb4fba79a8563c8890fccc2f7fa | 1,127 | ex | Elixir | lib/minuet/type/condition.ex | exstruct/minuet | 2ca8f6da3000bfdf00f271eb695cb468b50b71bb | [
"MIT"
] | null | null | null | lib/minuet/type/condition.ex | exstruct/minuet | 2ca8f6da3000bfdf00f271eb695cb468b50b71bb | [
"MIT"
] | null | null | null | lib/minuet/type/condition.ex | exstruct/minuet | 2ca8f6da3000bfdf00f271eb695cb468b50b71bb | [
"MIT"
] | null | null | null | defmodule Minuet.Type.Condition do
defstruct expression: nil,
enter: nil,
exit: nil,
value: nil,
line: nil
defimpl Minuet.Type do
alias Minuet.Util
def compile(
%{line: line, enter: v_enter, exit: v_exit, expression: expression, value: value},
serializer,
prev_vars
) do
{v_enter, vars} = Util.compile(v_enter, serializer, prev_vars)
{value, vars} = Util.compile(value, serializer, vars)
{v_exit, vars} = Util.compile(v_exit, serializer, vars)
scope = Util.scope(vars)
body =
Util.join(
[
v_enter,
value,
v_exit,
scope
],
line
)
{
quote line: line do
unquote(scope) =
case unquote(expression) do
res when res === nil or res === false ->
# TODO fill in additional scope values with nil
unquote(scope)
_ ->
unquote(body)
end
end,
vars
}
end
end
end
| 22.54 | 92 | 0.484472 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.