| column | type | range / values (⌀ = nullable) |
| --- | --- | --- |
| hexsha | stringlengths | 40–40 |
| size | int64 | 2–991k |
| ext | stringclasses | 2 values |
| lang | stringclasses | 1 value |
| max_stars_repo_path | stringlengths | 4–208 |
| max_stars_repo_name | stringlengths | 6–106 |
| max_stars_repo_head_hexsha | stringlengths | 40–40 |
| max_stars_repo_licenses | sequence | |
| max_stars_count | int64 | 1–33.5k ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24–24 ⌀ |
| max_issues_repo_path | stringlengths | 4–208 |
| max_issues_repo_name | stringlengths | 6–106 |
| max_issues_repo_head_hexsha | stringlengths | 40–40 |
| max_issues_repo_licenses | sequence | |
| max_issues_count | int64 | 1–16.3k ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24–24 ⌀ |
| max_forks_repo_path | stringlengths | 4–208 |
| max_forks_repo_name | stringlengths | 6–106 |
| max_forks_repo_head_hexsha | stringlengths | 40–40 |
| max_forks_repo_licenses | sequence | |
| max_forks_count | int64 | 1–6.91k ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24–24 ⌀ |
| content | stringlengths | 2–991k |
| avg_line_length | float64 | 1–36k |
| max_line_length | int64 | 1–977k |
| alphanum_fraction | float64 | 0–1 |
9eae66d627b270dabc597633ab7ae37ba4317348 | 708 | ex | Elixir | lib/calendarific/apis/holidays.ex | Bounceapp/elixir-calendarific | f3c3b9b8d17d0fbf9773f3948bacf687a093a832 | [
"MIT"
] | 3 | 2021-03-16T15:21:08.000Z | 2021-03-19T22:38:20.000Z | lib/calendarific/apis/holidays.ex | Bounceapp/elixir-calendarific | f3c3b9b8d17d0fbf9773f3948bacf687a093a832 | [
"MIT"
] | null | null | null | lib/calendarific/apis/holidays.ex | Bounceapp/elixir-calendarific | f3c3b9b8d17d0fbf9773f3948bacf687a093a832 | [
"MIT"
] | null | null | null | defmodule Calendarific.Apis.Holidays do
@derive [Poison.Encoder]
alias Calendarific.HttpClient
alias Calendarific.Types
@endpoint "holidays"
def fetch(country_code, year) do
HttpClient.request(:get, @endpoint, country: country_code, year: year)
|> Map.get("response")
|> parse_holidays()
end
defp parse_holidays([]), do: []
defp parse_holidays(response) do
response
|> Map.get("holidays")
|> Enum.map(fn h ->
h
|> Poison.encode!()
|> Poison.decode!(
as: %Types.Holiday{
country: %Types.Country{},
date: %Types.Date{datetime: %Types.DateTime{}},
states: [%Types.State{}]
}
)
end)
end
end
| 21.454545 | 74 | 0.60452 |
9eae6c8e6b10ad9868029b4e5d9ede44b4ffcac9 | 1,840 | exs | Elixir | clients/apps_activity/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/apps_activity/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/apps_activity/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AppsActivity.Mixfile do
use Mix.Project
@version "0.12.0"
def project() do
[
app: :google_api_apps_activity,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/apps_activity"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Drive Activity API client library. Provides a historical view of activity.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/apps_activity",
"Homepage" => "https://developers.google.com/google-apps/activity/"
}
]
end
end
| 27.462687 | 104 | 0.657065 |
9eae8b55965b38a255cc56043d2548c29fc15f52 | 992 | exs | Elixir | mix.exs | mjaric/tds-encoding | 9bcc0cfcd9c1a02125f0ed3384868b42727cf5cb | [
"MIT"
] | 1 | 2021-10-09T09:09:08.000Z | 2021-10-09T09:09:08.000Z | mix.exs | mjaric/tds-encoding | 9bcc0cfcd9c1a02125f0ed3384868b42727cf5cb | [
"MIT"
] | 2 | 2021-05-24T15:21:54.000Z | 2021-10-09T18:02:18.000Z | mix.exs | mjaric/tds-encoding | 9bcc0cfcd9c1a02125f0ed3384868b42727cf5cb | [
"MIT"
] | 3 | 2021-05-21T00:26:31.000Z | 2021-09-29T08:06:05.000Z | defmodule Tds.Encoding.MixProject do
use Mix.Project
def project do
[
app: :tds_encoding,
compilers: Mix.compilers(),
version: "1.1.1",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps(),
description:
"String encoding/decoding NIF using rust [encoding](https://crates.io/crates/encoding) library",
package: package()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger, :rustler]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:rustler, "~> 0.23.0"},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
]
end
defp package do
[
name: "tds_encoding",
maintainers: ["Milan Jarić"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/mjaric/tds-encoding"},
files: ~w(.formatter.exs mix.exs README.md lib native)
]
end
end
| 23.069767 | 104 | 0.586694 |
9eae8fa278235bbd6b4192be1fe601a6d5b0f619 | 3,978 | exs | Elixir | test/grant_type/client_credentials_test.exs | smartvokat/door_frame | 9052ab233b2af65582dd1925a998e2b46b0eb569 | [
"Apache-2.0"
] | 5 | 2019-06-12T11:26:12.000Z | 2022-01-28T19:45:47.000Z | test/grant_type/client_credentials_test.exs | smartvokat/door_frame | 9052ab233b2af65582dd1925a998e2b46b0eb569 | [
"Apache-2.0"
] | 2 | 2019-06-04T13:19:55.000Z | 2021-04-19T07:17:34.000Z | test/grant_type/client_credentials_test.exs | smartvokat/door_frame | 9052ab233b2af65582dd1925a998e2b46b0eb569 | [
"Apache-2.0"
] | 1 | 2019-06-12T11:26:14.000Z | 2019-06-12T11:26:14.000Z | defmodule DoorFrame.GrantType.ClientCredentialsTest do
alias DoorFrame.Error
alias DoorFrame.GrantType.ClientCredentials
alias DoorFrame.Request
alias DoorFrame.Response
use ExUnit.Case
setup do
request = %Request{
client_id: "secret_client_id",
client_secret: "secret_client_secret",
scope: "read write"
}
response = %Response{}
[request: request, response: response]
end
describe "handle()" do
test "returns an error when the request does not contain a client_id", %{response: response} do
request = %Request{}
assert {:error, %Error{error: "invalid_request"} = error} =
ClientCredentials.handle(request, response)
assert error.description =~ "client_id"
end
test "returns an error when the request does not contain a client_secret", %{
response: response
} do
request = %Request{client_id: "secret_id"}
assert {:error, %Error{error: "invalid_request"} = error} =
ClientCredentials.handle(request, response)
assert error.description =~ "client_secret"
end
test "calls the required callback functions of the handler", %{response: response} do
defmodule MyHandler1 do
use DoorFrame
def validate_scope("read write") do
send(self(), :validate_scope)
{:ok, ["read", "write"]}
end
def get_client(%Request{} = request, %Response{}) do
assert request.client_id == "secret_client_id"
assert request.client_secret == "secret_client_secret"
send(self(), :get_client_called)
{:ok, %{id: "c"}}
end
def get_resource_owner_from_client(%Request{}, %Response{client: %{id: "c"}}) do
send(self(), :get_resource_owner_from_client)
{:ok, %{id: "ro"}}
end
def generate_token(:access_token, _request, _response) do
send(self(), :generate_access_token)
{:ok, "at"}
end
def generate_token(:refresh_token, _request, _response) do
send(self(), :generate_refresh_token)
{:ok, %{id: "rt"}}
end
def persist_tokens(
%{access_token: "at", refresh_token: %{id: "rt"}},
%{client: %{id: "c"}, resource_owner: %{id: "ro"}}
) do
send(self(), :persist_access_token)
send(self(), :persist_refresh_token)
{:ok}
end
end
MyHandler1.create_request(
client_id: "secret_client_id",
client_secret: "secret_client_secret",
scope: "read write"
)
|> ClientCredentials.handle(response)
assert_received :validate_scope
assert_received :get_client_called
assert_received :get_resource_owner_from_client
assert_received :generate_access_token
assert_received :generate_refresh_token
assert_received :persist_access_token
assert_received :persist_refresh_token
end
test "handles errors without a description in get_client", %{
request: request,
response: response
} do
defmodule MyHandler2 do
use DoorFrame
def get_client(_, _) do
{:error}
end
end
assert {:error, %Error{error: "invalid_client"}} =
MyHandler2.create_request(client_id: "test", client_secret: "test")
|> ClientCredentials.handle(response)
end
test "handles errors with a description in get_client", %{
request: request,
response: response
} do
defmodule MyHandler3 do
use DoorFrame
def get_client(_, _) do
{:error, "Client not found"}
end
end
assert {:error, %Error{error: "invalid_client", description: description}} =
MyHandler3.create_request(client_id: "test", client_secret: "test")
|> ClientCredentials.handle(response)
assert description = "Client not found"
end
end
end
| 29.466667 | 99 | 0.620161 |
9eaeeea6eba8d2a82da822f9ed7d1739fe437c42 | 554 | ex | Elixir | lib/gexbot_web/router.ex | jakewilkins/gexbot | b92cd5bae794becaedbee9c8884e4b66942198b7 | [
"WTFPL"
] | null | null | null | lib/gexbot_web/router.ex | jakewilkins/gexbot | b92cd5bae794becaedbee9c8884e4b66942198b7 | [
"WTFPL"
] | null | null | null | lib/gexbot_web/router.ex | jakewilkins/gexbot | b92cd5bae794becaedbee9c8884e4b66942198b7 | [
"WTFPL"
] | null | null | null | defmodule GexbotWeb.Router do
use GexbotWeb, :router
pipeline :public_api do
plug :accepts, ["json"]
end
pipeline :authenticated_api do
plug :accepts, ["json"]
end
scope "/api", GexbotWeb do
pipe_through :authenticated_api
resources "/installations", InstallationController, except: [:new, :edit, :create]
resources "/authorizations", AuthorizationController, except: [:new, :edit]
resources "/people", PersonController, except: [:new, :edit]
end
post "hooks/event", GexbotWeb.GithubHookController, :event
end
| 26.380952 | 86 | 0.712996 |
9eaefa491aeb5c96633495f87d420693037492d4 | 1,722 | exs | Elixir | test/sponsorly_web/controllers/user_registration_controller_test.exs | AminArria/sponsorly | fa78ead63076a54cb1cb1f9d4f4c5fd7a4a78fac | [
"MIT"
] | null | null | null | test/sponsorly_web/controllers/user_registration_controller_test.exs | AminArria/sponsorly | fa78ead63076a54cb1cb1f9d4f4c5fd7a4a78fac | [
"MIT"
] | null | null | null | test/sponsorly_web/controllers/user_registration_controller_test.exs | AminArria/sponsorly | fa78ead63076a54cb1cb1f9d4f4c5fd7a4a78fac | [
"MIT"
] | null | null | null | defmodule SponsorlyWeb.UserRegistrationControllerTest do
use SponsorlyWeb.ConnCase, async: true
import Sponsorly.AccountsFixtures
describe "GET /users/register" do
test "renders registration page", %{conn: conn} do
conn = get(conn, Routes.user_registration_path(conn, :new))
response = html_response(conn, 200)
assert response =~ "<h1>Register</h1>"
assert response =~ "Log in</a>"
assert response =~ "Register</a>"
end
test "redirects if already logged in", %{conn: conn} do
conn = conn |> log_in_user(user_fixture()) |> get(Routes.user_registration_path(conn, :new))
assert redirected_to(conn) == "/"
end
end
describe "POST /users/register" do
@tag :capture_log
test "creates account and redirect to sign in (waiting confirmation)", %{conn: conn} do
email = unique_user_email()
conn =
post(conn, Routes.user_registration_path(conn, :create), %{
"user" => %{"email" => email, "password" => valid_user_password()}
})
refute get_session(conn, :user_token)
assert redirected_to(conn) == Routes.user_session_path(conn, :new)
assert get_flash(conn, :info) == "User created successfully. Please confirm your email to log in."
end
test "render errors for invalid data", %{conn: conn} do
conn =
post(conn, Routes.user_registration_path(conn, :create), %{
"user" => %{"email" => "with spaces", "password" => "too short"}
})
response = html_response(conn, 200)
assert response =~ "<h1>Register</h1>"
assert response =~ "must have the @ sign and no spaces"
assert response =~ "should be at least 12 character"
end
end
end
| 35.142857 | 104 | 0.646341 |
9eaf128bb83bbbdb7426dc56df504c97633c2abf | 1,044 | exs | Elixir | test/lib/canvas_api/unfurl/gist_test.exs | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | 123 | 2017-04-04T18:15:48.000Z | 2021-04-26T08:04:22.000Z | test/lib/canvas_api/unfurl/gist_test.exs | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | null | null | null | test/lib/canvas_api/unfurl/gist_test.exs | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | 17 | 2017-04-04T18:58:29.000Z | 2021-05-10T21:39:16.000Z | defmodule CanvasAPI.Unfurl.GistTest do
use ExUnit.Case
alias CanvasAPI.Unfurl
alias CanvasAPI.Unfurl.Gist, as: UnfurlGist
import Mock
test "unfurls via a GitHub Gist" do
url = "https://gist.github.com/user/id.json"
with_mock HTTPoison, [get: mock_get(url)] do
unfurl = UnfurlGist.unfurl(url)
assert unfurl == %Unfurl{
id: url,
html: ~s(<div id="gist"></div><link rel="stylesheet" type="text/css" href="styles.css">),
provider_icon_url: CanvasAPI.Unfurl.GitHub.provider_icon_url,
provider_name: "GitHub Gist",
provider_url: "https://gist.github.com",
title: "file_01.ex",
text: "Description",
url: url}
end
end
defp mock_get(url) do
fn(^url) ->
{:ok,
%HTTPoison.Response{
status_code: 200,
body: Poison.encode!(%{
"div" => ~s(<div id="gist"></div>),
"stylesheet" => "styles.css",
"files" => ["file_01.ex"],
"description" => "Description"})}}
end
end
end
| 27.473684 | 97 | 0.585249 |
9eaf2231460f53a4ac25b48b373d744517592190 | 14,290 | exs | Elixir | lib/elixir/test/elixir/task_test.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | 4 | 2015-12-22T02:46:39.000Z | 2016-04-26T06:11:09.000Z | lib/elixir/test/elixir/task_test.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/task_test.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule TaskTest do
use ExUnit.Case
setup do
Logger.remove_backend(:console)
on_exit fn -> Logger.add_backend(:console, flush: true) end
:ok
end
def wait_and_send(caller, atom) do
send caller, :ready
receive do: (true -> true)
send caller, atom
end
defp create_task_in_other_process do
caller = self()
spawn fn -> send caller, Task.async(fn -> nil end) end
receive do: (task -> task)
end
defp create_dummy_task do
%Task{ref: make_ref, pid: spawn(fn() -> :ok end), owner: self()}
end
test "async/1" do
parent = self()
fun = fn -> wait_and_send(parent, :done) end
task = Task.async(fun)
# Assert the struct
assert task.__struct__ == Task
assert is_pid task.pid
assert is_reference task.ref
# Assert the link
{:links, links} = Process.info(self, :links)
assert task.pid in links
receive do: (:ready -> :ok)
# Assert the initial call
{:name, fun_name} = :erlang.fun_info(fun, :name)
assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(task.pid)
# Run the task
send task.pid, true
# Assert response and monitoring messages
ref = task.ref
assert_receive {^ref, :done}
assert_receive {:DOWN, ^ref, _, _, :normal}
end
test "async/3" do
task = Task.async(__MODULE__, :wait_and_send, [self(), :done])
assert task.__struct__ == Task
{:links, links} = Process.info(self, :links)
assert task.pid in links
receive do: (:ready -> :ok)
assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(task.pid)
send(task.pid, true)
assert Task.await(task) === :done
assert_receive :done
end
test "start/1" do
parent = self()
fun = fn -> wait_and_send(parent, :done) end
{:ok, pid} = Task.start(fun)
{:links, links} = Process.info(self, :links)
refute pid in links
receive do: (:ready -> :ok)
{:name, fun_name} = :erlang.fun_info(fun, :name)
assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(pid)
send pid, true
assert_receive :done
end
test "start/3" do
{:ok, pid} = Task.start(__MODULE__, :wait_and_send, [self(), :done])
{:links, links} = Process.info(self, :links)
refute pid in links
receive do: (:ready -> :ok)
assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(pid)
send pid, true
assert_receive :done
end
test "start_link/1" do
parent = self()
fun = fn -> wait_and_send(parent, :done) end
{:ok, pid} = Task.start_link(fun)
{:links, links} = Process.info(self, :links)
assert pid in links
receive do: (:ready -> :ok)
{:name, fun_name} = :erlang.fun_info(fun, :name)
assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(pid)
send pid, true
assert_receive :done
end
test "start_link/3" do
{:ok, pid} = Task.start_link(__MODULE__, :wait_and_send, [self(), :done])
{:links, links} = Process.info(self, :links)
assert pid in links
receive do: (:ready -> :ok)
assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(pid)
send pid, true
assert_receive :done
end
test "await/2 exits on timeout" do
task = %Task{ref: make_ref(), owner: self()}
assert catch_exit(Task.await(task, 0)) == {:timeout, {Task, :await, [task, 0]}}
end
test "await/2 exits on normal exit" do
task = Task.async(fn -> exit :normal end)
assert catch_exit(Task.await(task)) == {:normal, {Task, :await, [task, 5000]}}
end
test "await/2 exits on task throw" do
Process.flag(:trap_exit, true)
task = Task.async(fn -> throw :unknown end)
assert {{{:nocatch, :unknown}, _}, {Task, :await, [^task, 5000]}} =
catch_exit(Task.await(task))
end
test "await/2 exits on task error" do
Process.flag(:trap_exit, true)
task = Task.async(fn -> raise "oops" end)
assert {{%RuntimeError{}, _}, {Task, :await, [^task, 5000]}} =
catch_exit(Task.await(task))
end
test "await/2 exits on task undef module error" do
Process.flag(:trap_exit, true)
task = Task.async(&:module_does_not_exist.undef/0)
assert {{:undef, [{:module_does_not_exist, :undef, _, _} | _]},
{Task, :await, [^task, 5000]}} =
catch_exit(Task.await(task))
end
test "await/2 exits on task undef function error" do
Process.flag(:trap_exit, true)
task = Task.async(&TaskTest.undef/0)
assert {{:undef, [{TaskTest, :undef, _, _} | _]},
{Task, :await, [^task, 5000]}} =
catch_exit(Task.await(task))
end
test "await/2 exits on task exit" do
Process.flag(:trap_exit, true)
task = Task.async(fn -> exit :unknown end)
assert {:unknown, {Task, :await, [^task, 5000]}} =
catch_exit(Task.await(task))
end
test "await/2 exits on :noconnection" do
ref = make_ref()
task = %Task{ref: ref, pid: self(), owner: self()}
send self(), {:DOWN, ref, :process, self(), :noconnection}
assert catch_exit(Task.await(task)) |> elem(0) == {:nodedown, :nonode@nohost}
end
test "await/2 exits on :noconnection from named monitor" do
ref = make_ref()
task = %Task{ref: ref, pid: nil, owner: self()}
send self(), {:DOWN, ref, :process, {:name, :node}, :noconnection}
assert catch_exit(Task.await(task)) |> elem(0) == {:nodedown, :node}
end
test "await/2 raises when invoked from a non-owner process" do
task = create_task_in_other_process()
message = "task #{inspect task} must be queried from the owner but was queried from #{inspect self()}"
assert_raise ArgumentError, message, fn -> Task.await(task, 1) end
end
test "yield/2 returns {:ok, result} when reply and :DOWN in message queue" do
task = %Task{ref: make_ref, owner: self()}
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, self, :abnormal})
assert Task.yield(task, 0) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "yield/2 returns nil on timeout" do
task = %Task{ref: make_ref(), owner: self()}
assert Task.yield(task, 0) == nil
end
test "yield/2 return exit on normal exit" do
task = Task.async(fn -> exit :normal end)
assert Task.yield(task) == {:exit, :normal}
end
test "yield/2 exits on :noconnection" do
ref = make_ref()
task = %Task{ref: ref, pid: self(), owner: self()}
send self(), {:DOWN, ref, self(), self(), :noconnection}
assert catch_exit(Task.yield(task)) |> elem(0) == {:nodedown, :nonode@nohost}
end
test "yield/2 raises when invoked from a non-owner process" do
task = create_task_in_other_process()
message = "task #{inspect task} must be queried from the owner but was queried from #{inspect self()}"
assert_raise ArgumentError, message, fn -> Task.yield(task, 1) end
end
test "yield_many/2 returns {:ok, result} when reply and :DOWN in message queue" do
task = %Task{ref: make_ref, owner: self()}
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, self, :abnormal})
assert Task.yield_many([task], 0) == [{task, {:ok, :result}}]
refute_received {:DOWN, _, _, _, _}
end
test "yield_many/2 returns nil on timeout" do
task = %Task{ref: make_ref(), owner: self()}
assert Task.yield_many([task], 0) == [{task, nil}]
end
test "yield_many/2 return exit on normal exit" do
task = Task.async(fn -> exit :normal end)
assert Task.yield_many([task]) == [{task, {:exit, :normal}}]
end
test "yield_many/2 exits on :noconnection" do
ref = make_ref()
task = %Task{ref: ref, pid: self(), owner: self()}
send self(), {:DOWN, ref, self(), self(), :noconnection}
assert catch_exit(Task.yield_many([task])) |> elem(0) == {:nodedown, :nonode@nohost}
end
test "yield_many/2 raises when invoked from a non-owner process" do
task = create_task_in_other_process()
message = "task #{inspect task} must be queried from the owner but was queried from #{inspect self()}"
assert_raise ArgumentError, message, fn -> Task.yield_many([task], 1) end
end
test "yield_many/2 returns results from multiple tasks" do
task1 = %Task{ref: make_ref(), owner: self()}
task2 = %Task{ref: make_ref(), owner: self()}
task3 = Task.async(fn -> exit :normal end)
send(self(), {task1.ref, :result})
ref = Process.monitor(task3.pid)
assert_receive {:DOWN, ^ref, _, _, :normal}
assert Task.yield_many([task1, task2, task3], 0) ==
[{task1, {:ok, :result}}, {task2, nil}, {task3, {:exit, :normal}}]
end
test "shutdown/2 returns {:ok, result} when reply and abnormal :DOWN in message queue" do
task = create_dummy_task()
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal})
assert Task.shutdown(task) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "shutdown/2 returns {:ok, result} when reply and normal :DOWN in message queue" do
task = create_dummy_task()
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :normal})
assert Task.shutdown(task) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "shutdown/2 returns {:ok, result} when reply and shutdown :DOWN in message queue" do
task = create_dummy_task()
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown})
assert Task.shutdown(task) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "shutdown/2 returns nil on shutting down task" do
task = Task.async(:timer, :sleep, [:infinity])
assert Task.shutdown(task) == nil
end
test "shutdown/2 return exit on abnormal :DOWN in message queue" do
task = create_dummy_task()
send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal})
assert Task.shutdown(task) == {:exit, :abnormal}
end
test "shutdown/2 return exit on normal :DOWN in message queue" do
task = create_dummy_task()
send(self(), {:DOWN, task.ref, :process, task.pid, :normal})
assert Task.shutdown(task) == {:exit, :normal}
end
test "shutdown/2 returns nil on shutdown :DOWN in message queue" do
task = create_dummy_task()
send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown})
assert Task.shutdown(task) == nil
end
test "shutdown/2 return exit on killed :DOWN in message queue" do
task = create_dummy_task()
send(self(), {:DOWN, task.ref, :process, task.pid, :killed})
assert Task.shutdown(task) == {:exit, :killed}
end
test "shutdown/2 exits on noconnection :DOWN in message queue" do
task = create_dummy_task()
send(self(), {:DOWN, task.ref, :process, task.pid, :noconnection})
assert catch_exit(Task.shutdown(task)) ==
{{:nodedown, node()}, {Task, :shutdown, [task, 5000]}}
end
test "shutdown/2 raises if task pid is nil" do
task = %Task{ref: make_ref, pid: nil}
assert_raise ArgumentError, "task #{inspect task} does not have an associated task process",
fn -> Task.shutdown(task) end
end
test "shutdown/2 raises when invoked from a non-owner process" do
task = create_task_in_other_process()
message = "task #{inspect task} must be queried from the owner but was queried from #{inspect self()}"
assert_raise ArgumentError, message, fn -> Task.shutdown(task) end
end
test "shutdown/2 brutal_ kill returns {:ok, result} when reply and abnormal :DOWN in message queue" do
task = create_dummy_task()
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal})
assert Task.shutdown(task, :brutal_kill) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "shutdown/2 brutal kill returns {:ok, result} when reply and normal :DOWN in message queue" do
task = create_dummy_task()
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :normal})
assert Task.shutdown(task, :brutal_kill) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "shutdown/2 brutal kill returns {:ok, result} when reply and shutdown :DOWN in message queue" do
task = create_dummy_task()
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown})
assert Task.shutdown(task, :brutal_kill) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "shutdown/2 brutal kill returns exit on abnormal :DOWN in message queue" do
task = create_dummy_task()
send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal})
assert Task.shutdown(task, :brutal_kill) == {:exit, :abnormal}
end
test "shutdown/2 brutal kill returns exit on normal :DOWN in message queue" do
task = create_dummy_task()
send(self(), {:DOWN, task.ref, :process, task.pid, :normal})
assert Task.shutdown(task, :brutal_kill) == {:exit, :normal}
end
test "shutdown/2 brutal kill returns exit on shutdown :DOWN in message queue" do
task = create_dummy_task()
send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown})
assert Task.shutdown(task, :brutal_kill) == {:exit, :shutdown}
end
test "shutdown/2 brutal kill exits on noconnection :DOWN in message queue" do
task = create_dummy_task()
send(self(), {:DOWN, task.ref, :process, task.pid, :noconnection})
assert catch_exit(Task.shutdown(task, :brutal_kill)) ==
{{:nodedown, node()}, {Task, :shutdown, [task, :brutal_kill]}}
end
test "shutdown/2 returns exit on killing task after shutdown timeout" do
caller = self()
task = Task.async(fn() ->
Process.flag(:trap_exit, true)
wait_and_send(caller, :ready)
:timer.sleep(:infinity)
end)
receive do: (:ready -> :ok)
assert Task.shutdown(task, 1) == {:exit, :killed}
end
test "shutdown/2 returns nil on killing task" do
caller = self()
task = Task.async(fn() ->
Process.flag(:trap_exit, true)
wait_and_send(caller, :ready)
:timer.sleep(:infinity)
end)
receive do: (:ready -> :ok)
assert Task.shutdown(task, :brutal_kill) == nil
refute_received {:DOWN, _, _, _, _}
end
end
| 33.466042 | 106 | 0.642617 |
9eaf2f324c3d87c99dd0e50b69f591d92b47b396 | 621 | ex | Elixir | gallows/lib/gallows_web/views/helpers/game_state_helper.ex | wronfim/hangman_game | c4dc4b9f122e773fe87ac4dc88206b792c1b239e | [
"MIT"
] | null | null | null | gallows/lib/gallows_web/views/helpers/game_state_helper.ex | wronfim/hangman_game | c4dc4b9f122e773fe87ac4dc88206b792c1b239e | [
"MIT"
] | null | null | null | gallows/lib/gallows_web/views/helpers/game_state_helper.ex | wronfim/hangman_game | c4dc4b9f122e773fe87ac4dc88206b792c1b239e | [
"MIT"
] | null | null | null | defmodule GallowsWeb.Views.Helpers.GameStateHelper do
import Phoenix.HTML, only: [raw: 1]
@responses %{
won: {:success, "You Won!"},
lost: {:danger, "You Lost!"},
good_guess: {:success, "Good guess!"},
bad_guess: {:warning, "Bad guess!"},
already_used: {:info, "You already guessed that!"},
initializing: {:info, "Let's Play"},
}
def game_state(state) do
@responses[state]
|> alert()
end
defp alert(nil), do: ""
defp alert({class, message}) do
"""
<div class="alert alert-#{class}">
#{message}
</div>
"""
|> raw()
end
end
| 22.178571 | 55 | 0.558776 |
9eaf357cf1ddadcaf74d71d816591c944721aa09 | 720 | ex | Elixir | lib/cforum/jobs/database_maintenance_job.ex | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | [
"MIT"
] | null | null | null | lib/cforum/jobs/database_maintenance_job.ex | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | [
"MIT"
] | null | null | null | lib/cforum/jobs/database_maintenance_job.ex | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | [
"MIT"
] | null | null | null | defmodule Cforum.Jobs.DatabaseMaintenanceJob do
use Oban.Worker, queue: :background, max_attempts: 5
import Ecto.Query, warn: false
require Logger
alias Cforum.Repo
@impl Oban.Worker
def perform(_, _) do
Logger.info("Starting database maintenance…")
Logger.info("VACCUM FULL ANALYZE…")
Repo.query!("VACUUM FULL ANALYZE", [], timeout: :infinity, pool_timeout: :infinity)
Logger.info("ANALYZE…")
Repo.query!("ANALYZE", [], timeout: :infinity, pool_timeout: :infinity)
Logger.info("REINDEX…")
cfg = Repo.config()
Repo.query!("REINDEX DATABASE #{cfg[:database]}", [], timeout: :infinity, pool_timeout: :infinity)
Logger.info("database maintenance finished!")
end
end
| 27.692308 | 102 | 0.694444 |
9eaf49c1b0ae7815c29edbc30a898144c823d788 | 12,723 | ex | Elixir | lib/ecto/adapters/myxql.ex | jeroenvisser101/ecto_sql | f189802129ac866b2d376ce073b07809a3317238 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/myxql.ex | jeroenvisser101/ecto_sql | f189802129ac866b2d376ce073b07809a3317238 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/myxql.ex | jeroenvisser101/ecto_sql | f189802129ac866b2d376ce073b07809a3317238 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Adapters.MyXQL do
@moduledoc """
Adapter module for MySQL.
  It uses `MyXQL` for communicating with the database.
## Options
MySQL options split in different categories described
below. All options can be given via the repository
configuration:
### Connection options
* `:protocol` - Set to `:socket` for using UNIX domain socket, or `:tcp` for TCP
(default: `:socket`)
* `:socket` - Connect to MySQL via UNIX sockets in the given path.
* `:hostname` - Server hostname
* `:port` - Server port (default: 3306)
* `:username` - Username
* `:password` - User password
* `:database` - the database to connect to
* `:pool` - The connection pool module, defaults to `DBConnection.ConnectionPool`
* `:ssl` - Set to true if ssl should be used (default: false)
* `:ssl_opts` - A list of ssl options, see Erlang's `ssl` docs
* `:connect_timeout` - The timeout for establishing new connections (default: 5000)
* `:cli_protocol` - The protocol used for the mysql client connection (default: `"tcp"`).
This option is only used for `mix ecto.load` and `mix ecto.dump`,
via the `mysql` command. For more information, please check
[MySQL docs](https://dev.mysql.com/doc/en/connecting.html)
* `:socket_options` - Specifies socket configuration
* `:show_sensitive_data_on_connection_error` - show connection data and
configuration whenever there is an error attempting to connect to the
database
The `:socket_options` are particularly useful when configuring the size
of both send and receive buffers. For example, when Ecto starts with a
pool of 20 connections, the memory usage may quickly grow from 20MB to
50MB based on the operating system default values for TCP buffers. It is
advised to stick with the operating system defaults but they can be
tweaked if desired:
socket_options: [recbuf: 8192, sndbuf: 8192]
  We also recommend that developers consult the `MyXQL.start_link/1` documentation
for a complete listing of all supported options.
### Storage options
* `:charset` - the database encoding (default: "utf8mb4")
* `:collation` - the collation order
* `:dump_path` - where to place dumped structures
### After connect callback
  If you want to execute a callback as soon as a connection is established
to the database, you can use the `:after_connect` configuration. For
example, in your repository configuration you can add:
after_connect: {MyXQL, :query!, ["SET variable = value", []]}
You can also specify your own module that will receive the MyXQL
connection as argument.
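  For example, a minimal sketch of such a module (the module and function
  names below are placeholders, not part of this adapter):

      defmodule MyApp.AfterConnect do
        # Called with the established MyXQL connection.
        def set_session(conn) do
          MyXQL.query!(conn, "SET SESSION sql_mode = 'STRICT_TRANS_TABLES'", [])
        end
      end

      # In the repository configuration:
      # after_connect: {MyApp.AfterConnect, :set_session, []}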
## Limitations
There are some limitations when using Ecto with MySQL that one
needs to be aware of.
### Engine
Tables created by Ecto are guaranteed to use InnoDB, regardless
of the MySQL version.
### UUIDs
MySQL does not support UUID types. Ecto emulates them by using
`binary(16)`.
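  For example, a migration could declare such a column through the
  `:binary_id` type (the table and column names are placeholders):

      create table(:users, primary_key: false) do
        add :id, :binary_id, primary_key: true
      end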
### Read after writes
Because MySQL does not support RETURNING clauses in INSERT and
UPDATE, it does not support the `:read_after_writes` option of
`Ecto.Schema.field/3`.
### DDL Transaction
MySQL does not support migrations inside transactions as it
automatically commits after some commands like CREATE TABLE.
  Therefore, MySQL migrations do not run inside transactions.
## Old MySQL versions
### JSON support
  MySQL introduced a native JSON type in v5.7.8. If your server is
  using this version or higher, you may use the `:map` type for your
  column in a migration:
add :some_field, :map
If you're using older server versions, use a `TEXT` field instead:
add :some_field, :text
  In either case, the adapter will automatically encode/decode the
  value from JSON.
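  On the schema side, the column is then read and written as a plain map
  (the schema and field names below are placeholders):

      schema "events" do
        field :payload, :map
      end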
### usec in datetime
Old MySQL versions did not support usec in datetime while
more recent versions would round or truncate the usec value.
Therefore, in case the user decides to use microseconds in
datetimes and timestamps with MySQL, be aware of such
differences and consult the documentation for your MySQL
version.
If your version of MySQL supports microsecond precision, you
will be able to utilize Ecto's usec types.
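  For example, a schema field can be declared with one of the usec-aware
  types (assuming your MySQL version keeps microsecond precision):

      field :happened_at, :utc_datetime_usec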
"""
# Inherit all behaviour from Ecto.Adapters.SQL
use Ecto.Adapters.SQL,
driver: :myxql,
migration_lock: "FOR UPDATE"
# And provide a custom storage implementation
@behaviour Ecto.Adapter.Storage
@behaviour Ecto.Adapter.Structure
## Custom MySQL types
@impl true
def loaders({:embed, _}, type), do: [&json_decode/1, &Ecto.Adapters.SQL.load_embed(type, &1)]
def loaders({:map, _}, type), do: [&json_decode/1, &Ecto.Adapters.SQL.load_embed(type, &1)]
def loaders(:map, type), do: [&json_decode/1, type]
def loaders(:float, type), do: [&float_decode/1, type]
def loaders(:boolean, type), do: [&bool_decode/1, type]
def loaders(:binary_id, type), do: [Ecto.UUID, type]
def loaders(_, type), do: [type]
defp bool_decode(<<0>>), do: {:ok, false}
defp bool_decode(<<1>>), do: {:ok, true}
defp bool_decode(0), do: {:ok, false}
defp bool_decode(1), do: {:ok, true}
defp bool_decode(x), do: {:ok, x}
defp float_decode(%Decimal{} = decimal), do: {:ok, Decimal.to_float(decimal)}
defp float_decode(x), do: {:ok, x}
defp json_decode(x) when is_binary(x), do: {:ok, MyXQL.json_library().decode!(x)}
defp json_decode(x), do: {:ok, x}
## Storage API
@impl true
def storage_up(opts) do
database = Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
opts = Keyword.delete(opts, :database)
charset = opts[:charset] || "utf8mb4"
command =
~s(CREATE DATABASE `#{database}` DEFAULT CHARACTER SET = #{charset})
|> concat_if(opts[:collation], &"DEFAULT COLLATE = #{&1}")
case run_query(command, opts) do
{:ok, _} ->
:ok
{:error, %{mysql: %{name: :ER_DB_CREATE_EXISTS}}} ->
{:error, :already_up}
{:error, error} ->
{:error, Exception.message(error)}
{:exit, exit} ->
{:error, exit_to_exception(exit)}
end
end
defp concat_if(content, nil, _fun), do: content
defp concat_if(content, value, fun), do: content <> " " <> fun.(value)
@impl true
def storage_down(opts) do
database = Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
opts = Keyword.delete(opts, :database)
command = "DROP DATABASE `#{database}`"
case run_query(command, opts) do
{:ok, _} ->
:ok
{:error, %{mysql: %{name: :ER_DB_DROP_EXISTS}}} ->
{:error, :already_down}
{:error, %{mysql: %{name: :ER_BAD_DB_ERROR}}} ->
{:error, :already_down}
{:exit, :killed} ->
{:error, :already_down}
{:exit, exit} ->
{:error, exit_to_exception(exit)}
end
end
@impl Ecto.Adapter.Storage
def storage_status(opts) do
database = Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
opts = Keyword.delete(opts, :database)
check_database_query = "SELECT schema_name FROM information_schema.schemata WHERE schema_name = '#{database}'"
case run_query(check_database_query, opts) do
{:ok, %{num_rows: 0}} -> :down
{:ok, %{num_rows: _num_rows}} -> :up
other -> {:error, other}
end
end
@impl true
def supports_ddl_transaction? do
false
end
@impl true
def insert(adapter_meta, schema_meta, params, on_conflict, returning, opts) do
%{source: source, prefix: prefix} = schema_meta
{_, query_params, _} = on_conflict
key = primary_key!(schema_meta, returning)
{fields, values} = :lists.unzip(params)
sql = @conn.insert(prefix, source, fields, [fields], on_conflict, [])
cache_statement = "ecto_insert_#{source}"
opts = [{:cache_statement, cache_statement} | opts]
case Ecto.Adapters.SQL.query(adapter_meta, sql, values ++ query_params, opts) do
{:ok, %{num_rows: 1, last_insert_id: last_insert_id}} ->
{:ok, last_insert_id(key, last_insert_id)}
{:ok, %{num_rows: 2, last_insert_id: last_insert_id}} ->
{:ok, last_insert_id(key, last_insert_id)}
{:error, err} ->
case @conn.to_constraints(err, [source: source]) do
[] -> raise err
constraints -> {:invalid, constraints}
end
end
end
defp primary_key!(%{autogenerate_id: {_, key, _type}}, [key]), do: key
defp primary_key!(_, []), do: nil
defp primary_key!(%{schema: schema}, returning) do
raise ArgumentError, "MySQL does not support :read_after_writes in schemas for non-primary keys. " <>
"The following fields in #{inspect schema} are tagged as such: #{inspect returning}"
end
defp last_insert_id(nil, _last_insert_id), do: []
defp last_insert_id(_key, 0), do: []
defp last_insert_id(key, last_insert_id), do: [{key, last_insert_id}]
@impl true
def structure_dump(default, config) do
table = config[:migration_source] || "schema_migrations"
path = config[:dump_path] || Path.join(default, "structure.sql")
with {:ok, versions} <- select_versions(table, config),
{:ok, contents} <- mysql_dump(config),
{:ok, contents} <- append_versions(table, versions, contents) do
File.mkdir_p!(Path.dirname(path))
File.write!(path, contents)
{:ok, path}
end
end
defp select_versions(table, config) do
case run_query(~s[SELECT version FROM `#{table}` ORDER BY version], config) do
{:ok, %{rows: rows}} -> {:ok, Enum.map(rows, &hd/1)}
{:error, %{mysql: %{name: :ER_NO_SUCH_TABLE}}} -> {:ok, []}
{:error, _} = error -> error
{:exit, exit} -> {:error, exit_to_exception(exit)}
end
end
defp mysql_dump(config) do
case run_with_cmd("mysqldump", config, ["--no-data", "--routines", config[:database]]) do
{output, 0} -> {:ok, output}
{output, _} -> {:error, output}
end
end
defp append_versions(_table, [], contents) do
{:ok, contents}
end
defp append_versions(table, versions, contents) do
{:ok,
contents <>
~s[INSERT INTO `#{table}` (version) VALUES ] <>
Enum.map_join(versions, ", ", &"(#{&1})") <>
~s[;\n\n]}
end
@impl true
def structure_load(default, config) do
path = config[:dump_path] || Path.join(default, "structure.sql")
args = [
"--execute", "SET FOREIGN_KEY_CHECKS = 0; SOURCE #{path}; SET FOREIGN_KEY_CHECKS = 1",
"--database", config[:database]
]
case run_with_cmd("mysql", config, args) do
{_output, 0} -> {:ok, path}
{output, _} -> {:error, output}
end
end
## Helpers
defp run_query(sql, opts) do
{:ok, _} = Application.ensure_all_started(:ecto_sql)
{:ok, _} = Application.ensure_all_started(:myxql)
opts =
opts
|> Keyword.drop([:name, :log, :pool, :pool_size])
|> Keyword.put(:backoff_type, :stop)
|> Keyword.put(:max_restarts, 0)
task = Task.Supervisor.async_nolink(Ecto.Adapters.SQL.StorageSupervisor, fn ->
{:ok, conn} = MyXQL.start_link(opts)
value = MyXQL.query(conn, sql, [], opts)
GenServer.stop(conn)
value
end)
timeout = Keyword.get(opts, :timeout, 15_000)
case Task.yield(task, timeout) || Task.shutdown(task) do
{:ok, {:ok, result}} ->
{:ok, result}
{:ok, {:error, error}} ->
{:error, error}
{:exit, exit} ->
{:exit, exit}
nil ->
{:error, RuntimeError.exception("command timed out")}
end
end
defp exit_to_exception({%{__struct__: struct} = error, _})
when struct in [MyXQL.Error, DBConnection.Error],
do: error
defp exit_to_exception(reason), do: RuntimeError.exception(Exception.format_exit(reason))
defp run_with_cmd(cmd, opts, opt_args) do
unless System.find_executable(cmd) do
raise "could not find executable `#{cmd}` in path, " <>
"please guarantee it is available before running ecto commands"
end
env =
if password = opts[:password] do
[{"MYSQL_PWD", password}]
else
[]
end
host = opts[:hostname] || System.get_env("MYSQL_HOST") || "localhost"
port = opts[:port] || System.get_env("MYSQL_TCP_PORT") || "3306"
protocol = opts[:cli_protocol] || System.get_env("MYSQL_CLI_PROTOCOL") || "tcp"
user_args =
if username = opts[:username] do
["--user", username]
else
[]
end
args =
[
"--host", host,
"--port", to_string(port),
"--protocol", protocol
] ++ user_args ++ opt_args
System.cmd(cmd, args, env: env, stderr_to_stdout: true)
end
end
| 32.706941 | 114 | 0.649375 |
9eaf7d93e785f55142428e2f10d082429256829b | 1,404 | ex | Elixir | lib/plausible_web/endpoint.ex | pmhoudry/plausible | 454feec36e62b866ae86e07a1f4133d9782d4365 | [
"MIT"
] | null | null | null | lib/plausible_web/endpoint.ex | pmhoudry/plausible | 454feec36e62b866ae86e07a1f4133d9782d4365 | [
"MIT"
] | null | null | null | lib/plausible_web/endpoint.ex | pmhoudry/plausible | 454feec36e62b866ae86e07a1f4133d9782d4365 | [
"MIT"
] | null | null | null | defmodule PlausibleWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :plausible
use Sentry.Phoenix.Endpoint
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :plausible,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session,
store: :cookie,
key: "_plausible_key",
signing_salt: "3IL0ob4k",
max_age: 60*60*24*365*5, # 5 years, this is super long but the SlidingSessionTimeout will log people out if they don't return for 2 weeks
extra: "SameSite=Lax"
plug CORSPlug
plug PlausibleWeb.Router
def clean_url() do
url = PlausibleWeb.Endpoint.url
if Mix.env() == :prod do
URI.parse(url) |> Map.put(:port, nil) |> URI.to_string()
else
url
end
end
end
| 25.527273 | 141 | 0.692308 |
9eaf918986cc853f21cda55240c1fb33fea5aee9 | 1,722 | ex | Elixir | apps/discordbot/lib/discordbot/model/voice_identify.ex | alexweav/discordbot | 1ee138f7c42a6901ab769e2ce59a6878bf603290 | [
"MIT"
] | 4 | 2018-11-19T06:10:52.000Z | 2022-02-03T01:50:23.000Z | apps/discordbot/lib/discordbot/model/voice_identify.ex | alexweav/discordbot | 1ee138f7c42a6901ab769e2ce59a6878bf603290 | [
"MIT"
] | 254 | 2018-11-19T06:08:51.000Z | 2021-07-22T13:47:26.000Z | apps/discordbot/lib/discordbot/model/voice_identify.ex | alexweav/discordbot | 1ee138f7c42a6901ab769e2ce59a6878bf603290 | [
"MIT"
] | null | null | null | defmodule DiscordBot.Model.VoiceIdentify do
@derive [Poison.Encoder]
@moduledoc """
Identifies the client over the voice control websocket.
"""
use DiscordBot.Model.Serializable
alias DiscordBot.Model.{Serializable, VoicePayload}
defstruct([
:server_id,
:user_id,
:session_id,
:token
])
@typedoc """
The ID of the server (guild) that we are connecting in.
"""
@type server_id :: String.t()
@typedoc """
The ID of the user connecting to the voice websocket.
"""
@type user_id :: String.t()
@typedoc """
The ID of the voice session.
"""
@type session_id :: String.t()
@typedoc """
A token which authenticates this client.
"""
@type token :: String.t()
@type t :: %__MODULE__{
server_id: server_id,
user_id: user_id,
session_id: session_id,
token: token
}
@doc """
Converts a JSON map to a voice identify struct.
"""
@spec from_map(map) :: __MODULE__.t()
def from_map(map) do
map
|> Serializable.struct_from_map(as: %__MODULE__{})
end
@doc """
Builds the VoiceIdentify struct.
"""
  @spec new(String.t(), String.t(), String.t(), String.t()) :: __MODULE__.t()
def new(server_id, user_id, session_id, token) do
%__MODULE__{
server_id: server_id,
user_id: user_id,
session_id: session_id,
token: token
}
end
@doc """
Builds the VoiceIdentify struct and wraps it in a payload.
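  For example (the IDs and token below are illustrative placeholders):

      voice_identify("41771983423143937", "104694319306248192", "session-id", "token")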
"""
@spec voice_identify(String.t(), String.t(), String.t(), String.t()) :: VoicePayload.t()
def voice_identify(server_id, user_id, session_id, token) do
VoicePayload.payload(:identify, new(server_id, user_id, session_id, token))
end
end
| 22.96 | 90 | 0.642276 |
9eaf96e0efdcba430848c78eea1057b6078e9f7d | 1,245 | exs | Elixir | mix.exs | beamuav/elixir_mavlink_util | caab0b8d096e67f12512fa87197e73f53ccda5fd | [
"MIT"
] | null | null | null | mix.exs | beamuav/elixir_mavlink_util | caab0b8d096e67f12512fa87197e73f53ccda5fd | [
"MIT"
] | 1 | 2021-05-20T22:25:53.000Z | 2021-05-20T22:25:53.000Z | mix.exs | beamuav/elixir_mavlink_util | caab0b8d096e67f12512fa87197e73f53ccda5fd | [
"MIT"
] | null | null | null | defmodule MAVLink.Util.MixProject do
use Mix.Project
def project do
[
app: :mavlink_util,
version: "0.1.0",
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/robinhilliard/elixir-mavlink-util",
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
mod: {MAVLink.Util.Application, []},
extra_applications: [:mavlink, :logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:mavlink, "~> 0.9.0"}
]
end
defp description() do
"A helper layer on top of MAVLink for performing common commands
and tasks with one or more remote vehicles. It can either be
used as an API or directly from iex with an experience similar
to Ardupilot's MAVProxy."
end
defp package() do
[
name: "mavlink_util",
files: ["lib", "mix.exs", "README.md", "LICENSE"],
exclude_patterns: [".DS_Store"],
licenses: ["MIT"],
links: %{"Github" => "https://github.com/beamuav/elixir-mavlink-util"},
maintainers: ["Robin Hilliard"]
]
end
end
| 24.9 | 77 | 0.611245 |
9eafa566df7754767b68d5534a25fd984a1bc711 | 4,297 | ex | Elixir | apps/omg_watcher/lib/web/controllers/challenge.ex | Pongch/elixir-omg | 8a33c246898b49cba62b847e0989d9b6c89f5106 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/lib/web/controllers/challenge.ex | Pongch/elixir-omg | 8a33c246898b49cba62b847e0989d9b6c89f5106 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/lib/web/controllers/challenge.ex | Pongch/elixir-omg | 8a33c246898b49cba62b847e0989d9b6c89f5106 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.Web.Controller.Challenge do
@moduledoc """
Handles exit challenges
"""
use OMG.Watcher.Web, :controller
use PhoenixSwagger
alias OMG.API.Utxo
require Utxo
alias OMG.Watcher.Challenger
alias OMG.Watcher.Web.View
import OMG.Watcher.Web.ErrorHandler
@doc """
Challenges exits
"""
def get_utxo_challenge(conn, %{"utxo_pos" => utxo_pos}) do
{utxo_pos, ""} = Integer.parse(utxo_pos)
utxo_pos = utxo_pos |> Utxo.Position.decode()
Challenger.create_challenge(utxo_pos)
|> respond(conn)
end
defp respond({:ok, challenge}, conn) do
render(conn, View.Challenge, :challenge, challenge: challenge)
end
defp respond({:error, code}, conn) do
handle_error(conn, code)
end
def swagger_definitions do
%{
Challenge:
swagger_schema do
title("Exit challenge")
properties do
cutxopos(:string, "Challenging utxo position", required: true)
eutxoindex(:string, "Exiting utxo position", required: true)
txbytes(:string, "Transaction that spends exiting utxo", required: true)
proof(:string, "Proof that transaction is contained in a block", required: true)
sigs(:string, "Signatures of users that participated in the challenging transaction", required: true)
end
example(%{
cutxopos: "100001001",
eutxoindex: "200001001",
proof:
"0000000000000000000000000000000000000000000000000000000000000000AD3228B676F7D3CD4284A5443F17F1962B36E491B30A40B2405849E597BA5FB5B4C11951957C6F8F642C4AF61CD6B24640FEC6DC7FC607EE8206A99E92410D3021DDB9A356815C3FAC1026B6DEC5DF3124AFBADB485C9BA5A3E3398A04B7BA85E58769B32A1BEAF1EA27375A44095A0D1FB664CE2DD358E7FCBFB78C26A193440EB01EBFC9ED27500CD4DFC979272D1F0913CC9F66540D7E8005811109E1CF2D887C22BD8750D34016AC3C66B5FF102DACDD73F6B014E710B51E8022AF9A1968FFD70157E48063FC33C97A050F7F640233BF646CC98D9524C6B92BCF3AB56F839867CC5F7F196B93BAE1E27E6320742445D290F2263827498B54FEC539F756AFCEFAD4E508C098B9A7E1D8FEB19955FB02BA9675585078710969D3440F5054E0F9DC3E7FE016E050EFF260334F18A5D4FE391D82092319F5964F2E2EB7C1C3A5F8B13A49E282F609C317A833FB8D976D11517C571D1221A265D25AF778ECF8923490C6CEEB450AECDC82E28293031D10C7D73BF85E57BF041A97360AA2C5D99CC1DF82D9C4B87413EAE2EF048F94B4D3554CEA73D92B0F7AF96E0271C691E2BB5C67ADD7C6CAF302256ADEDF7AB114DA0ACFE870D449A3A489F781D659E8BECCDA7BCE9F4E8618B6BD2F4132CE798CDC7A60E7E1460A7299E3C6342A579626D2",
sigs:
"6BFB9B2DBE3201BDC48072E69148A0ED9AF3E01D87772C8A77A478F998CEB5236B0AE64FAB3C21C078188B162D86913010A988E4B0CE68EE95D86783008FD9C71B0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
txbytes:
"F849822AF880808080809400000000000000000000000000000000000000009489F5AD3F771617E853451A93F7A73E48CF5550D104948CE5C73FD5BEFFE0DCBCB6AFE571A2A3E73B043C03"
})
end,
BadRequestError:
swagger_schema do
title("Bad request")
description("Erroneous request from the user")
properties do
error(:string, "The message of the error raised", required: true)
end
example(%{
error: "exit is valid"
})
end
}
end
swagger_path :get_challenge_data do
get("/utxo/{utxo_pos}/challenge_data")
summary("Gets challenge for a given exit")
parameters do
utxo_pos(:path, :integer, "The position of the exiting utxo", required: true)
end
response(200, "OK", Schema.ref(:Challenge))
response(400, "Client Error", Schema.ref(:BadRequestError))
end
end
| 41.718447 | 1,041 | 0.759832 |
9eb004f8e76e4350986615b4d9e5db2621622895 | 776 | ex | Elixir | lib/perspective/event_chain/page_manifest/page_manifest_state.ex | backmath/perspective | a0a577d0ffb06805b64e4dcb171a093e051884b0 | [
"MIT"
] | 2 | 2020-04-24T19:43:06.000Z | 2020-04-24T19:52:27.000Z | lib/perspective/event_chain/page_manifest/page_manifest_state.ex | backmath/perspective | a0a577d0ffb06805b64e4dcb171a093e051884b0 | [
"MIT"
] | null | null | null | lib/perspective/event_chain/page_manifest/page_manifest_state.ex | backmath/perspective | a0a577d0ffb06805b64e4dcb171a093e051884b0 | [
"MIT"
] | null | null | null | defmodule Perspective.EventChain.PageManifest.State do
defstruct pages: []
alias __MODULE__
def new() do
page_name = new_page_name()
%State{
pages: [page_name]
}
end
def current_page(%State{pages: pages}) do
List.last(pages)
end
def add_page(%State{pages: pages} = state) do
next_page_name =
State.current_page(state)
|> next_page_number()
|> new_page_name()
%State{
pages: pages ++ [next_page_name]
}
end
defp next_page_number(name) do
~r/event-chain\.(.*)\.json/
|> Regex.run(name)
|> Enum.at(1)
|> String.to_integer()
|> Kernel.+(1)
end
defp new_page_name(count \\ 0) do
number = String.pad_leading("#{count}", 7, "0")
"event-chain.#{number}.json"
end
end
| 18.926829 | 54 | 0.612113 |
9eb0172343595ff7d9683aca256b80d160c742ca | 516 | ex | Elixir | lib/plug/quiet_logger.ex | Driftrock/quiet_logger | e0ec525bc22d17b135709e7f48b924bb037c62bf | [
"Apache-2.0"
] | 1 | 2019-01-24T09:09:14.000Z | 2019-01-24T09:09:14.000Z | lib/plug/quiet_logger.ex | Driftrock/quiet_logger | e0ec525bc22d17b135709e7f48b924bb037c62bf | [
"Apache-2.0"
] | 1 | 2018-07-24T11:05:48.000Z | 2018-07-24T14:33:26.000Z | lib/plug/quiet_logger.ex | Driftrock/quiet_logger | e0ec525bc22d17b135709e7f48b924bb037c62bf | [
"Apache-2.0"
] | 1 | 2018-07-16T13:42:16.000Z | 2018-07-16T13:42:16.000Z | defmodule Plug.QuietLogger do
@behaviour Plug
def init(opts) do
path = Keyword.get(opts, :path, "/health-check")
log = Keyword.get(opts, :log, :info)
%{log: log, path: path}
end
def call(%{request_path: path} = conn, %{log: log, path: paths}) when is_list(paths) do
if path in paths, do: conn, else: Plug.Logger.call(conn, log)
end
def call(%{request_path: path} = conn, %{log: :info, path: path}), do: conn
def call(conn, %{log: log}) do
Plug.Logger.call(conn, log)
end
end
| 24.571429 | 89 | 0.633721 |
9eb0402b087c667b97b60be7e5ea2861def7f103 | 2,123 | ex | Elixir | lib/credo/cli/command/info/info_output.ex | kanmaniselvan/credo | 276e0fc24d1bf56c8fc2902a9e933c8f208ce391 | [
"MIT"
] | 1 | 2021-12-01T13:37:43.000Z | 2021-12-01T13:37:43.000Z | lib/credo/cli/command/info/info_output.ex | kanmaniselvan/credo | 276e0fc24d1bf56c8fc2902a9e933c8f208ce391 | [
"MIT"
] | null | null | null | lib/credo/cli/command/info/info_output.ex | kanmaniselvan/credo | 276e0fc24d1bf56c8fc2902a9e933c8f208ce391 | [
"MIT"
] | 1 | 2019-10-08T16:42:40.000Z | 2019-10-08T16:42:40.000Z | defmodule Credo.CLI.Command.Info.InfoOutput do
@moduledoc false
use Credo.CLI.Output.FormatDelegator,
default: Credo.CLI.Command.Info.Output.Default,
json: Credo.CLI.Command.Info.Output.Json
alias Credo.CLI.Output.UI
def print(exec, info) do
format_mod = format_mod(exec)
format_mod.print(exec, info)
end
def print_help(exec) do
usage = ["Usage: ", :olive, "mix credo info [options]"]
description = """
Shows information about Credo and its environment.
"""
example = [
"Example: ",
:olive,
:faint,
"$ mix credo info --format=json --verbose"
]
options = """
Info options:
-c, --checks Only include checks that match the given strings
--checks-with-tag Only include checks that match the given tag (can be used multiple times)
--checks-without-tag Ignore checks that match the given tag (can be used multiple times)
--config-file Use the given config file
-C, --config-name Use the given config instead of "default"
--enable-disabled-checks Re-enable disabled checks that match the given strings
--files-included Only include these files (accepts globs, can be used multiple times)
--files-excluded Exclude these files (accepts globs, can be used multiple times)
--format Display the list in a specific format (json,flycheck,oneline)
-i, --ignore-checks Ignore checks that match the given strings
--ignore Alias for --ignore-checks
--min-priority Minimum priority to show issues (high,medium,normal,low,lower or number)
--only Alias for --checks
--verbose Display more information (e.g. checked files)
General options:
-v, --version Show version
-h, --help Show this help
"""
UI.puts(usage)
UI.puts(description)
UI.puts(example)
UI.puts(options)
exec
end
end
| 34.241935 | 109 | 0.595384 |
9eb05da9c8f5a8e5109e6d362d3bd5650855ed91 | 1,689 | exs | Elixir | test/central_web/controllers/logging/audit_log_controller_test.exs | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | test/central_web/controllers/logging/audit_log_controller_test.exs | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | test/central_web/controllers/logging/audit_log_controller_test.exs | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null |
defmodule CentralWeb.Logging.AuditLogControllerTest do
use CentralWeb.ConnCase, async: true
# alias CentralWeb.Logging.AuditLog
alias Central.Logging.Helpers
# @valid_attrs %{action: "some content", details: "{}", ip: "some content"}
# @invalid_attrs %{}
alias Central.Helpers.GeneralTestLib
alias Central.Logging.LoggingTestLib
setup do
GeneralTestLib.conn_setup(~w(logging.audit.show))
|> LoggingTestLib.logging_setup()
end
test "lists all entries on index", %{conn: conn} do
conn = get(conn, Routes.logging_audit_log_path(conn, :index))
assert html_response(conn, 200) =~ "Audit logs"
end
test "searches logs", %{conn: conn, user: user} do
conn =
post(conn, Routes.logging_audit_log_path(conn, :search),
search: %{
name: "Test",
action: "Bedrock object import",
centaur_user: "##{user.id}"
}
)
assert html_response(conn, 200) =~ "Audit logs"
end
test "searches logs (empty values)", %{conn: conn} do
conn =
post(conn, Routes.logging_audit_log_path(conn, :search),
search: %{
name: "",
action: "All",
centaur_user: ""
}
)
assert html_response(conn, 200) =~ "Audit logs"
end
test "shows chosen resource", %{conn: conn} do
# We need to call a path first because currently the conn has
# no current_user assigned
conn = get(conn, Routes.logging_audit_log_path(conn, :index))
audit_log = Helpers.add_audit_log(conn, "action", %{})
conn = get(conn, Routes.logging_audit_log_path(conn, :show, audit_log))
assert html_response(conn, 200) =~ "Audit log ##{audit_log.id}"
end
end
| 29.12069 | 77 | 0.648313 |
9eb082f95b267e7b7c9426b3e5e7dd77f810f61e | 682 | ex | Elixir | lib/bolt/schema/action_group.ex | CyberFlameGO/bolt | 225e6276983bec646e7f13519df066e8e1e770ed | [
"ISC"
] | null | null | null | lib/bolt/schema/action_group.ex | CyberFlameGO/bolt | 225e6276983bec646e7f13519df066e8e1e770ed | [
"ISC"
] | null | null | null | lib/bolt/schema/action_group.ex | CyberFlameGO/bolt | 225e6276983bec646e7f13519df066e8e1e770ed | [
"ISC"
] | null | null | null |
defmodule Bolt.Schema.ActionGroup do
@moduledoc "A collection of actions to run"
alias Bolt.Schema.Action
import Ecto.Changeset
use Ecto.Schema
schema "action_group" do
field :guild_id, :id
field :name, :string
field :deduplicate, :boolean, default: true
has_many :actions, Action, foreign_key: :group_id, on_replace: :delete
end
@type t :: %__MODULE__{}
@spec changeset(%__MODULE__{}, map()) :: Changeset.t()
def changeset(group, params \\ %{}) do
group
|> cast(params, [:guild_id, :name, :deduplicate])
|> cast_assoc(:actions)
|> validate_length(:name, min: 1, max: 30)
|> unique_constraint([:name, :guild_id])
end
end
| 25.259259 | 74 | 0.668622 |
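# --- Usage sketch (editor's addition) ---
# Building a changeset for the ActionGroup schema above. The guild id and the
# shape of the nested "actions" params are illustrative assumptions; cast_assoc
# hands each entry to the Bolt.Schema.Action changeset, and the unique
# constraint on {name, guild_id} is only checked when the changeset hits the repo.
params = %{
  "guild_id" => 123_456_789,
  "name" => "spam-cleanup",
  "deduplicate" => true,
  "actions" => [%{"module" => "delete_message", "data" => %{}}]
}

changeset = Bolt.Schema.ActionGroup.changeset(%Bolt.Schema.ActionGroup{}, params)
# changeset.valid? reflects the name length validation (1..30 characters).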
9eb09ba4ce032553098a010506c4a5944468123e | 2,002 | ex | Elixir | code/tooling/webapp/web/web.ex | alvarocamillont/introdu-o_elixir | 1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1 | [
"MIT"
] | null | null | null | code/tooling/webapp/web/web.ex | alvarocamillont/introdu-o_elixir | 1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1 | [
"MIT"
] | 1 | 2021-03-09T16:27:25.000Z | 2021-03-09T16:27:25.000Z | programming-elixir-book/code/tooling/webapp/web/web.ex | jordanhubbard/elixir-projects | dee341d672e83a45a17a4a85abd54a480f95c506 | [
"BSD-2-Clause"
] | null | null | null |
#---
# Excerpted from "Programming Elixir ≥ 1.6",
# published by The Pragmatic Bookshelf.
# Copyrights apply to this code. It may not be used to create training material,
# courses, books, articles, and the like. Contact us if you are in doubt.
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/elixir16 for more book information.
#---
defmodule Webapp.Web do
@moduledoc """
A module that keeps using definitions for controllers,
views and so on.
This can be used in your application as:
use Webapp.Web, :controller
use Webapp.Web, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below.
"""
def model do
quote do
use Ecto.Schema
import Ecto
import Ecto.Changeset
import Ecto.Query
end
end
def controller do
quote do
use Phoenix.Controller
alias Webapp.Repo
import Ecto
import Ecto.Query
import Webapp.Router.Helpers
import Webapp.Gettext
end
end
def view do
quote do
use Phoenix.View, root: "web/templates"
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import Webapp.Router.Helpers
import Webapp.ErrorHelpers
import Webapp.Gettext
end
end
def router do
quote do
use Phoenix.Router
end
end
def channel do
quote do
use Phoenix.Channel
alias Webapp.Repo
import Ecto
import Ecto.Query
import Webapp.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22.244444 | 88 | 0.680819 |
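# --- Usage sketch (editor's addition) ---
# The __using__/1 macro above dispatches on the given atom, so a controller in
# this application pulls in the :controller quote block like this
# (Webapp.PageController and its index action are illustrative assumptions):
defmodule Webapp.PageController do
  use Webapp.Web, :controller

  def index(conn, _params) do
    render(conn, "index.html")
  end
end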
9eb0dca1fcfa863a33c0cbe199a5b01011b35cfb | 6,210 | ex | Elixir | lib/rdf/query/bgp/stream.ex | rdf-elixir/rdf-ex | 7d4280ec9a912ef6ee9fc96ecdfdf26647016d6a | [
"MIT"
] | 19 | 2020-06-05T16:55:54.000Z | 2022-03-22T10:30:11.000Z | lib/rdf/query/bgp/stream.ex | rdf-elixir/rdf-ex | 7d4280ec9a912ef6ee9fc96ecdfdf26647016d6a | [
"MIT"
] | 4 | 2020-07-03T21:02:55.000Z | 2021-11-18T07:22:06.000Z | lib/rdf/query/bgp/stream.ex | rdf-elixir/rdf-ex | 7d4280ec9a912ef6ee9fc96ecdfdf26647016d6a | [
"MIT"
] | 3 | 2020-07-03T13:25:36.000Z | 2021-04-04T12:33:51.000Z |
defmodule RDF.Query.BGP.Stream do
@moduledoc false
@behaviour RDF.Query.BGP.Matcher
alias RDF.Query.BGP
alias RDF.Query.BGP.{QueryPlanner, BlankNodeHandler}
alias RDF.{Graph, Description}
@impl RDF.Query.BGP.Matcher
def stream(bgp, graph, opts \\ [])
# https://www.w3.org/TR/sparql11-query/#emptyGroupPattern
def stream(%BGP{triple_patterns: []}, _, _), do: to_stream([%{}])
def stream(%BGP{triple_patterns: triple_patterns}, %Graph{} = graph, opts) do
{bnode_state, preprocessed_triple_patterns} = BlankNodeHandler.preprocess(triple_patterns)
preprocessed_triple_patterns
|> QueryPlanner.query_plan()
|> do_execute(graph)
|> BlankNodeHandler.postprocess(triple_patterns, bnode_state, opts)
end
@impl RDF.Query.BGP.Matcher
def execute(bgp, graph, opts \\ []) do
stream(bgp, graph, opts)
|> Enum.to_list()
end
defp do_execute([triple_pattern | remaining], graph) do
do_execute(remaining, graph, match(graph, triple_pattern))
end
# CAUTION: Careful with using Enum.empty?/1 on the solution stream!! The first match must be
# searched for every call in the query loop repeatedly then, which can have dramatic effects potentially.
# Only use it very close to the data (in the match/1 functions operating on data directly).
defp do_execute(triple_patterns, graph, solutions)
defp do_execute(_, _, nil), do: to_stream([])
defp do_execute([], _, solutions), do: solutions
defp do_execute([triple_pattern | remaining], graph, solutions) do
do_execute(remaining, graph, match_with_solutions(graph, triple_pattern, solutions))
end
defp match_with_solutions(graph, {s, p, o} = triple_pattern, existing_solutions)
when is_tuple(s) or is_tuple(p) or is_tuple(o) do
triple_pattern
|> apply_solutions(existing_solutions)
|> Stream.flat_map(&merging_match(&1, graph))
end
defp match_with_solutions(graph, triple_pattern, existing_solutions) do
if solutions = match(graph, triple_pattern) do
Stream.flat_map(solutions, fn solution ->
Stream.map(existing_solutions, &Map.merge(solution, &1))
end)
end
end
defp apply_solutions(triple_pattern, solutions) do
apply_solution =
case triple_pattern do
{{s}, {p}, {o}} -> fn solution -> {solution, {solution[s], solution[p], solution[o]}} end
{{s}, {p}, o} -> fn solution -> {solution, {solution[s], solution[p], o}} end
{{s}, p, {o}} -> fn solution -> {solution, {solution[s], p, solution[o]}} end
{{s}, p, o} -> fn solution -> {solution, {solution[s], p, o}} end
{s, {p}, {o}} -> fn solution -> {solution, {s, solution[p], solution[o]}} end
{s, {p}, o} -> fn solution -> {solution, {s, solution[p], o}} end
{s, p, {o}} -> fn solution -> {solution, {s, p, solution[o]}} end
_ -> nil
end
if apply_solution do
Stream.map(solutions, apply_solution)
else
solutions
end
end
defp merging_match({dependent_solution, triple_pattern}, graph) do
case match(graph, triple_pattern) do
nil ->
[]
solutions ->
Stream.map(solutions, fn solution ->
Map.merge(dependent_solution, solution)
end)
end
end
defp match(%Graph{descriptions: descriptions}, {subject_variable, _, _} = triple_pattern)
when is_atom(subject_variable) do
Stream.flat_map(descriptions, fn {subject, description} ->
case match(description, solve_variables(subject_variable, subject, triple_pattern)) do
nil ->
[]
solutions ->
Stream.map(solutions, fn solution ->
Map.put(solution, subject_variable, subject)
end)
end
end)
end
defp match(%Graph{} = graph, {subject, _, _} = triple_pattern) do
case graph[subject] do
nil -> nil
description -> match(description, triple_pattern)
end
end
defp match(%Description{predications: predications}, {_, variable, variable})
when is_atom(variable) do
matches =
Stream.filter(predications, fn {predicate, objects} -> Map.has_key?(objects, predicate) end)
unless Enum.empty?(matches) do
Stream.map(matches, fn {predicate, _} -> %{variable => predicate} end)
end
end
defp match(%Description{predications: predications}, {_, predicate_variable, object_variable})
when is_atom(predicate_variable) and is_atom(object_variable) do
Stream.flat_map(predications, fn {predicate, objects} ->
Stream.map(objects, fn {object, _} ->
%{predicate_variable => predicate, object_variable => object}
end)
end)
end
defp match(
%Description{predications: predications},
{_, predicate_variable, object}
)
when is_atom(predicate_variable) do
matches = Stream.filter(predications, fn {_, objects} -> Map.has_key?(objects, object) end)
unless Enum.empty?(matches) do
Stream.map(matches, fn {predicate, _} -> %{predicate_variable => predicate} end)
end
end
defp match(
%Description{predications: predications},
{_, predicate, object_or_variable}
) do
case predications[predicate] do
nil ->
nil
objects ->
cond do
# object_or_variable is a variable
is_atom(object_or_variable) ->
Stream.map(objects, fn {object, _} ->
%{object_or_variable => object}
end)
# object_or_variable is a object
Map.has_key?(objects, object_or_variable) ->
to_stream([%{}])
# else
true ->
nil
end
end
end
defp solve_variables(var, val, {var, var, var}), do: {val, val, val}
defp solve_variables(var, val, {s, var, var}), do: {s, val, val}
defp solve_variables(var, val, {var, p, var}), do: {val, p, val}
defp solve_variables(var, val, {var, var, o}), do: {val, val, o}
defp solve_variables(var, val, {var, p, o}), do: {val, p, o}
defp solve_variables(var, val, {s, var, o}), do: {s, val, o}
defp solve_variables(var, val, {s, p, var}), do: {s, p, val}
defp solve_variables(_, _, pattern), do: pattern
defp to_stream(enum), do: Stream.into(enum, [])
end
| 33.208556 | 107 | 0.644605 |
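# --- Usage sketch (editor's addition) ---
# Matching a one-pattern basic graph pattern against a small graph with the
# Stream matcher above. The example IRIs are illustrative assumptions; variables
# are plain atoms inside the pattern tuples, and every solution is a map keyed
# by those variable names.
alias RDF.Query.BGP

knows = RDF.iri("http://example.com/knows")

graph =
  RDF.Graph.new([
    {RDF.iri("http://example.com/alice"), knows, RDF.iri("http://example.com/bob")}
  ])

# {:person, knows, :friend} binds :person and :friend for every matching triple.
bgp = %BGP{triple_patterns: [{:person, knows, :friend}]}

# execute/3 materializes the lazily built solution stream into a list such as
# [%{person: <alice IRI>, friend: <bob IRI>}]; stream/3 returns it unevaluated.
solutions = BGP.Stream.execute(bgp, graph)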
9eb184bbee8e375b83dcf49ffad2d0073f1cb697 | 325 | ex | Elixir | stack2/lib/stack2/sub_supervisor.ex | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | stack2/lib/stack2/sub_supervisor.ex | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | stack2/lib/stack2/sub_supervisor.ex | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null |
defmodule Stack2.SubSupervisor do
use Supervisor
def start_link(stash_pid) do
{:ok, _pid} = Supervisor.start_link(__MODULE__, stash_pid)
end
def init(stash_pid) do
child_processes = [
worker(Stack2.Server, [stash_pid])
]
supervise child_processes, strategy: :one_for_one
end
end | 23.214286 | 63 | 0.689231 |
9eb190fcfe7c1ac48da6b4af938e0201e1004cc4 | 9,327 | exs | Elixir | test/astarte_housekeeping/rpc/amqp_server_test.exs | rbino/astarte_housekeeping | d582175f9499ffd3eaefe3a69fcfceba8b2089b9 | [
"Apache-2.0"
] | null | null | null | test/astarte_housekeeping/rpc/amqp_server_test.exs | rbino/astarte_housekeeping | d582175f9499ffd3eaefe3a69fcfceba8b2089b9 | [
"Apache-2.0"
] | null | null | null | test/astarte_housekeeping/rpc/amqp_server_test.exs | rbino/astarte_housekeeping | d582175f9499ffd3eaefe3a69fcfceba8b2089b9 | [
"Apache-2.0"
] | null | null | null | #
# This file is part of Astarte.
#
# Copyright 2017-2018 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.Housekeeping.RPC.HandlerTest do
use ExUnit.Case
alias Astarte.RPC.Protocol.Housekeeping.{
Call,
CreateRealm,
DoesRealmExist,
DoesRealmExistReply,
GenericErrorReply,
GenericOkReply,
GetRealm,
GetRealmReply,
GetRealmsList,
GetRealmsListReply,
Reply
}
alias Astarte.Housekeeping.RPC.Handler
alias Astarte.Housekeeping.DatabaseTestHelper
@invalid_test_realm "not~valid"
@not_existing_realm "nonexistingrealm"
@test_realm "newtestrealm"
@replication_factor 1
@public_key_pem "this_is_not_a_pem_but_it_will_do_for_tests"
defp generic_error(
error_name,
user_readable_message \\ nil,
user_readable_error_name \\ nil,
error_data \\ nil
) do
%Reply{
reply:
{:generic_error_reply,
%GenericErrorReply{
error_name: error_name,
user_readable_message: user_readable_message,
user_readable_error_name: user_readable_error_name,
error_data: error_data
}},
error: true
}
end
defp generic_ok(async \\ false) do
%Reply{reply: {:generic_ok_reply, %GenericOkReply{async_operation: async}}}
end
test "invalid empty message" do
encoded =
Call.new()
|> Call.encode()
assert Handler.handle_rpc(encoded) == {:error, :empty_call}
end
test "CreateRealm call with nil realm" do
encoded =
Call.new(call: {:create_realm, CreateRealm.new()})
|> Call.encode()
expected = generic_error("empty_name", "empty realm name")
{:ok, reply} = Handler.handle_rpc(encoded)
assert Reply.decode(reply) == expected
end
test "CreateRealm call with nil public key" do
encoded =
Call.new(call: {:create_realm, CreateRealm.new(realm: @test_realm)})
|> Call.encode()
expected = generic_error("empty_public_key", "empty jwt public key pem")
{:ok, reply} = Handler.handle_rpc(encoded)
assert Reply.decode(reply) == expected
end
test "valid call, invalid realm_name" do
encoded =
Call.new(
call:
{:create_realm,
CreateRealm.new(realm: @invalid_test_realm, jwt_public_key_pem: @public_key_pem)}
)
|> Call.encode()
{:ok, reply} = Handler.handle_rpc(encoded)
assert Reply.decode(reply) == generic_error("realm_not_allowed")
end
test "realm creation and DoesRealmExist successful call with implicit replication" do
on_exit(fn ->
DatabaseTestHelper.realm_cleanup(@test_realm)
end)
encoded =
Call.new(
call:
{:create_realm,
CreateRealm.new(realm: @test_realm, jwt_public_key_pem: @public_key_pem)}
)
|> Call.encode()
{:ok, create_reply} = Handler.handle_rpc(encoded)
assert Reply.decode(create_reply) == generic_ok()
encoded =
%Call{call: {:does_realm_exist, %DoesRealmExist{realm: @test_realm}}}
|> Call.encode()
expected = %Reply{reply: {:does_realm_exist_reply, %DoesRealmExistReply{exists: true}}}
{:ok, exists_reply} = Handler.handle_rpc(encoded)
assert Reply.decode(exists_reply) == expected
end
test "realm creation and GetRealm successful call with explicit SimpleStrategy replication" do
on_exit(fn ->
DatabaseTestHelper.realm_cleanup(@test_realm)
end)
encoded =
Call.new(
call:
{:create_realm,
CreateRealm.new(
realm: @test_realm,
jwt_public_key_pem: @public_key_pem,
replication_factor: @replication_factor
)}
)
|> Call.encode()
{:ok, create_reply} = Handler.handle_rpc(encoded)
assert Reply.decode(create_reply) == generic_ok()
encoded =
%Call{call: {:get_realm, %GetRealm{realm_name: @test_realm}}}
|> Call.encode()
expected = %Reply{
reply:
{:get_realm_reply,
%GetRealmReply{
realm_name: @test_realm,
jwt_public_key_pem: @public_key_pem,
replication_class: :SIMPLE_STRATEGY,
replication_factor: @replication_factor
}}
}
{:ok, exists_reply} = Handler.handle_rpc(encoded)
assert Reply.decode(exists_reply) == expected
end
test "realm creation and GetRealm successful call with explicit NetworkTopologyStrategy replication" do
on_exit(fn ->
DatabaseTestHelper.realm_cleanup(@test_realm)
end)
encoded =
Call.new(
call:
{:create_realm,
CreateRealm.new(
realm: @test_realm,
jwt_public_key_pem: @public_key_pem,
replication_class: :NETWORK_TOPOLOGY_STRATEGY,
datacenter_replication_factors: [{"datacenter1", 1}]
)}
)
|> Call.encode()
{:ok, create_reply} = Handler.handle_rpc(encoded)
assert Reply.decode(create_reply) == generic_ok()
encoded =
%Call{call: {:get_realm, %GetRealm{realm_name: @test_realm}}}
|> Call.encode()
expected = %Reply{
reply:
{:get_realm_reply,
%GetRealmReply{
realm_name: @test_realm,
jwt_public_key_pem: @public_key_pem,
replication_class: :NETWORK_TOPOLOGY_STRATEGY,
datacenter_replication_factors: [{"datacenter1", 1}]
}}
}
{:ok, exists_reply} = Handler.handle_rpc(encoded)
assert Reply.decode(exists_reply) == expected
end
test "realm creation fails with invalid SimpleStrategy replication" do
encoded =
Call.new(
call:
{:create_realm,
CreateRealm.new(
realm: @test_realm,
jwt_public_key_pem: @public_key_pem,
replication_factor: 9
)}
)
|> Call.encode()
{:ok, create_reply} = Handler.handle_rpc(encoded)
assert %Reply{
error: true,
reply: {:generic_error_reply, %GenericErrorReply{error_name: "invalid_replication"}}
} = Reply.decode(create_reply)
end
test "realm creation fails with invalid NetworkTopologyStrategy replication" do
encoded =
Call.new(
call:
{:create_realm,
CreateRealm.new(
realm: @test_realm,
jwt_public_key_pem: @public_key_pem,
replication_class: :NETWORK_TOPOLOGY_STRATEGY,
datacenter_replication_factors: [{"imaginarydatacenter", 3}]
)}
)
|> Call.encode()
{:ok, create_reply} = Handler.handle_rpc(encoded)
assert %Reply{
error: true,
reply: {:generic_error_reply, %GenericErrorReply{error_name: "invalid_replication"}}
} = Reply.decode(create_reply)
end
test "DoesRealmExist non-existing realm" do
encoded =
%Call{call: {:does_realm_exist, %DoesRealmExist{realm: @not_existing_realm}}}
|> Call.encode()
expected = %Reply{reply: {:does_realm_exist_reply, %DoesRealmExistReply{exists: false}}}
{:ok, enc_reply} = Handler.handle_rpc(encoded)
assert Reply.decode(enc_reply) == expected
end
test "GetRealmsList successful call" do
encoded =
%Call{call: {:get_realms_list, %GetRealmsList{}}}
|> Call.encode()
{:ok, list_reply} = Handler.handle_rpc(encoded)
assert match?(
%Reply{reply: {:get_realms_list_reply, %GetRealmsListReply{realms_names: _names}}},
Reply.decode(list_reply)
)
end
test "GetRealm successful call" do
on_exit(fn ->
DatabaseTestHelper.realm_cleanup(@test_realm)
end)
encoded =
Call.new(
call:
{:create_realm,
CreateRealm.new(
realm: @test_realm,
jwt_public_key_pem: @public_key_pem,
replication_factor: @replication_factor
)}
)
|> Call.encode()
{:ok, create_reply} = Handler.handle_rpc(encoded)
assert Reply.decode(create_reply) == generic_ok()
encoded =
%Call{call: {:get_realm, %GetRealm{realm_name: @test_realm}}}
|> Call.encode()
{:ok, reply} = Handler.handle_rpc(encoded)
expected = %Reply{
reply:
{:get_realm_reply,
%GetRealmReply{
realm_name: @test_realm,
jwt_public_key_pem: @public_key_pem,
replication_class: :SIMPLE_STRATEGY,
replication_factor: @replication_factor
}}
}
assert Reply.decode(reply) == expected
end
test "GetRealm failed call" do
encoded =
%Call{call: {:get_realm, %GetRealm{realm_name: @not_existing_realm}}}
|> Call.encode()
{:ok, reply} = Handler.handle_rpc(encoded)
expected = generic_error("realm_not_found")
assert Reply.decode(reply) == expected
end
end
| 26.878963 | 105 | 0.639112 |
9eb19db59eba6e21df026b59ec6010b0d826a90b | 768 | ex | Elixir | examples/golomb_bab.ex | mbta/solverl | 3d972db4fa173e9453357814884bd37f52a9713d | [
"MIT"
] | 30 | 2020-07-30T19:22:34.000Z | 2022-03-28T02:06:55.000Z | examples/golomb_bab.ex | mbta/solverl | 3d972db4fa173e9453357814884bd37f52a9713d | [
"MIT"
] | 1 | 2022-02-14T22:56:02.000Z | 2022-02-15T14:13:53.000Z | examples/golomb_bab.ex | mbta/solverl | 3d972db4fa173e9453357814884bd37f52a9713d | [
"MIT"
] | 4 | 2020-08-18T04:31:38.000Z | 2022-03-19T19:33:26.000Z |
defmodule GolombBAB do
import MinizincSearch
import MinizincUtils
require Logger
@moduledoc false
@objective_var "obj"
@model resource_file("mzn/golomb_mybab.mzn")
def solve(opts \\ []) do
instance = MinizincInstance.new(@model, [], opts)
results = bab(
instance,
fn solution, method, _iteration ->
objective = MinizincResults.get_solution_objective(solution)
Logger.info "Intermediate solution with objective #{objective}"
## Post new constraint for the objective
[better_objective_constraint(solution, @objective_var, method)]
end
)
last_solution = MinizincResults.get_last_solution(results)
Logger.info "#{MinizincResults.get_solution_output(last_solution)}"
end
end
| 24.774194 | 71 | 0.705729 |
9eb1b6c5ea3f9b98c81a988956e69329b30f896a | 1,549 | ex | Elixir | lib/malan_web/controllers/phone_number_controller.ex | FreedomBen/malan | ec8cd6ed3694e33371f065f018b1169956f2accf | [
"MIT"
] | 3 | 2021-04-24T17:54:55.000Z | 2021-09-10T15:40:19.000Z | lib/malan_web/controllers/phone_number_controller.ex | FreedomBen/malan | ec8cd6ed3694e33371f065f018b1169956f2accf | [
"MIT"
] | 57 | 2021-04-24T03:17:16.000Z | 2022-03-27T04:50:22.000Z | lib/malan_web/controllers/phone_number_controller.ex | FreedomBen/malan | ec8cd6ed3694e33371f065f018b1169956f2accf | [
"MIT"
] | null | null | null |
defmodule MalanWeb.PhoneNumberController do
use MalanWeb, :controller
alias Malan.Accounts
alias Malan.Accounts.PhoneNumber
action_fallback MalanWeb.FallbackController
def index(conn, _params) do
phone_numbers = Accounts.list_phone_numbers()
render(conn, "index.json", phone_numbers: phone_numbers)
end
def create(conn, %{"user_id" => user_id, "phone_number" => phone_number_params}) do
with {:ok, %PhoneNumber{} = phone_number} <-
Accounts.create_phone_number(user_id, phone_number_params) do
conn
|> put_status(:created)
|> put_resp_header(
"location",
Routes.user_phone_number_path(conn, :show, user_id, phone_number)
)
|> render("show.json", phone_number: phone_number)
end
end
def show(conn, %{"user_id" => _user_id, "id" => id}) do
phone_number = Accounts.get_phone_number!(id)
render(conn, "show.json", phone_number: phone_number)
end
def update(conn, %{"user_id" => _user_id, "id" => id, "phone_number" => phone_number_params}) do
phone_number = Accounts.get_phone_number!(id)
with {:ok, %PhoneNumber{} = phone_number} <-
Accounts.update_phone_number(phone_number, phone_number_params) do
render(conn, "show.json", phone_number: phone_number)
end
end
def delete(conn, %{"user_id" => _user_id, "id" => id}) do
phone_number = Accounts.get_phone_number!(id)
with {:ok, %PhoneNumber{}} <- Accounts.delete_phone_number(phone_number) do
send_resp(conn, :no_content, "")
end
end
end
| 31.612245 | 98 | 0.684958 |
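# --- Usage sketch (editor's addition) ---
# The Routes.user_phone_number_path/4 helper used in create/2 above implies
# phone numbers are nested under users in the router. A minimal router module
# producing that helper; the module and controller names besides
# PhoneNumberController are illustrative assumptions.
defmodule MalanWeb.RouterSketch do
  use Phoenix.Router

  resources "/users", MalanWeb.UserController do
    resources "/phone_numbers", MalanWeb.PhoneNumberController, except: [:new, :edit]
  end
end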
9eb1bf1cd2fabb9d92c2379846b9299c618ba8a9 | 132 | exs | Elixir | test/support/repo.exs | mainframe2/addict | aa70768f20939bf1f4d36a680240cb32f36e2a79 | [
"MIT"
] | null | null | null | test/support/repo.exs | mainframe2/addict | aa70768f20939bf1f4d36a680240cb32f36e2a79 | [
"MIT"
] | null | null | null | test/support/repo.exs | mainframe2/addict | aa70768f20939bf1f4d36a680240cb32f36e2a79 | [
"MIT"
] | null | null | null |
defmodule TestAddictRepo do
use Ecto.Repo, otp_app: :addict, pool: Ecto.Adapters.SQL.Sandbox, adapter: Ecto.Adapters.Postgres
end
| 33 | 99 | 0.795455 |
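# --- Usage sketch (editor's addition) ---
# With the SQL sandbox pool configured above, each test checks out its own
# connection so database changes stay isolated per test. A minimal case
# template doing the checkout; the module name is an illustrative assumption.
defmodule TestAddictRepoCase do
  use ExUnit.CaseTemplate

  setup do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(TestAddictRepo)
  end
end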
9eb202166d4c0abcb9b9fb3c82220d939ef153bf | 1,444 | ex | Elixir | clients/vault/lib/google_api/vault/v1/model/held_hangouts_chat_query.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/vault/lib/google_api/vault/v1/model/held_hangouts_chat_query.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/vault/lib/google_api/vault/v1/model/held_hangouts_chat_query.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vault.V1.Model.HeldHangoutsChatQuery do
@moduledoc """
Query options for hangouts chat holds.
## Attributes
* `includeRooms` (*type:* `boolean()`, *default:* `nil`) - If true, include rooms the user has participated in.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:includeRooms => boolean()
}
field(:includeRooms)
end
defimpl Poison.Decoder, for: GoogleApi.Vault.V1.Model.HeldHangoutsChatQuery do
def decode(value, options) do
GoogleApi.Vault.V1.Model.HeldHangoutsChatQuery.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Vault.V1.Model.HeldHangoutsChatQuery do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.723404 | 115 | 0.740305 |
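# --- Usage sketch (editor's addition) ---
# The Poison.Decoder/Poison.Encoder implementations above let the generated
# model round-trip through JSON; the payload is an illustrative assumption.
json = ~s({"includeRooms": true})

query = Poison.decode!(json, as: %GoogleApi.Vault.V1.Model.HeldHangoutsChatQuery{})
# query.includeRooms #=> true

encoded = Poison.encode!(query)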
9eb208edb2d996477973aa098f338a4513cb4fb7 | 490 | ex | Elixir | lib/geonames/endpoints/contains.ex | vheathen/geonames-elixir | b47ef0e38462c3bdbfb1a5b710f53993d9d3056d | [
"MIT"
] | null | null | null | lib/geonames/endpoints/contains.ex | vheathen/geonames-elixir | b47ef0e38462c3bdbfb1a5b710f53993d9d3056d | [
"MIT"
] | null | null | null | lib/geonames/endpoints/contains.ex | vheathen/geonames-elixir | b47ef0e38462c3bdbfb1a5b710f53993d9d3056d | [
"MIT"
] | null | null | null |
defmodule Geonames.Endpoints.Contains do
@moduledoc false
@behaviour Geonames.Endpoint
@default_arguments %{
geonameId: nil,
featureClass: nil,
featureCode: nil
}
def endpoint, do: "containsJSON"
def available_url_parameters, do: [:geonameId, :featureClass, :featureCode]
def required_url_parameters, do: [:geonameId]
def function_name, do: :contains
def url_arguments(provided_arguments) do
Map.merge(@default_arguments, provided_arguments)
end
end
| 23.333333 | 77 | 0.74898 |
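# --- Usage sketch (editor's addition) ---
# url_arguments/1 above merges caller-supplied parameters into the defaults, so
# a lookup for a single geonameId (the id value is an illustrative assumption)
# produces the full parameter map the HTTP client needs:
alias Geonames.Endpoints.Contains

Contains.url_arguments(%{geonameId: 2_643_743})
#=> %{geonameId: 2643743, featureClass: nil, featureCode: nil}

# endpoint/0 ("containsJSON") and required_url_parameters/0 ([:geonameId]) tell
# the caller which URL to hit and which keys must be present.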
9eb254e3821c7354eaeb6e8720f5a4d4dcd78f23 | 1,924 | exs | Elixir | test/validators/character_set_validator_test.exs | philippneugebauer/password-validator | 01d8f6532d4391e4d74f10548c2b32b2449eeb66 | [
"Apache-2.0"
] | null | null | null | test/validators/character_set_validator_test.exs | philippneugebauer/password-validator | 01d8f6532d4391e4d74f10548c2b32b2449eeb66 | [
"Apache-2.0"
] | null | null | null | test/validators/character_set_validator_test.exs | philippneugebauer/password-validator | 01d8f6532d4391e4d74f10548c2b32b2449eeb66 | [
"Apache-2.0"
] | null | null | null |
defmodule PasswordValidator.Validators.CharacterSetValidatorTest do
use ExUnit.Case, async: true
import PasswordValidator.Validators.CharacterSetValidator, only: [validate: 2]
alias PasswordValidator.Validators.CharacterSetValidator
doctest CharacterSetValidator
test "upper_case 2" do
opts = [character_set: [upper_case: 2]]
result = validate("String", opts)
assert result == {:error, ["Not enough upper_case characters (got 1 needed 2)"]}
end
test "upper_case [0, 2]" do
opts = [character_set: [upper_case: [0, 2]]]
result = validate("String", opts)
assert result == :ok
end
test "lower_case" do
opts = [character_set: [lower_case: [1, :infinity]]]
result = validate("String", opts)
assert result == :ok
end
test "allowed_special_characters when the string contains only allowed characters" do
opts = [character_set: [allowed_special_characters: "!-_"]]
assert validate("Spec-ial!", opts) == :ok
end
test "allowed_special_characters when the string contains non-allowed characters" do
opts = [character_set: [allowed_special_characters: "!-_"]]
result = validate("String_speci@l%", opts)
assert result == {:error, ["Invalid character(s) found. (@%)"]}
end
test "multiple errors" do
opts = [character_set: [
allowed_special_characters: "!-_",
special: 3,
]]
result = validate("String_speci@l%", opts)
assert result == {:error, [
"Not enough special characters (got 1 needed 3)",
"Invalid character(s) found. (@%)",
]}
end
test "with an invalid allowed_special_characters_config" do
opts = [character_set: [
allowed_special_characters: %{a: true},
]]
error_message = "Invalid allowed_special_characters config. Got: %{a: true} when a binary (string) was expected"
assert_raise RuntimeError, error_message, fn ->
validate("str@", opts)
end
end
end
| 32.066667 | 116 | 0.682952 |
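# --- Usage sketch (editor's addition) ---
# Calling the validator under test directly, mirroring the opts shape exercised
# above; the password value is an illustrative assumption.
alias PasswordValidator.Validators.CharacterSetValidator

opts = [
  character_set: [
    upper_case: 1,
    special: [1, :infinity],
    allowed_special_characters: "!-_"
  ]
]

CharacterSetValidator.validate("Str_ing!", opts)
#=> :ok (one upper-case letter, two allowed special characters)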
9eb2573f0429b6b9d732ebb97118a3ce672990f8 | 27,876 | exs | Elixir | old/test/blue_jet_web/controllers/sku_controller_test.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | old/test/blue_jet_web/controllers/sku_controller_test.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | old/test/blue_jet_web/controllers/sku_controller_test.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z |
defmodule BlueJetWeb.SkuControllerTest do
use BlueJetWeb.ConnCase
alias BlueJet.Identity.User
alias BlueJet.Identity
alias BlueJet.Inventory.Sku
alias BlueJet.FileStorage.File
alias BlueJet.FileStorage.FileCollection
alias BlueJet.FileStorage.FileCollectionMembership
alias BlueJet.Repo
@valid_attrs %{
"status" => "active",
"name" => "Apple",
"printName" => "APPLE",
"unitOfMeasure" => "EA",
"customData" => %{
"kind" => "Gala"
}
}
@invalid_attrs %{
"name" => ""
}
setup do
{_, %User{ default_account_id: account1_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
{:ok, %{ access_token: uat1 }} = Identity.authenticate(%{ username: "[email protected]", password: "test1234", scope: "type:user" })
conn = build_conn()
|> put_req_header("accept", "application/vnd.api+json")
|> put_req_header("content-type", "application/vnd.api+json")
%{ conn: conn, uat1: uat1, account1_id: account1_id }
end
describe "POST /v1/skus" do
test "with no access token", %{conn: conn} do
conn = post(conn, "/v1/skus", %{
"data" => %{
"type" => "Sku",
"attributes" => @valid_attrs
}
})
assert conn.status == 401
end
test "with invalid attrs and rels", %{ conn: conn, uat1: uat1 } do
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = post(conn, "/v1/skus", %{
"data" => %{
"type" => "Sku",
"attributes" => @invalid_attrs
}
})
assert json_response(conn, 422)["errors"]
assert length(json_response(conn, 422)["errors"]) > 0
end
test "with valid attrs and rels", %{ conn: conn, uat1: uat1 } do
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = post(conn, "/v1/skus", %{
"data" => %{
"type" => "Sku",
"attributes" => @valid_attrs
}
})
assert json_response(conn, 201)["data"]["id"]
assert json_response(conn, 201)["data"]["attributes"]["status"] == @valid_attrs["status"]
assert json_response(conn, 201)["data"]["attributes"]["name"] == @valid_attrs["name"]
assert json_response(conn, 201)["data"]["attributes"]["printName"] == @valid_attrs["printName"]
assert json_response(conn, 201)["data"]["attributes"]["unitOfMeasure"] == @valid_attrs["unitOfMeasure"]
assert json_response(conn, 201)["data"]["attributes"]["customData"] == @valid_attrs["customData"]
assert json_response(conn, 201)["data"]["relationships"]["avatar"] == %{}
assert json_response(conn, 201)["data"]["relationships"]["fileCollections"] == %{}
end
test "with valid attrs, rels and include", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
%File{ id: avatar_id } = Repo.insert!(%File{
account_id: account1_id,
name: Faker.Lorem.word(),
status: "uploaded",
content_type: "image/png",
size_bytes: 42
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = post(conn, "/v1/skus?include=avatar", %{
"data" => %{
"type" => "Sku",
"attributes" => @valid_attrs,
"relationships" => %{
"avatar" => %{
"data" => %{
"type" => "File",
"id" => avatar_id
}
}
}
}
})
assert json_response(conn, 201)["data"]["id"]
assert json_response(conn, 201)["data"]["attributes"]["status"] == @valid_attrs["status"]
assert json_response(conn, 201)["data"]["attributes"]["name"] == @valid_attrs["name"]
assert json_response(conn, 201)["data"]["attributes"]["printName"] == @valid_attrs["printName"]
assert json_response(conn, 201)["data"]["attributes"]["unitOfMeasure"] == @valid_attrs["unitOfMeasure"]
assert json_response(conn, 201)["data"]["attributes"]["customData"] == @valid_attrs["customData"]
assert json_response(conn, 201)["data"]["relationships"]["avatar"]["data"]["id"]
assert length(Enum.filter(json_response(conn, 201)["included"], fn(item) -> item["type"] == "File" end)) == 1
end
end
describe "GET /v1/skus/:id" do
test "with no access token", %{conn: conn} do
conn = get(conn, "/v1/skus/test")
assert conn.status == 401
end
test "with access token of a different account", %{ conn: conn, uat1: uat1 } do
{:ok, %User{ default_account_id: account2_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
sku = Repo.insert!(%Sku{
account_id: account2_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
assert_error_sent(404, fn ->
get(conn, "/v1/skus/#{sku.id}")
end)
end
test "with valid access token and id", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
sku = Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/skus/#{sku.id}")
assert json_response(conn, 200)["data"]["id"] == sku.id
assert json_response(conn, 200)["data"]["attributes"]["name"] == "Orange"
assert json_response(conn, 200)["data"]["attributes"]["customData"]["kind"] == "Blue Jay"
assert json_response(conn, 200)["data"]["attributes"]["locale"] == "en"
assert json_response(conn, 200)["data"]["relationships"]["avatar"] == %{}
end
test "with valid access token, id and locale", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
sku = Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
},
translations: %{
"zh-CN" => %{
"name" => "橙子"
}
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/skus/#{sku.id}?locale=zh-CN")
assert json_response(conn, 200)["data"]["id"] == sku.id
assert json_response(conn, 200)["data"]["attributes"]["name"] == "橙子"
assert json_response(conn, 200)["data"]["attributes"]["customData"]["kind"] == "Blue Jay"
assert json_response(conn, 200)["data"]["attributes"]["locale"] == "zh-CN"
assert json_response(conn, 200)["data"]["relationships"]["avatar"] == %{}
end
test "with valid access token, id, locale and include", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
%File{ id: avatar_id } = Repo.insert!(%File{
account_id: account1_id,
name: Faker.Lorem.word(),
status: "uploaded",
content_type: "image/png",
size_bytes: 42
})
sku = Repo.insert!(%Sku{
account_id: account1_id,
avatar_id: avatar_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%FileCollection{
account_id: account1_id,
sku_id: sku.id,
label: "primary_images",
translations: %{
"zh-CN" => %{
"name" => "图片"
}
}
})
Repo.insert!(%FileCollection{
account_id: account1_id,
sku_id: sku.id,
label: "secondary_images",
translations: %{
"zh-CN" => %{
"name" => "图片"
}
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/skus/#{sku.id}?include=avatar,fileCollections&locale=zh-CN")
assert json_response(conn, 200)["data"]["id"] == sku.id
assert json_response(conn, 200)["data"]["attributes"]["name"] == "Orange"
assert json_response(conn, 200)["data"]["attributes"]["customData"]["kind"] == "Blue Jay"
assert json_response(conn, 200)["data"]["relationships"]["avatar"]["data"]["id"]
assert length(json_response(conn, 200)["data"]["relationships"]["fileCollections"]["data"]) == 2
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["type"] == "File" end)) == 1
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["type"] == "FileCollection" end)) == 2
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["attributes"]["name"] == "图片" end)) == 2
end
end
describe "PATCH /v1/skus/:id" do
test "with no access token", %{conn: conn} do
conn = patch(conn, "/v1/skus/test", %{
"data" => %{
"id" => "test",
"type" => "Sku",
"attributes" => @valid_attrs
}
})
assert conn.status == 401
end
test "with access token of a different account", %{ conn: conn, uat1: uat1 } do
{:ok, %User{ default_account_id: account2_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
sku = Repo.insert!(%Sku{
account_id: account2_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
assert_error_sent(404, fn ->
patch(conn, "/v1/skus/#{sku.id}", %{
"data" => %{
"id" => sku.id,
"type" => "Sku",
"attributes" => @valid_attrs
}
})
end)
end
test "with valid access token, invalid attrs and rels", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
sku = Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = patch(conn, "/v1/skus/#{sku.id}", %{
"data" => %{
"id" => sku.id,
"type" => "Sku",
"attributes" => @invalid_attrs
}
})
assert json_response(conn, 422)["errors"]
assert length(json_response(conn, 422)["errors"]) > 0
end
test "with valid access token, attrs and rels", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
sku = Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = patch(conn, "/v1/skus/#{sku.id}", %{
"data" => %{
"id" => sku.id,
"type" => "Sku",
"attributes" => @valid_attrs
}
})
assert json_response(conn, 200)["data"]["id"] == sku.id
assert json_response(conn, 200)["data"]["attributes"]["status"] == @valid_attrs["status"]
assert json_response(conn, 200)["data"]["attributes"]["name"] == @valid_attrs["name"]
assert json_response(conn, 200)["data"]["attributes"]["printName"] == @valid_attrs["printName"]
assert json_response(conn, 200)["data"]["attributes"]["unitOfMeasure"] == @valid_attrs["unitOfMeasure"]
assert json_response(conn, 200)["data"]["attributes"]["customData"]["kind"] == @valid_attrs["customData"]["kind"]
assert json_response(conn, 200)["data"]["attributes"]["locale"] == "en"
end
test "with valid access token, attrs, rels and locale", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
sku = Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = patch(conn, "/v1/skus/#{sku.id}?locale=zh-CN", %{
"data" => %{
"id" => sku.id,
"type" => "Sku",
"attributes" => %{
"name" => "橙子"
}
}
})
assert json_response(conn, 200)["data"]["id"]
assert json_response(conn, 200)["data"]["attributes"]["printName"] == "ORANGE"
assert json_response(conn, 200)["data"]["attributes"]["name"] == "橙子"
assert json_response(conn, 200)["data"]["attributes"]["locale"] == "zh-CN"
end
test "with valid access token, attrs, rels, locale and include", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
%File{ id: avatar_id } = Repo.insert!(%File{
account_id: account1_id,
name: Faker.Lorem.word(),
status: "uploaded",
content_type: "image/png",
size_bytes: 42
})
sku = Repo.insert!(%Sku{
account_id: account1_id,
avatar_id: avatar_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%FileCollection{
account_id: account1_id,
sku_id: sku.id,
label: "primary_images",
translations: %{
"zh-CN" => %{
"name" => "图片"
}
}
})
Repo.insert!(%FileCollection{
account_id: account1_id,
sku_id: sku.id,
label: "secondary_images",
translations: %{
"zh-CN" => %{
"name" => "图片"
}
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = patch(conn, "/v1/skus/#{sku.id}?locale=zh-CN&include=avatar,fileCollections", %{
"data" => %{
"id" => sku.id,
"type" => "Sku",
"attributes" => @valid_attrs
}
})
assert json_response(conn, 200)["data"]["id"]
assert json_response(conn, 200)["data"]["attributes"]["status"] == @valid_attrs["status"]
assert json_response(conn, 200)["data"]["attributes"]["name"] == @valid_attrs["name"]
assert json_response(conn, 200)["data"]["attributes"]["printName"] == @valid_attrs["printName"]
assert json_response(conn, 200)["data"]["attributes"]["unitOfMeasure"] == @valid_attrs["unitOfMeasure"]
assert json_response(conn, 200)["data"]["attributes"]["customData"]["kind"] == @valid_attrs["customData"]["kind"]
assert json_response(conn, 200)["data"]["attributes"]["locale"] == "zh-CN"
assert json_response(conn, 200)["data"]["relationships"]["avatar"]["data"]["id"]
assert length(json_response(conn, 200)["data"]["relationships"]["fileCollections"]["data"]) == 2
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["type"] == "File" end)) == 1
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["type"] == "FileCollection" end)) == 2
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["attributes"]["name"] == "图片" end)) == 2
end
end
describe "GET /v1/skus" do
test "with no access token", %{conn: conn} do
conn = get(conn, sku_path(conn, :index))
assert conn.status == 401
end
test "with valid access token", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
{:ok, %User{ default_account_id: account2_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
Repo.insert!(%Sku{
account_id: account2_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE1",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/skus")
assert length(json_response(conn, 200)["data"]) == 2
assert json_response(conn, 200)["meta"]["resultCount"] == 2
assert json_response(conn, 200)["meta"]["totalCount"] == 2
end
test "with valid access token and pagination", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE1",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE2",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/skus?page[number]=2&page[size]=1")
assert length(json_response(conn, 200)["data"]) == 1
assert json_response(conn, 200)["meta"]["resultCount"] == 3
assert json_response(conn, 200)["meta"]["totalCount"] == 3
end
test "with valid access token and filter", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
Repo.insert!(%Sku{
account_id: account1_id,
status: "disabled",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE1",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE2",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/skus?filter[status]=active")
assert length(json_response(conn, 200)["data"]) == 2
assert json_response(conn, 200)["meta"]["resultCount"] == 2
assert json_response(conn, 200)["meta"]["totalCount"] == 3
end
test "with valid access token and locale", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
},
translations: %{
"zh-CN" => %{
"name" => "橙子"
}
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE1",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE2",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/skus?locale=zh-CN")
assert length(json_response(conn, 200)["data"]) == 3
assert json_response(conn, 200)["meta"]["resultCount"] == 3
assert json_response(conn, 200)["meta"]["totalCount"] == 3
assert length(Enum.filter(json_response(conn, 200)["data"], fn(item) -> item["attributes"]["name"] == "橙子" end)) == 1
end
test "with valid access token, locale and search", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
{:ok, %User{ default_account_id: account2_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
Repo.insert!(%Sku{
account_id: account2_id,
status: "active",
name: "Orange",
print_name: "ORANGE1",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Apple",
print_name: "APPLE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE1",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
},
translations: %{
"zh-CN" => %{
"name" => "橙子"
}
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE2",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
},
translations: %{
"zh-CN" => %{
"name" => "橙子"
}
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, "/v1/skus?search=橙&locale=zh-CN")
assert length(json_response(conn, 200)["data"]) == 2
assert json_response(conn, 200)["meta"]["resultCount"] == 2
assert json_response(conn, 200)["meta"]["totalCount"] == 3
end
test "with valid access token, locale and include", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
%File{ id: avatar_id } = Repo.insert!(%File{
account_id: account1_id,
name: Faker.Lorem.word(),
status: "uploaded",
content_type: "image/png",
size_bytes: 42
})
Repo.insert!(%Sku{
account_id: account1_id,
avatar_id: avatar_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
%Sku{ id: sku_id } = Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE1",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
%File{ id: ef1_id } = Repo.insert!(%File{
account_id: account1_id,
name: Faker.Lorem.word(),
status: "uploaded",
content_type: "image/png",
size_bytes: 42
})
%FileCollection{ id: efc1_id } = Repo.insert!(%FileCollection{
account_id: account1_id,
sku_id: sku_id,
label: "primary_images",
translations: %{
"zh-CN" => %{
"name" => "主要图片"
}
}
})
Repo.insert!(%FileCollectionMembership{
account_id: account1_id,
collection_id: efc1_id,
file_id: ef1_id
})
Repo.insert!(%FileCollection{
account_id: account1_id,
sku_id: sku_id,
label: "secondary_images",
translations: %{
"zh-CN" => %{
"name" => "主要图片"
}
}
})
Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE2",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = get(conn, sku_path(conn, :index, include: "avatar,fileCollections.files", locale: "zh-CN"))
assert length(json_response(conn, 200)["data"]) == 3
assert json_response(conn, 200)["meta"]["resultCount"] == 3
assert json_response(conn, 200)["meta"]["totalCount"] == 3
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["type"] == "File" end)) == 2
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["type"] == "FileCollection" end)) == 2
assert length(Enum.filter(json_response(conn, 200)["included"], fn(item) -> item["attributes"]["name"] == "主要图片" end)) == 2
end
end
describe "DELETE /v1/skus/:id" do
test "with no access token", %{conn: conn} do
conn = delete(conn, sku_path(conn, :delete, "test"))
assert conn.status == 401
end
test "with access token of a different account", %{ conn: conn, uat1: uat1 } do
{:ok, %User{ default_account_id: account2_id }} = Identity.create_user(%{
fields: %{
"first_name" => Faker.Name.first_name(),
"last_name" => Faker.Name.last_name(),
"email" => "[email protected]",
"password" => "test1234",
"account_name" => Faker.Company.name()
}
})
sku = Repo.insert!(%Sku{
account_id: account2_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
assert_error_sent(404, fn ->
delete(conn, sku_path(conn, :delete, sku.id))
end)
end
test "with valid access token and id", %{ conn: conn, uat1: uat1, account1_id: account1_id } do
sku = Repo.insert!(%Sku{
account_id: account1_id,
status: "active",
name: "Orange",
print_name: "ORANGE",
unit_of_measure: "EA",
custom_data: %{
"kind" => "Blue Jay"
}
})
conn = put_req_header(conn, "authorization", "Bearer #{uat1}")
conn = delete(conn, sku_path(conn, :delete, sku.id))
assert conn.status == 204
end
end
end
| 31.605442 | 136 | 0.539496 |
9eb288b9126815af874bacad5105527a9f4a1e9f | 2,765 | exs | Elixir | architect/mix.exs | VJftw/velocity | 8335c39c510dbde1446e6cde03eebb450339d212 | [
"Apache-2.0"
] | 3 | 2017-12-09T21:05:54.000Z | 2019-08-06T08:13:34.000Z | architect/mix.exs | VJftw/velocity | 8335c39c510dbde1446e6cde03eebb450339d212 | [
"Apache-2.0"
] | 63 | 2017-09-09T15:44:24.000Z | 2022-03-03T22:16:24.000Z | architect/mix.exs | VJftw/velocity | 8335c39c510dbde1446e6cde03eebb450339d212 | [
"Apache-2.0"
] | 5 | 2017-09-14T00:17:22.000Z | 2019-11-27T14:43:45.000Z |
defmodule Architect.MixProject do
use Mix.Project
def project do
[
app: :architect,
version: "0.1.0",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext, :phoenix_swagger] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
test_coverage: [tool: ExCoveralls],
# Docs
name: "Velocity Architect",
source_url: "https://github.com/velocity-ci/velocity/architect",
homepage_url: "http://velocityci.io",
docs: [
# The main page in the docs
main: "Architect",
extras: ["README.md"]
]
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Architect.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.4.0"},
{:phoenix_pubsub, "~> 1.1"},
{:phoenix_ecto, "~> 4.0"},
{:ecto_sql, "~> 3.0"},
{:postgrex, ">= 0.0.0"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:guardian, "~> 1.0"},
{:comeonin, "~> 4.0"},
{:bcrypt_elixir, "~> 0.12"},
{:distillery, "~> 2.0"},
{:phoenix_swagger, "~> 0.8"},
{:ex_json_schema, "~> 0.5"},
{:excoveralls, "~> 0.10", only: :test},
{:credo, "~> 1.0.0", only: [:dev, :test], runtime: false},
{:absinthe, "~> 1.4.0"},
{:absinthe_phoenix, "~> 1.4.0"},
{:absinthe_ecto, "~> 0.1.3"},
{:absinthe_relay, "~> 1.4"},
{:commanded, "~> 0.19"},
{:poison, "~> 3.1 or ~> 4.0"},
{:kronky, "~> 0.5.0"},
{:dataloader, "~> 1.0.1"},
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
{:timber, "~> 3.0.0"},
{:cors_plug, "~> 2.0"},
{:ecto_autoslug_field, "~> 1.0"},
{:mix_test_watch, "~> 0.9", only: :dev, runtime: false},
{:temp, "~> 0.4"},
{:ecto_enum, "~> 1.2"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test --trace"]
]
end
end
| 29.414894 | 79 | 0.535262 |
9eb28f56965830361a11575d3a7e30daee3c7ae1 | 412 | ex | Elixir | lib/collection/address.ex | vinceurag/kups | 456d4e1e030721e70ffe616bc1fbb81c85a98c76 | [
"MIT"
] | 3 | 2018-02-19T05:22:59.000Z | 2018-02-19T05:37:45.000Z | lib/collection/address.ex | vinceurag/kups | 456d4e1e030721e70ffe616bc1fbb81c85a98c76 | [
"MIT"
] | null | null | null | lib/collection/address.ex | vinceurag/kups | 456d4e1e030721e70ffe616bc1fbb81c85a98c76 | [
"MIT"
] | null | null | null |
defmodule Kups.Collection.Address do
@moduledoc false
def city do
[ "Alaminos", "Angeles", "Antipolo", "Bacolod", "Batangas City", "Biñan", "Dagupan", "Dapitan", "Dipolog", "Iloilo City", "Laoag", "Pasig", "Marikina", "San Fernando", "Zamboanga" ]
end
def province do
[ "Pangasinan", "Rizal", "Ilocos Sur", "Ilocos Norte", "Cebu", "Maguindanao", "Cavite", "Laguna", "Eastern Samar" ]
end
end
| 34.333333 | 185 | 0.648058 |
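# --- Usage sketch (editor's addition) ---
# The lists above are meant to be sampled when generating fake address data:
Kups.Collection.Address.city() |> Enum.random()
#=> e.g. "Iloilo City"

Kups.Collection.Address.province() |> Enum.random()
#=> e.g. "Cebu"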
9eb297862d4d64e911912fc0e5e9a5ab6db14f06 | 140 | exs | Elixir | kekend/config/dev.exs | evbogdanov/serial_killer | 7e67452b26f66a0fd01eca0133e705219957a17d | [
"MIT"
] | null | null | null | kekend/config/dev.exs | evbogdanov/serial_killer | 7e67452b26f66a0fd01eca0133e705219957a17d | [
"MIT"
] | null | null | null | kekend/config/dev.exs | evbogdanov/serial_killer | 7e67452b26f66a0fd01eca0133e705219957a17d | [
"MIT"
] | null | null | null |
use Mix.Config
config :serial_killer,
database: "serial_killer",
username: "postgres",
password: "postgres",
hostname: "localhost"
| 17.5 | 28 | 0.721429 |
9eb2bcb9c88d201ae0ac99f2bb4343f3c6c01ba9 | 927 | exs | Elixir | config/config.exs | raguiar9080/fractalman | 7fa5d87e23d3d3e718c8410e63a1b4c15036ce8e | [
"MIT"
] | null | null | null | config/config.exs | raguiar9080/fractalman | 7fa5d87e23d3d3e718c8410e63a1b4c15036ce8e | [
"MIT"
] | null | null | null | config/config.exs | raguiar9080/fractalman | 7fa5d87e23d3d3e718c8410e63a1b4c15036ce8e | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :gurren,
ecto_repos: [Gurren.Repo]
# Configures the endpoint
config :gurren, GurrenWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "7n+Ed4pN8qtc0AQ6QpfNXppA0IUud0SlHdXwhGtZbl097TQsuYRgt+qBebbiK4tv",
render_errors: [view: GurrenWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Gurren.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:user_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 33.107143 | 86 | 0.760518 |
9eb2fe31ef3d1641651d7c56c4ed20f09d104c79 | 21,990 | ex | Elixir | lib/commanded/commands/router.ex | octowombat/commanded | 79a1965e276d3369dcf70ae65ef904d7e59f4a6a | [
"MIT"
] | 1,220 | 2017-10-31T10:56:40.000Z | 2022-03-31T17:40:19.000Z | lib/commanded/commands/router.ex | octowombat/commanded | 79a1965e276d3369dcf70ae65ef904d7e59f4a6a | [
"MIT"
] | 294 | 2017-11-03T10:33:41.000Z | 2022-03-24T08:36:42.000Z | lib/commanded/commands/router.ex | octowombat/commanded | 79a1965e276d3369dcf70ae65ef904d7e59f4a6a | [
"MIT"
] | 208 | 2017-11-03T10:56:47.000Z | 2022-03-14T05:49:38.000Z | defmodule Commanded.Commands.Router do
@moduledoc """
Command routing macro to allow configuration of each command to its command handler.
## Example
Define a router module which uses `Commanded.Commands.Router` and configures
available commands to dispatch:
defmodule BankRouter do
use Commanded.Commands.Router
dispatch OpenAccount,
to: OpenAccountHandler,
aggregate: BankAccount,
identity: :account_number
end
The `to` option determines which module receives the command being dispatched.
This command handler module must implement a `handle/2` function. It receives
the aggregate's state and the command to execute. Usually the command handler
module will forward the command to the aggregate.
Once configured, you can either dispatch a command by using the module and
specifying the application:
command = %OpenAccount{account_number: "ACC123", initial_balance: 1_000}
:ok = BankRouter.dispatch(command, application: BankApp)
Or, more simply, you should include the router module in your application:
defmodule BankApp do
use Commanded.Application, otp_app: :my_app
router MyApp.Router
end
Then dispatch commands using the app:
command = %OpenAccount{account_number: "ACC123", initial_balance: 1_000}
:ok = BankApp.dispatch(command)
## Dispatch command directly to an aggregate
You can route a command directly to an aggregate, without requiring an
intermediate command handler.
### Example
defmodule BankRouter do
use Commanded.Commands.Router
        # Will route to `BankAccount.execute/2`
dispatch OpenAccount, to: BankAccount, identity: :account_number
end
By default, you must define an `execute/2` function on the aggregate module, which will be
called with the aggregate's state and the command to execute. Using this approach, you must
create an `execute/2` clause that pattern-matches on each command that the aggregate should
handle.
Alternatively, you may specify the name of a function (also receiving both the aggregate state
and the command) on your aggregate module to which the command will be dispatched:
### Example
defmodule BankRouter do
use Commanded.Commands.Router
# Will route to `BankAccount.open_account/2`
dispatch OpenAccount, to: BankAccount, function: :open_account, identity: :account_number
end
## Define aggregate identity
You can define the identity field for an aggregate once using the `identify` macro.
The configured identity will be used for all commands registered to the aggregate,
unless overridden by a command registration.
### Example
defmodule BankRouter do
use Commanded.Commands.Router
identify BankAccount,
by: :account_number,
prefix: "bank-account-"
dispatch OpenAccount, to: BankAccount
end
An optional identity prefix can be used to distinguish between different
aggregates that would otherwise share the same identity. As an example you
might have a `User` and a `UserPreferences` aggregate that you wish
to share the same identity. In this scenario you should specify a `prefix`
for each aggregate (e.g. "user-" and "user-preference-").
The prefix is used as the stream identity when appending and reading the
aggregate's events: "<identity_prefix><aggregate_uuid>". It can be a string or
a zero arity function returning a string.
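  For illustration, a minimal sketch using a zero-arity function as the prefix
  (the function form is accepted by the `identify` macro, as validated below):
      identify BankAccount,
        by: :account_number,
        prefix: fn -> "bank-account-" end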
## Consistency
You can choose the consistency guarantee when dispatching a command. The
available options are:
- `:eventual` (default) - don't block command dispatch while waiting for
event handlers
:ok = BankApp.dispatch(command)
:ok = BankApp.dispatch(command, consistency: :eventual)
- `:strong` - block command dispatch until all strongly
consistent event handlers and process managers have successfully processed
all events created by the command.
Use this when you have event handlers that update read models you need to
query immediately after dispatching the command.
:ok = BankApp.dispatch(command, consistency: :strong)
- Provide an explicit list of event handler and process manager modules (or
their configured names), containing only those handlers you'd like to wait
for. No other handlers will be awaited on, regardless of their own
configured consistency setting.
```elixir
:ok = BankApp.dispatch(command, consistency: [ExampleHandler, AnotherHandler])
:ok = BankApp.dispatch(command, consistency: ["ExampleHandler", "AnotherHandler"])
```
Note you cannot opt-in to strong consistency for a handler that has been
configured as eventually consistent.
## Dispatch return
By default a successful command dispatch will return `:ok`. You can change
this behaviour by specifying a `returning` option.
The supported options are:
  - `:aggregate_state` - to return the updated aggregate state.
- `:aggregate_version` - to return only the aggregate version.
- `:events` - to return the resultant domain events. An empty list will be
returned if no events were produced.
- `:execution_result` - to return a `Commanded.Commands.ExecutionResult`
struct containing the aggregate's identity, state, version, and any events
produced from the command along with their associated metadata.
- `false` - don't return anything except an `:ok`.
### Aggregate state
Return the updated aggregate state as part of the dispatch result:
{:ok, %BankAccount{}} = BankApp.dispatch(command, returning: :aggregate_state)
This is useful when you want to immediately return fields from the aggregate's
  state without requiring a read model projection and waiting for the event(s)
to be projected. It may also be appropriate to use this feature for unit
tests.
However, be warned that tightly coupling an aggregate's state with read
requests may be harmful. It's why CQRS enforces the separation of reads from
writes by defining two separate and specialised models.
### Aggregate version
You can optionally choose to return the aggregate's version as part of the
dispatch result:
{:ok, aggregate_version} = BankApp.dispatch(command, returning: :aggregate_version)
This is useful when you need to wait for an event handler, such as a read model
projection, to be up-to-date before querying its data.
### Execution results
You can also choose to return the execution result as part of the dispatch
result:
alias Commanded.Commands.ExecutionResult
{:ok, %ExecutionResult{} = result} = BankApp.dispatch(command, returning: :execution_result)
Or by setting the `default_dispatch_return` in your application config file:
# config/config.exs
config :commanded, default_dispatch_return: :execution_result
Use the execution result struct to get information from the events produced
from the command.
## Metadata
You can associate metadata with all events created by the command.
Supply a map containing key/value pairs comprising the metadata:
:ok = BankApp.dispatch(command, metadata: %{"ip_address" => "127.0.0.1"})
"""
alias Commanded.Commands.Router
defmacro __using__(opts) do
quote do
require Logger
import unquote(__MODULE__)
@before_compile unquote(__MODULE__)
@behaviour Commanded.Commands.Router
Module.register_attribute(__MODULE__, :registered_commands, accumulate: true)
Module.register_attribute(__MODULE__, :registered_middleware, accumulate: true)
Module.register_attribute(__MODULE__, :registered_identities, accumulate: false)
@default_dispatch_opts [
application: Keyword.get(unquote(opts), :application),
consistency: Router.get_opt(unquote(opts), :default_consistency, :eventual),
returning: Router.get_default_dispatch_return(unquote(opts)),
timeout: 5_000,
lifespan: Commanded.Aggregates.DefaultLifespan,
metadata: %{},
retry_attempts: 10
]
@default_middleware [
Commanded.Middleware.ExtractAggregateIdentity,
Commanded.Middleware.ConsistencyGuarantee
]
@registered_identities %{}
end
end
@doc """
Include the given middleware module to be called before and after
success or failure of each command dispatch
The middleware module must implement the `Commanded.Middleware` behaviour.
Middleware modules are executed in the order they are defined.
## Example
defmodule BankRouter do
use Commanded.Commands.Router
middleware CommandLogger
middleware MyCommandValidator
middleware AuthorizeCommand
dispatch [OpenAccount, DepositMoney], to: BankAccount, identity: :account_number
end
"""
defmacro middleware(middleware_module) do
quote do
@registered_middleware unquote(middleware_module)
end
end
@doc """
Define an aggregate's identity
You can define the identity field for an aggregate using the `identify` macro.
The configured identity will be used for all commands registered to the
aggregate, unless overridden by a command registration.
## Example
defmodule BankRouter do
use Commanded.Commands.Router
identify BankAccount,
by: :account_number,
prefix: "bank-account-"
end
"""
defmacro identify(aggregate_module, opts) do
quote location: :keep, bind_quoted: [aggregate_module: aggregate_module, opts: opts] do
case Map.get(@registered_identities, aggregate_module) do
nil ->
by =
case Keyword.get(opts, :by) do
nil ->
raise "#{inspect(aggregate_module)} aggregate identity is missing the `by` option"
by when is_atom(by) ->
by
by when is_function(by, 1) ->
by
invalid ->
raise "#{inspect(aggregate_module)} aggregate identity has an invalid `by` option: #{inspect(invalid)}"
end
prefix =
case Keyword.get(opts, :prefix) do
nil ->
nil
prefix when is_function(prefix, 0) ->
prefix
prefix when is_binary(prefix) ->
prefix
invalid ->
raise "#{inspect(aggregate_module)} aggregate has an invalid identity prefix: #{inspect(invalid)}"
end
@registered_identities Map.put(@registered_identities, aggregate_module,
by: by,
prefix: prefix
)
config ->
raise "#{inspect(aggregate_module)} aggregate has already been identified by: `#{inspect(Keyword.get(config, :by))}`"
end
end
end
@doc """
Configure the command, or list of commands, to be dispatched to the
corresponding handler and aggregate.
## Example
defmodule BankRouter do
use Commanded.Commands.Router
dispatch [OpenAccount, DepositMoney], to: BankAccount, identity: :account_number
end
"""
defmacro dispatch(command_module_or_modules, opts) do
opts = parse_opts(opts, [])
for command_module <- List.wrap(command_module_or_modules) do
quote do
if Enum.any?(@registered_commands, fn {command_module, _command_opts} ->
command_module == unquote(command_module)
end) do
raise ArgumentError,
message:
"Command `#{inspect(unquote(command_module))}` has already been registered in router `#{inspect(__MODULE__)}`"
end
@registered_commands {
unquote(command_module),
Keyword.merge(@default_dispatch_opts, unquote(opts))
}
end
end
end
@doc """
Dispatch the given command to the registered handler.
Returns `:ok` on success, or `{:error, reason}` on failure.
## Example
command = %OpenAccount{account_number: "ACC123", initial_balance: 1_000}
:ok = BankRouter.dispatch(command)
"""
@callback dispatch(command :: struct()) ::
:ok
| {:ok, aggregate_state :: struct()}
| {:ok, aggregate_version :: non_neg_integer()}
| {:ok, execution_result :: Commanded.Commands.ExecutionResult.t()}
| {:error, :unregistered_command}
| {:error, :consistency_timeout}
| {:error, reason :: term()}
@doc """
Dispatch the given command to the registered handler providing a timeout.
- `command` is a command struct which must be registered with the router.
- `timeout_or_opts` is either an integer timeout, `:infinity`, or a keyword
list of options.
The timeout must be an integer greater than zero which specifies how many
milliseconds to allow the command to be handled, or the atom `:infinity`
to wait indefinitely. The default timeout value is five seconds.
Alternatively, an options keyword list can be provided with the following
options.
Options:
- `causation_id` - an optional UUID used to identify the cause of the
command being dispatched.
- `command_uuid` - an optional UUID used to identify the command being
dispatched.
- `correlation_id` - an optional UUID used to correlate related
commands/events together.
- `consistency` - one of `:eventual` (default) or `:strong`. By
setting the consistency to `:strong` a successful command dispatch
will block until all strongly consistent event handlers and process
managers have handled all events created by the command.
- `metadata` - an optional map containing key/value pairs comprising
the metadata to be associated with all events created by the
command.
- `returning` - to choose what response is returned from a successful
command dispatch. The default is to return an `:ok`.
The available options are:
    - `:aggregate_state` - to return the updated aggregate state in the
successful response: `{:ok, aggregate_state}`.
- `:aggregate_version` - to include the aggregate stream version
in the successful response: `{:ok, aggregate_version}`.
- `:events` - to return the resultant domain events. An empty list
will be returned if no events were produced.
- `:execution_result` - to return a `Commanded.Commands.ExecutionResult`
struct containing the aggregate's identity, version, and any
events produced from the command along with their associated
metadata.
- `false` - don't return anything except an `:ok`.
- `timeout` - as described above.
Returns `:ok` on success unless the `:returning` option is specified where
it returns one of `{:ok, aggregate_state}`, `{:ok, aggregate_version}`, or
`{:ok, %Commanded.Commands.ExecutionResult{}}`.
Returns `{:error, reason}` on failure.
## Example
command = %OpenAccount{account_number: "ACC123", initial_balance: 1_000}
:ok = BankRouter.dispatch(command, consistency: :strong, timeout: 30_000)
"""
@callback dispatch(
command :: struct(),
timeout_or_opts :: non_neg_integer() | :infinity | Keyword.t()
) ::
:ok
| {:ok, aggregate_state :: struct()}
| {:ok, aggregate_version :: non_neg_integer()}
| {:ok, execution_result :: Commanded.Commands.ExecutionResult.t()}
| {:error, :unregistered_command}
| {:error, :consistency_timeout}
| {:error, reason :: term()}
defmacro __before_compile__(_env) do
quote generated: true do
@doc false
def __registered_commands__ do
Enum.map(@registered_commands, fn {command_module, _command_opts} -> command_module end)
end
@doc false
def dispatch(command, opts \\ [])
@doc false
def dispatch(command, :infinity),
do: do_dispatch(command, timeout: :infinity)
@doc false
def dispatch(command, timeout) when is_integer(timeout),
do: do_dispatch(command, timeout: timeout)
@doc false
def dispatch(command, opts),
do: do_dispatch(command, opts)
@middleware Enum.reduce(@registered_middleware, @default_middleware, fn middleware, acc ->
[middleware | acc]
end)
for {command_module, command_opts} <- @registered_commands do
@aggregate Keyword.fetch!(command_opts, :aggregate)
@handler Keyword.fetch!(command_opts, :to)
@function Keyword.fetch!(command_opts, :function)
@before_execute Keyword.get(command_opts, :before_execute)
@lifespan Keyword.get(command_opts, :lifespan)
@identity Keyword.get(command_opts, :identity)
@identity_prefix Keyword.get(command_opts, :identity_prefix)
@command_module command_module
@command_opts command_opts
defp do_dispatch(%@command_module{} = command, opts) do
alias Commanded.Commands.Dispatcher
alias Commanded.Commands.Dispatcher.Payload
opts = Keyword.merge(@command_opts, opts)
application = Keyword.fetch!(opts, :application)
causation_id = Keyword.get(opts, :causation_id)
command_uuid = Keyword.get(opts, :command_uuid, UUID.uuid4())
consistency = Keyword.fetch!(opts, :consistency)
correlation_id = Keyword.get(opts, :correlation_id, UUID.uuid4())
metadata = opts |> Keyword.fetch!(:metadata) |> validate_metadata()
retry_attempts = Keyword.get(opts, :retry_attempts)
timeout = Keyword.fetch!(opts, :timeout)
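          # Legacy `include_*` options take precedence over an explicit `:returning` value.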
returning =
cond do
Keyword.get(opts, :include_execution_result) == true ->
:execution_result
Keyword.get(opts, :include_aggregate_version) == true ->
:aggregate_version
(returning = Keyword.get(opts, :returning)) in [
:aggregate_state,
:aggregate_version,
:events,
:execution_result,
false
] ->
returning
true ->
false
end
{identity, identity_prefix} =
case Map.get(@registered_identities, @aggregate) do
nil ->
{@identity, @identity_prefix}
config ->
identity = Keyword.get(config, :by, @identity)
prefix = Keyword.get(config, :prefix, @identity_prefix)
{identity, prefix}
end
payload = %Payload{
application: application,
command: command,
command_uuid: command_uuid,
causation_id: causation_id,
correlation_id: correlation_id,
consistency: consistency,
handler_module: @handler,
handler_function: @function,
handler_before_execute: @before_execute,
aggregate_module: @aggregate,
identity: identity,
identity_prefix: identity_prefix,
returning: returning,
timeout: timeout,
lifespan: @lifespan,
metadata: metadata,
middleware: @middleware,
retry_attempts: retry_attempts
}
Dispatcher.dispatch(payload)
end
end
# Catch unregistered commands, log and return an error.
defp do_dispatch(command, _opts) do
Logger.error(fn ->
"attempted to dispatch an unregistered command: " <> inspect(command)
end)
{:error, :unregistered_command}
end
      # Ensure the metadata is a map
      defp validate_metadata(value) when is_map(value), do: value
      defp validate_metadata(_), do: raise(ArgumentError, message: "metadata must be a map")
end
end
@doc false
def get_opt(opts, name, default \\ nil) do
Keyword.get(opts, name) || Application.get_env(:commanded, name) || default
end
@doc false
def get_default_dispatch_return(opts) do
cond do
(default_dispatch_return = get_opt(opts, :default_dispatch_return)) in [
:aggregate_state,
:aggregate_version,
:events,
:execution_result,
false
] ->
default_dispatch_return
get_opt(opts, :include_execution_result) == true ->
:execution_result
get_opt(opts, :include_aggregate_version) == true ->
:aggregate_version
true ->
false
end
end
@register_params [
:to,
:function,
:before_execute,
:aggregate,
:identity,
:identity_prefix,
:timeout,
:lifespan,
:consistency
]
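  # When only `:to` is given, that module is treated as the aggregate itself and
  # commands are routed to its `execute/2`. When an `:aggregate` option is also
  # present, `:to` names a separate command handler and its `handle/2` is used.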
defp parse_opts([{:to, aggregate_or_handler} | opts], result) do
case Keyword.pop(opts, :aggregate) do
{nil, opts} ->
aggregate = aggregate_or_handler
parse_opts(opts, [function: :execute, to: aggregate, aggregate: aggregate] ++ result)
{aggregate, opts} ->
handler = aggregate_or_handler
parse_opts(opts, [function: :handle, to: handler, aggregate: aggregate] ++ result)
end
end
defp parse_opts([{param, value} | opts], result) when param in @register_params do
parse_opts(opts, [{param, value} | result])
end
defp parse_opts([{param, _value} | _opts], _result) do
raise """
unexpected dispatch parameter "#{param}"
available params are: #{@register_params |> Enum.map(&to_string/1) |> Enum.join(", ")}
"""
end
defp parse_opts([], result), do: result
end
| 33.167421 | 127 | 0.655707 |
9eb3251e27da519b041027aeaec5ab08a900c1c7 | 6,277 | ex | Elixir | lib/credo/code/module.ex | sevenseacat/credo | 48837401040d9c2340b5fb9c7d786d31f89f6426 | [
"MIT"
] | null | null | null | lib/credo/code/module.ex | sevenseacat/credo | 48837401040d9c2340b5fb9c7d786d31f89f6426 | [
"MIT"
] | null | null | null | lib/credo/code/module.ex | sevenseacat/credo | 48837401040d9c2340b5fb9c7d786d31f89f6426 | [
"MIT"
] | null | null | null | defmodule Credo.Code.Module do
@moduledoc """
  This module provides helper functions to analyse modules and return the defined
  functions or module attributes.
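  A rough usage sketch (assuming `ast` is a `defmodule` AST, for example obtained
  via Elixir's `Code.string_to_quoted/1`):
      {:ok, ast} = Code.string_to_quoted("defmodule Foo do def bar, do: :ok end")
      Credo.Code.Module.def_names(ast)
      #=> [:bar]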
"""
alias Credo.Code
alias Credo.Code.Block
@def_ops [:def, :defp, :defmacro]
@doc "Returns the list of aliases defined in a given module source code."
def aliases({:defmodule, _, _arguments} = ast) do
ast
|> Credo.Code.postwalk(&find_aliases/2)
|> Enum.uniq
end
defp find_aliases({:alias, _, [{:__aliases__, _, mod_list}]} = ast, aliases) do
module_names =
mod_list
|> Credo.Code.Name.full
|> List.wrap
{ast, aliases ++ module_names}
end
# Multi alias
defp find_aliases({:alias, _, [{{:., _, [{:__aliases__, _, mod_list}, :{}]}, _, multi_mod_list}]} = ast, aliases) do
module_names =
Enum.map(multi_mod_list, fn(tuple) ->
Credo.Code.Name.full([Credo.Code.Name.full(mod_list), Credo.Code.Name.full(tuple)])
end)
{ast, aliases ++ module_names}
end
defp find_aliases(ast, aliases) do
{ast, aliases}
end
@doc "Reads an attribute from a module's `ast`"
def attribute(ast, attr_name) do
case Code.postwalk(ast, &find_attribute(&1, &2, attr_name), {:error, nil}) do
{:ok, value} ->
value
error ->
error
end
end
defp find_attribute({:@, _meta, arguments} = ast, tuple, attribute_name) do
case List.first(arguments) do
{^attribute_name, _meta, [value]} ->
{:ok, value}
_ ->
{ast, tuple}
end
end
defp find_attribute(ast, tuple, _name) do
{ast, tuple}
end
@doc "Returns the function/macro count for the given module's AST"
def def_count(nil), do: 0
def def_count({:defmodule, _, _arguments} = ast) do
ast
|> Code.postwalk(&collect_defs/2)
|> Enum.count
end
def defs(nil), do: []
def defs({:defmodule, _, _arguments} = ast) do
Code.postwalk(ast, &collect_defs/2)
end
@doc "Returns the arity of the given function definition `ast`"
for op <- @def_ops do
def def_arity({unquote(op) = op, _, [{:when, _, fun_ast}, _]}) do
def_arity({op, nil, fun_ast})
end
def def_arity({unquote(op), _, [{_fun_name, _, arguments}, _]}) when is_list(arguments) do
Enum.count(arguments)
end
def def_arity({unquote(op), _, [{_fun_name, _, _}, _]}), do: 0
end
def def_arity(_), do: nil
@doc "Returns the name of the function/macro defined in the given `ast`"
for op <- @def_ops do
def def_name({unquote(op) = op, _, [{:when, _, fun_ast}, _]}) do
def_name({op, nil, fun_ast})
end
def def_name({unquote(op), _, [{fun_name, _, _arguments}, _]}) when is_atom(fun_name) do
fun_name
end
end
def def_name(_), do: nil
@doc "Returns the {fun_name, op} tuple of the function/macro defined in the given `ast`"
for op <- @def_ops do
def def_name_with_op({unquote(op) = op, _, _} = ast) do
{def_name(ast), op}
end
def def_name_with_op({unquote(op) = op, _, _} = ast, arity) do
if def_arity(ast) == arity do
{def_name(ast), op}
else
nil
end
end
end
def def_name_with_op(_), do: nil
@doc "Returns the name of the functions/macros for the given module's `ast`"
def def_names(nil), do: []
def def_names({:defmodule, _, _arguments} = ast) do
ast
|> Code.postwalk(&collect_defs/2)
|> Enum.map(&def_name/1)
|> Enum.uniq
end
@doc "Returns the name of the functions/macros for the given module's `ast`"
def def_names_with_op(nil), do: []
def def_names_with_op({:defmodule, _, _arguments} = ast) do
ast
|> Code.postwalk(&collect_defs/2)
|> Enum.map(&def_name_with_op/1)
|> Enum.uniq
end
@doc "Returns the name of the functions/macros for the given module's `ast` if it has the given `arity`."
def def_names_with_op(nil, _arity), do: []
def def_names_with_op({:defmodule, _, _arguments} = ast, arity) do
ast
|> Code.postwalk(&collect_defs/2)
|> Enum.map(&def_name_with_op(&1, arity))
|> Enum.reject(&is_nil/1)
|> Enum.uniq
end
for op <- @def_ops do
defp collect_defs({:@, _, [{unquote(op), _, arguments} = ast]}, defs) when is_list(arguments) do
{ast, defs -- [ast]}
end
defp collect_defs({unquote(op), _, arguments} = ast, defs) when is_list(arguments) do
{ast, defs ++ [ast]}
end
end
defp collect_defs(ast, defs) do
{ast, defs}
end
@doc "Returns the list of modules used in a given module source code."
def modules({:defmodule, _, _arguments} = ast) do
ast
|> Code.postwalk(&find_dependent_modules/2)
|> Enum.uniq
end
# exclude module name
defp find_dependent_modules({:defmodule, _, [{:__aliases__, _, mod_list}, _do_block]} = ast, modules) do
module_names =
mod_list
|> Credo.Code.Name.full
|> List.wrap
{ast, modules -- module_names}
end
# single alias
defp find_dependent_modules({:alias, _, [{:__aliases__, _, mod_list}]} = ast, aliases) do
module_names =
mod_list
|> Credo.Code.Name.full
|> List.wrap
{ast, aliases -- module_names}
end
# multi alias
defp find_dependent_modules({:alias, _, [{{:., _, [{:__aliases__, _, mod_list}, :{}]}, _, multi_mod_list}]} = ast, modules) do
module_names =
Enum.flat_map(multi_mod_list, fn(tuple) ->
[Credo.Code.Name.full(mod_list), Credo.Code.Name.full(tuple)]
end)
{ast, modules -- module_names}
end
defp find_dependent_modules({:__aliases__, _, mod_list} = ast, modules) do
module_names =
mod_list
|> Credo.Code.Name.full
|> List.wrap
{ast, modules ++ module_names}
end
defp find_dependent_modules(ast, modules) do
{ast, modules}
end
@doc "Returns the name of a module's given ast node."
def name({:defmodule, _, [{:__aliases__, _, name_list}, _]}) do
Enum.join(name_list, ".")
end
def name(_), do: "<Unknown Module Name>"
# TODO: write unit test
def exception?({:defmodule, _, [{:__aliases__, _, _name_list}, arguments]}) do
arguments
|> Block.calls_in_do_block
|> Enum.any?(&defexception?/1)
end
def exception?(_), do: nil
defp defexception?({:defexception, _, _}), do: true
defp defexception?(_), do: false
end
| 26.045643 | 128 | 0.629122 |
9eb3488b43bd7ec364db98a0e31e0828fdf393e9 | 498 | exs | Elixir | apps/authenticator/priv/repo/migrations/20201004125044_create_user_sign_in_attempt_table.exs | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | 9 | 2020-10-13T14:11:37.000Z | 2021-08-12T18:40:08.000Z | apps/authenticator/priv/repo/migrations/20201004125044_create_user_sign_in_attempt_table.exs | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | 28 | 2020-10-04T14:43:48.000Z | 2021-12-07T16:54:22.000Z | apps/authenticator/priv/repo/migrations/20201004125044_create_user_sign_in_attempt_table.exs | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | 3 | 2020-11-25T20:59:47.000Z | 2021-08-30T10:36:58.000Z | defmodule Authenticator.Repo.Migrations.CreateUserSignInAttemptTable do
use Ecto.Migration
def change do
create_if_not_exists table(:user_sign_in_attempt, primary_key: false) do
add :id, :uuid, primary_key: true
add :username, :string, null: false
add :was_successful, :boolean, null: false
add :ip_address, :string, null: false
timestamps()
end
create_if_not_exists index(:user_sign_in_attempt, [:username, :was_successful, :ip_address])
end
end
| 29.294118 | 96 | 0.728916 |
9eb365c04de1de9bfcd71df1da4451049011d036 | 205 | exs | Elixir | test/controllers/page_controller_test.exs | MarcAntoine-Arnaud/ecto_relations | 477853429af12adb33cd8000b29da044649c1283 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | MarcAntoine-Arnaud/ecto_relations | 477853429af12adb33cd8000b29da044649c1283 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | MarcAntoine-Arnaud/ecto_relations | 477853429af12adb33cd8000b29da044649c1283 | [
"MIT"
] | null | null | null | defmodule EctoRelations.PageControllerTest do
use EctoRelations.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 22.777778 | 60 | 0.692683 |
9eb372e9fdc3ffb174ecf692800f7da9fca89d11 | 971 | exs | Elixir | mix.exs | cgrothaus/ueberauth_oidc | 31798ffab8e86a0299512ea215234d45b42d06d7 | [
"MIT"
] | null | null | null | mix.exs | cgrothaus/ueberauth_oidc | 31798ffab8e86a0299512ea215234d45b42d06d7 | [
"MIT"
] | 5 | 2019-06-04T12:10:20.000Z | 2022-02-18T15:12:40.000Z | mix.exs | cgrothaus/ueberauth_oidc | 31798ffab8e86a0299512ea215234d45b42d06d7 | [
"MIT"
] | 4 | 2021-03-15T18:47:39.000Z | 2021-11-03T15:03:46.000Z | defmodule UeberauthOIDC.Mixfile do
use Mix.Project
def project do
[
app: :ueberauth_oidc,
name: "Ueberauth OIDC",
version: "0.1.2",
elixir: "~> 1.7",
description: """
An Ueberauth strategy for generic OpenID Connect authentication.
""",
package: package(),
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp package do
[
maintainers: ["Rick Littel - @Kuret"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/DefactoSoftware/ueberauth_oidc"}
]
end
defp deps do
[
{:credo, "~> 1.5", only: [:dev, :test]},
{:ex_doc, "~> 0.24", only: [:dev, :test]},
{:jose, "~> 1.11"},
{:httpoison, "~> 1.8"},
{:mock, "~> 0.3", only: :test},
{:openid_connect, "~> 0.2.2"},
{:plug, "~> 1.11"},
{:ueberauth, "~> 0.6"}
]
end
end
| 21.108696 | 79 | 0.510814 |
9eb37952889ea8c500f9379c8e6f152a4cd36e85 | 1,386 | exs | Elixir | word-count/test/word_count_test.exs | koladilip/elixir-examples | 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | [
"MIT"
] | null | null | null | word-count/test/word_count_test.exs | koladilip/elixir-examples | 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | [
"MIT"
] | null | null | null | word-count/test/word_count_test.exs | koladilip/elixir-examples | 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | [
"MIT"
] | null | null | null | defmodule WordCountTest do
use ExUnit.Case
test "count one word" do
assert WordCount.count("word") == %{"word" => 1}
end
test "count one of each" do
expected = %{"one" => 1, "of" => 1, "each" => 1}
assert WordCount.count("one of each") == expected
end
test "count multiple occurrences" do
expected = %{"one" => 1, "fish" => 4, "two" => 1, "red" => 1, "blue" => 1}
assert WordCount.count("one fish two fish red fish blue fish") == expected
end
test "ignore punctuation" do
expected = %{"car" => 1, "carpet" => 1, "as" => 1, "java" => 1, "javascript" => 1}
assert WordCount.count("car : carpet as java : javascript!!&@$%^&") == expected
end
test "include numbers" do
expected = %{"testing" => 2, "1" => 1, "2" => 1}
assert WordCount.count("testing, 1, 2 testing") == expected
end
test "hyphens" do
expected = %{"co-operative" => 1}
assert WordCount.count("co-operative") == expected
end
test "ignore underscores" do
expected = %{"two" => 1, "words" => 1}
assert WordCount.count("two_words") == expected
end
test "normalize case" do
expected = %{"go" => 3}
assert WordCount.count("go Go GO") == expected
end
test "German" do
expected = %{"götterfunken" => 1, "schöner" => 1, "freude" => 1}
assert WordCount.count("Freude schöner Götterfunken") == expected
end
end
| 24.75 | 86 | 0.592352 |
9eb38c21aff240927c1e20a055fa3c189fd9626d | 661 | exs | Elixir | bench/benchmarks/jobs_bench.exs | locaweb/kiq | fa561b9a1a2361e99b4af5eafc4d3c7ece576ea9 | [
"MIT"
] | 59 | 2018-07-13T23:17:00.000Z | 2021-12-21T07:03:03.000Z | bench/benchmarks/jobs_bench.exs | locaweb/kiq | fa561b9a1a2361e99b4af5eafc4d3c7ece576ea9 | [
"MIT"
] | 5 | 2019-08-05T13:14:42.000Z | 2022-03-09T12:38:46.000Z | bench/benchmarks/jobs_bench.exs | locaweb/kiq | fa561b9a1a2361e99b4af5eafc4d3c7ece576ea9 | [
"MIT"
] | 6 | 2018-09-24T13:16:21.000Z | 2021-05-03T08:26:52.000Z | defmodule Bench.Worker do
use Kiq.Worker, queue: "bench"
import Bench.Kiq, only: [bin_to_pid: 1]
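  # Both pattern variables being `index` means this clause only matches the final
  # job (index == total), which notifies the waiting benchmark process.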
def perform([index, index, pid_bin]) do
send(bin_to_pid(pid_bin), :finished)
end
def perform([index, total, _pid_bin]) do
index * total
end
end
enqueue_and_wait = fn total ->
pid_bin = Bench.Kiq.pid_to_bin(self())
for index <- 0..total do
[index, total, pid_bin]
|> Bench.Worker.new()
|> Bench.Kiq.enqueue()
end
receive do
:finished -> :ok
after
5_000 -> IO.puts "No message received"
end
end
Benchee.run(
%{"Enqueue & Perform" => enqueue_and_wait},
inputs: %{"One Hundred Jobs" => 100},
time: 10
)
| 18.361111 | 45 | 0.641452 |
9eb38ec68fbd4372c338cd089f417703572d394e | 235 | ex | Elixir | test/support/commands/create_account.ex | Cantido/calamity | 203c95eca8cbe6cf7eab8a8f88f14ece6246935f | [
"MIT"
] | null | null | null | test/support/commands/create_account.ex | Cantido/calamity | 203c95eca8cbe6cf7eab8a8f88f14ece6246935f | [
"MIT"
] | 1 | 2022-02-28T11:22:00.000Z | 2022-02-28T11:22:00.000Z | test/support/commands/create_account.ex | Cantido/calamity | 203c95eca8cbe6cf7eab8a8f88f14ece6246935f | [
"MIT"
] | null | null | null | # SPDX-FileCopyrightText: 2021 Rosa Richter
#
# SPDX-License-Identifier: MIT
defmodule Calamity.Commands.CreateAccount do
@derive {Calamity.Command, mod: Calamity.BankAccount, key: :account_id}
defstruct [
:account_id
]
end
| 21.363636 | 73 | 0.757447 |
9eb398ccb155fcbabd3c83fefb63784c0478d3ce | 1,539 | ex | Elixir | clients/cloud_kms/lib/google_api/cloud_kms/v1/model/test_iam_permissions_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/cloud_kms/lib/google_api/cloud_kms/v1/model/test_iam_permissions_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/cloud_kms/lib/google_api/cloud_kms/v1/model/test_iam_permissions_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudKMS.V1.Model.TestIamPermissionsResponse do
@moduledoc """
Response message for `TestIamPermissions` method.
## Attributes
* `permissions` (*type:* `list(String.t)`, *default:* `nil`) - A subset of `TestPermissionsRequest.permissions` that the caller is
allowed.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:permissions => list(String.t())
}
field(:permissions, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CloudKMS.V1.Model.TestIamPermissionsResponse do
def decode(value, options) do
GoogleApi.CloudKMS.V1.Model.TestIamPermissionsResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudKMS.V1.Model.TestIamPermissionsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.0625 | 134 | 0.744639 |
9eb405ac0f9aa6b23eff08477aea84303075f459 | 1,633 | ex | Elixir | clients/cloud_shell/lib/google_api/cloud_shell/v1/model/start_environment_metadata.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/cloud_shell/lib/google_api/cloud_shell/v1/model/start_environment_metadata.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/cloud_shell/lib/google_api/cloud_shell/v1/model/start_environment_metadata.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudShell.V1.Model.StartEnvironmentMetadata do
@moduledoc """
Message included in the metadata field of operations returned from StartEnvironment.
## Attributes
- state (String.t): Current state of the environment being started. Defaults to: `null`.
- Enum - one of [STATE_UNSPECIFIED, STARTING, UNARCHIVING_DISK, FINISHED]
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:state => any()
}
field(:state)
end
defimpl Poison.Decoder, for: GoogleApi.CloudShell.V1.Model.StartEnvironmentMetadata do
def decode(value, options) do
GoogleApi.CloudShell.V1.Model.StartEnvironmentMetadata.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudShell.V1.Model.StartEnvironmentMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.326531 | 90 | 0.75444 |
9eb40e1714b8867306d21409cc9f78eebb372c3b | 1,925 | ex | Elixir | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/identity_selector.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/identity_selector.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/identity_selector.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudAsset.V1.Model.IdentitySelector do
@moduledoc """
Specifies an identity for which to determine resource access, based on roles assigned either directly to them or to the groups they belong to, directly or indirectly.
## Attributes
* `identity` (*type:* `String.t`, *default:* `nil`) - Required. The identity appear in the form of principals in [IAM policy binding](https://cloud.google.com/iam/reference/rest/v1/Binding). The examples of supported forms are: "user:[email protected]", "group:[email protected]", "domain:google.com", "serviceAccount:[email protected]". Notice that wildcard characters (such as * and ?) are not supported. You must give a specific identity.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:identity => String.t() | nil
}
field(:identity)
end
defimpl Poison.Decoder, for: GoogleApi.CloudAsset.V1.Model.IdentitySelector do
def decode(value, options) do
GoogleApi.CloudAsset.V1.Model.IdentitySelector.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudAsset.V1.Model.IdentitySelector do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.957447 | 469 | 0.750649 |
9eb43991ed36e3ba7076e1b25e8fb98eeecb89ba | 118 | ex | Elixir | lib/doc_gen_web/views/about_view.ex | the-mikedavis/doc_gen | efcc884ea65bba5748f41c5601abd00db2777ec4 | [
"BSD-3-Clause"
] | null | null | null | lib/doc_gen_web/views/about_view.ex | the-mikedavis/doc_gen | efcc884ea65bba5748f41c5601abd00db2777ec4 | [
"BSD-3-Clause"
] | 27 | 2018-10-29T18:34:44.000Z | 2019-03-11T18:43:12.000Z | lib/doc_gen_web/views/about_view.ex | the-mikedavis/doc_gen | efcc884ea65bba5748f41c5601abd00db2777ec4 | [
"BSD-3-Clause"
] | null | null | null | defmodule DocGenWeb.AboutView do
use DocGenWeb, :view
def title(_, %{title: title}), do: title <> " - About"
end
| 19.666667 | 56 | 0.669492 |
9eb4483e12812e9f76392e044f97d3c26c304f71 | 56 | exs | Elixir | test/payment_messenger_test.exs | aleDsz/payment-messenger | 91477413373b96167fc8185ff7dc0acc78c8be41 | [
"Apache-2.0"
] | 2 | 2021-07-18T11:41:29.000Z | 2021-07-18T22:27:04.000Z | test/payment_messenger_test.exs | aleDsz/payment-messenger | 91477413373b96167fc8185ff7dc0acc78c8be41 | [
"Apache-2.0"
] | null | null | null | test/payment_messenger_test.exs | aleDsz/payment-messenger | 91477413373b96167fc8185ff7dc0acc78c8be41 | [
"Apache-2.0"
] | null | null | null | defmodule PaymentMessengerTest do
use ExUnit.Case
end
| 14 | 33 | 0.839286 |
9eb469c295816cb5a96da347d9c4591e228d2b8a | 3,080 | ex | Elixir | lib/cli/commands/add/execute.ex | AnilRedshift/wand-cli | c364cf5397353fd61ba0e5fc33225575eb72ccda | [
"BSD-3-Clause"
] | 3 | 2018-07-01T05:31:02.000Z | 2019-08-03T04:11:32.000Z | lib/cli/commands/add/execute.ex | AnilRedshift/wand-cli | c364cf5397353fd61ba0e5fc33225575eb72ccda | [
"BSD-3-Clause"
] | 8 | 2018-06-28T07:30:04.000Z | 2018-07-13T07:36:56.000Z | lib/cli/commands/add/execute.ex | AnilRedshift/wand-cli | c364cf5397353fd61ba0e5fc33225575eb72ccda | [
"BSD-3-Clause"
] | null | null | null | defmodule Wand.CLI.Commands.Add.Execute do
@moduledoc false
alias Wand.CLI.Commands.Add.Package
alias Wand.CLI.Executor.Result
alias WandCore.WandFile
alias WandCore.WandFile.Dependency
alias Wand.CLI.DependencyDownloader
def execute(packages, %{wand_file: file}) do
with {:ok, dependencies} <- get_dependencies(packages),
{:ok, file} <- add_dependencies(file, dependencies) do
message =
Enum.map(dependencies, fn %Dependency{name: name, requirement: requirement} ->
"Succesfully added #{name}: #{requirement}"
end)
|> Enum.join("\n")
{:ok, %Result{wand_file: file, message: message}}
else
error -> error
end
end
def after_save(packages), do: download(packages)
defp get_dependencies(packages) do
dependencies =
packages
|> Enum.map(&Task.async(fn -> get_dependency(&1) end))
|> Enum.map(&Task.await/1)
case Enum.find(dependencies, &(elem(&1, 0) == :error)) do
nil -> {:ok, Enum.unzip(dependencies) |> elem(1)}
{:error, {:not_found, name}} -> {:error, :package_not_found, name}
{:error, {reason, _name}} -> {:error, :hex_api_error, reason}
end
end
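  # A `{:latest, mode}` requirement means no explicit version was requested:
  # look up the newest release on Hex and derive a requirement for that mode.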
defp get_dependency(%Package{name: name, requirement: {:latest, mode}} = package) do
case Wand.Hex.releases(name) do
{:ok, [version | _]} ->
requirement = Wand.Mode.get_requirement!(mode, version)
opts = get_opts(package)
{:ok,
%Dependency{
name: name,
opts: opts,
requirement: requirement
}}
{:error, error} ->
{:error, {error, name}}
end
end
defp get_dependency(%Package{} = package) do
{:ok,
%Dependency{
name: package.name,
opts: get_opts(package),
requirement: package.requirement
}}
end
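  # Adds each dependency to the wand file, halting on the first name that already exists.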
defp add_dependencies(file, dependencies) do
Enum.reduce_while(dependencies, {:ok, file}, fn dependency, {:ok, file} ->
case WandFile.add(file, dependency) do
{:ok, file} -> {:cont, {:ok, file}}
{:error, {:already_exists, name}} -> {:halt, {:error, :package_already_exists, name}}
end
end)
end
defp get_opts(%Package{details: details} = package) do
get_base_opts(package)
|> Keyword.merge(get_detail_opts(details))
|> Enum.into(%{})
end
defp get_base_opts(%Package{} = package) do
[
:compile_env,
:only,
:optional,
:override,
:read_app_file,
:runtime
]
|> get_changed(package, %Package{})
end
defp get_detail_opts(details) do
default =
Map.fetch!(details, :__struct__)
|> struct()
Map.keys(details)
|> get_changed(details, default)
end
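  # Keeps only the {key, value} pairs whose values differ from the given default
  # struct, so unchanged options are omitted.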
defp get_changed(keys, config, default) do
    values = Enum.map(keys, &Map.fetch!(config, &1))
    Enum.zip(keys, values)
|> Enum.filter(fn {key, value} ->
value != Map.fetch!(default, key)
end)
end
defp download([%Package{download: download} | _]) when not download, do: :ok
defp download(_), do: DependencyDownloader.download()
end
| 27.017544 | 93 | 0.61461 |
9eb4927545e24ee228dc1c9df8e9f2d33319daee | 659 | ex | Elixir | lib/impl/atomizer/map.ex | Ventup-IT/digger | 60e4cf70ac5f4ab712d45448f8c7971a1db929b2 | [
"MIT"
] | null | null | null | lib/impl/atomizer/map.ex | Ventup-IT/digger | 60e4cf70ac5f4ab712d45448f8c7971a1db929b2 | [
"MIT"
] | null | null | null | lib/impl/atomizer/map.ex | Ventup-IT/digger | 60e4cf70ac5f4ab712d45448f8c7971a1db929b2 | [
"MIT"
] | 1 | 2021-11-09T14:20:08.000Z | 2021-11-09T14:20:08.000Z | defimpl Digger.Atomizer, for: Map do
alias Digger.Atomizer
alias Digger.Opts.Atomizer, as: Opts
def atomize(map, opts) do
opts = Opts.set_options(opts)
map
|> Enum.reduce(%{}, fn {key, value}, acc ->
Map.merge(acc, %{
Atomizer.atomize(key,
type: :key,
key_transform: Keyword.get(opts, :key_transform),
value_transform: Keyword.get(opts, :value_transform)
) =>
Atomizer.atomize(value,
type: :value,
key_transform: Keyword.get(opts, :key_transform),
value_transform: Keyword.get(opts, :value_transform)
)
})
end)
end
end
| 26.36 | 64 | 0.587253 |
9eb49a4ca4b015827a0a8c080b054fe7e3ee7ba1 | 1,097 | ex | Elixir | lib/mix/tasks/init.ex | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 807 | 2015-03-25T14:00:19.000Z | 2022-03-24T08:08:15.000Z | lib/mix/tasks/init.ex | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 254 | 2015-03-27T10:12:25.000Z | 2021-07-12T01:40:15.000Z | lib/mix/tasks/init.ex | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | 85 | 2015-04-02T10:25:19.000Z | 2021-01-30T21:30:43.000Z | defmodule Mix.Tasks.Espec.Init do
use Mix.Task
import Mix.Generator
@preferred_cli_env :test
@shortdoc "Create spec/spec_helper.exs"
@moduledoc """
Creates necessary files.
This tasks creates `spec/spec_helper.exs`
"""
@spec_folder "spec"
@spec_helper "spec_helper.exs"
@shared_spec_folder "shared"
@shared_spec_example "example_spec.exs"
def run(_args) do
create_directory(@spec_folder)
create_file(Path.join(@spec_folder, @spec_helper), spec_helper_template(nil))
shared_specs = Path.join(@spec_folder, @shared_spec_folder)
create_directory(shared_specs)
create_file(Path.join(shared_specs, @shared_spec_example), shared_spec_example_template(nil))
end
embed_template(:spec_helper, """
ESpec.configure fn(config) ->
config.before fn(tags) ->
{:shared, hello: :world, tags: tags}
end
config.finally fn(_shared) ->
:ok
end
end
""")
embed_template(:shared_spec_example, """
defmodule ExampleSharedSpec do
use ESpec, shared: true
# This shared spec will always be included!
end
""")
end
| 21.94 | 97 | 0.708295 |
9eb4a292ab1b757edee88b07bf7d5b3cb2f13ff8 | 2,317 | exs | Elixir | apps/artemis_web/test/artemis_web/browser/wiki_revision_page_test.exs | artemis-platform/artemis_dashboard | 5ab3f5ac4c5255478bbebf76f0e43b44992e3cab | [
"MIT"
] | 9 | 2019-08-19T19:56:34.000Z | 2022-03-22T17:56:38.000Z | apps/artemis_web/test/artemis_web/browser/wiki_revision_page_test.exs | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis_web/test/artemis_web/browser/wiki_revision_page_test.exs | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 2 | 2019-07-05T22:51:47.000Z | 2019-08-19T19:56:37.000Z | defmodule ArtemisWeb.WikiRevisionPageTest do
use ArtemisWeb.ConnCase
use ExUnit.Case
use Hound.Helpers
import Artemis.Factories
import ArtemisWeb.BrowserHelpers
import ArtemisWeb.Router.Helpers
@moduletag :browser
@url wiki_page_url(ArtemisWeb.Endpoint, :index)
hound_session()
describe "authentication" do
test "requires authentication" do
navigate_to(@url)
assert redirected_to_sign_in_page?()
end
end
describe "index" do
setup do
wiki_page = insert(:wiki_page)
wiki_revision = insert(:wiki_revision, wiki_page: wiki_page)
browser_sign_in()
navigate_to(@url)
click_link(wiki_page.title)
click_link("View Revisions")
{:ok, wiki_page: wiki_page, wiki_revision: wiki_revision}
end
test "list of records" do
assert page_title() == "Artemis"
assert visible?("Documentation")
end
test "search", %{wiki_revision: wiki_revision} do
fill_inputs(".search-resource", %{
query: wiki_revision.title
})
submit_search(".search-resource")
assert visible?(wiki_revision.title)
end
end
describe "show" do
setup do
wiki_page = insert(:wiki_page)
wiki_revision = insert(:wiki_revision, wiki_page: wiki_page)
browser_sign_in()
navigate_to(@url)
click_link(wiki_page.title)
click_link("View Revisions")
{:ok, wiki_page: wiki_page, wiki_revision: wiki_revision}
end
test "record details", %{wiki_revision: wiki_revision} do
click_link(wiki_revision.title)
assert visible?(wiki_revision.title)
assert visible?("Notice")
end
end
describe "delete" do
setup do
wiki_page = insert(:wiki_page)
wiki_revision = insert(:wiki_revision, wiki_page: wiki_page)
browser_sign_in()
navigate_to(@url)
click_link(wiki_page.title)
click_link("View Revisions")
{:ok, wiki_page: wiki_page, wiki_revision: wiki_revision}
end
@tag :uses_browser_alert_box
# test "deletes record and redirects to index", %{wiki_revision: wiki_revision} do
# click_link(wiki_revision.title)
# click_button("Delete")
# accept_dialog()
# assert current_url() == @url
# assert not visible?(wiki_revision.title)
# end
end
end
| 23.17 | 86 | 0.676306 |
9eb4a4bfd46dd4a44cb73d87111f749ed738a727 | 1,901 | exs | Elixir | exercises/anagram/test/anagram_test.exs | filalex77/exercism-elixir | df6beeea3fe8b003a0f761aa3f02b66a8a5e83ff | [
"MIT"
] | null | null | null | exercises/anagram/test/anagram_test.exs | filalex77/exercism-elixir | df6beeea3fe8b003a0f761aa3f02b66a8a5e83ff | [
"MIT"
] | null | null | null | exercises/anagram/test/anagram_test.exs | filalex77/exercism-elixir | df6beeea3fe8b003a0f761aa3f02b66a8a5e83ff | [
"MIT"
] | null | null | null | defmodule AnagramTest do
use ExUnit.Case
# @tag :pending
test "no matches" do
matches = Anagram.match("diaper", ["hello", "world", "zombies", "pants"])
assert matches == []
end
@tag :pending
test "detect simple anagram" do
matches = Anagram.match("ant", ["tan", "stand", "at"])
assert matches == ["tan"]
end
@tag :pending
test "detect multiple anagrams" do
matches = Anagram.match("master", ["stream", "pigeon", "maters"])
assert matches == ["stream", "maters"]
end
@tag :pending
test "do not detect anagram subsets" do
matches = Anagram.match("good", ~w(dog goody))
assert matches == []
end
@tag :pending
test "detect anagram" do
matches = Anagram.match("listen", ~w(enlists google inlets banana))
assert matches == ["inlets"]
end
@tag :pending
test "multiple anagrams" do
matches = Anagram.match("allergy", ~w(gallery ballerina regally clergy largely leading))
assert matches == ["gallery", "regally", "largely"]
end
@tag :pending
test "anagrams must use all letters exactly once" do
matches = Anagram.match("patter", ["tapper"])
assert matches == []
end
@tag :pending
test "detect anagrams with case-insensitive subject" do
matches = Anagram.match("Orchestra", ~w(cashregister carthorse radishes))
assert matches == ["carthorse"]
end
@tag :pending
test "detect anagrams with case-insensitive candidate" do
matches = Anagram.match("orchestra", ~w(cashregister Carthorse radishes))
assert matches == ["Carthorse"]
end
@tag :pending
test "anagrams must not be the source word" do
matches = Anagram.match("corn", ["corn", "dark", "Corn", "rank", "CORN", "cron", "park"])
assert matches == ["cron"]
end
@tag :pending
test "do not detect words based on checksum" do
matches = Anagram.match("mass", ["last"])
assert matches == []
end
end
| 27.157143 | 93 | 0.65071 |
9eb4aab3c26745b4f57511edd4473d3300cd181b | 12,235 | ex | Elixir | lib/grizzly/zwave/decoder.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/zwave/decoder.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/zwave/decoder.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | null | null | null | defmodule Grizzly.ZWave.Decoder do
@moduledoc false
defmodule Generate do
@moduledoc false
alias Grizzly.ZWave.{Command, Commands, DecodeError}
@mappings [
# {command_class_byte, command_byte, command_module}
# Basic
{0x20, 0x01, Commands.BasicSet},
{0x20, 0x02, Commands.BasicGet},
{0x20, 0x03, Commands.BasicReport},
# Application status
{0x22, 0x01, Commands.ApplicationBusy},
{0x22, 0x02, Commands.ApplicationRejectedRequest},
# Battery
{0x80, 0x02, Commands.BatteryGet},
{0x80, 0x03, Commands.BatteryReport},
# Z/IP (0x23)
{0x23, 0x02, Commands.ZIPPacket},
{0x23, 0x03, Commands.ZIPKeepAlive},
# Switch Binary (0x25)
{0x25, 0x01, Commands.SwitchBinarySet},
{0x25, 0x02, Commands.SwitchBinaryGet},
{0x25, 0x03, Commands.SwitchBinaryReport},
# Switch Multilevel (0x26)
{0x26, 0x01, Commands.SwitchMultilevelSet},
{0x26, 0x02, Commands.SwitchMultilevelGet},
{0x26, 0x03, Commands.SwitchMultilevelReport},
{0x26, 0x04, Commands.SwitchMultilevelStartLevelChange},
{0x26, 0x05, Commands.SwitchMultiLevelStopLevelChange},
# Sensor binary
{0x30, 0x02, Commands.SensorBinaryGet},
{0x30, 0x03, Commands.SensorBinaryReport},
# Network Management Inclusion (0x34)
{0x34, 0x01, Commands.NodeAdd},
{0x34, 0x02, Commands.NodeAddStatus},
{0x34, 0x03, Commands.NodeRemove},
{0x34, 0x04, Commands.NodeRemoveStatus},
{0x34, 0x07, Commands.FailedNodeRemove},
{0x34, 0x08, Commands.FailedNodeRemoveStatus},
{0x34, 0x11, Commands.NodeAddKeysReport},
{0x34, 0x12, Commands.NodeAddKeysSet},
{0x34, 0x13, Commands.NodeAddDSKReport},
{0x34, 0x14, Commands.NodeAddDSKSet},
{0x34, 0x15, Commands.SmartStartJoinStarted},
# Network Management Basic Node (0x4D)
{0x4D, 0x01, Commands.LearnModeSet},
{0x4D, 0x02, Commands.LearnModeSetStatus},
{0x4D, 0x07, Commands.DefaultSetComplete},
{0x4D, 0x08, Commands.DSKGet},
{0x4D, 0x09, Commands.DSKReport},
# Network Management Proxy (0x52)
{0x52, 0x01, Commands.NodeListGet},
{0x52, 0x02, Commands.NodeListReport},
{0x52, 0x04, Commands.NodeInfoCacheReport},
{0x52, 0x0C, Commands.FailedNodeListReport},
# Multi Channel
{0x60, 0x07, Commands.MultiChannelEndpointGet},
{0x60, 0x08, Commands.MultiChannelEndpointReport},
{0x60, 0x09, Commands.MultiChannelCapabilityGet},
{0x60, 0x0A, Commands.MultiChannelCapabilityReport},
{0x60, 0x0B, Commands.MultiChannelEndpointFind},
{0x60, 0x0C, Commands.MultiChannelEndpointFindReport},
{0x60, 0x0D, Commands.MultiChannelCommandEncapsulation},
{0x60, 0x0E, Commands.MultiChannelAggregatedMembersGet},
{0x60, 0x0F, Commands.MultiChannelAggregatedMembersReport},
# Association group info
{0x59, 0x01, Commands.AssociationGroupNameGet},
{0x59, 0x02, Commands.AssociationGroupNameReport},
{0x59, 0x03, Commands.AssociationGroupInfoGet},
{0x59, 0x04, Commands.AssociationGroupInfoReport},
{0x59, 0x05, Commands.AssociationGroupCommandListGet},
{0x59, 0x06, Commands.AssociationGroupCommandListReport},
# Central scene
{0x5B, 0x01, Commands.CentralSceneSupportedGet},
{0x5B, 0x02, Commands.CentralSceneSupportedReport},
{0x5B, 0x03, Commands.CentralSceneNotification},
{0x5B, 0x04, Commands.CentralSceneConfigurationSet},
{0x5B, 0x05, Commands.CentralSceneConfigurationGet},
{0x5B, 0x06, Commands.CentralSceneConfigurationReport},
# Antitheft
{0x5D, 0x01, Commands.AntitheftSet},
{0x5D, 0x02, Commands.AntitheftGet},
{0x5D, 0x03, Commands.AntitheftReport},
# Z/IP Gateway
{0x5F, 0x0C, Commands.ApplicationNodeInfoGet},
{0x5F, 0x0D, Commands.ApplicationNodeInfoReport},
# Door Lock
{0x62, 0x01, Commands.DoorLockOperationSet},
{0x62, 0x02, Commands.DoorLockOperationGet},
{0x62, 0x03, Commands.DoorLockOperationReport},
# User Code
{0x63, 0x01, Commands.UserCodeSet},
{0x63, 0x02, Commands.UserCodeGet},
{0x63, 0x03, Commands.UserCodeReport},
{0x63, 0x04, Commands.UserCodeUsersNumberGet},
{0x63, 0x05, Commands.UserCodeUsersNumberReport},
# Supervision
{0x6C, 0x01, Commands.SupervisionGet},
{0x6C, 0x02, Commands.SupervisionReport},
# Configuration
{0x70, 0x04, Commands.ConfigurationSet},
{0x70, 0x05, Commands.ConfigurationGet},
{0x70, 0x06, Commands.ConfigurationReport},
{0x70, 0x07, Commands.ConfigurationBulkSet},
{0x70, 0x08, Commands.ConfigurationBulkGet},
{0x70, 0x09, Commands.ConfigurationBulkReport},
{0x70, 0x0E, Commands.ConfigurationPropertiesGet},
{0x70, 0x0F, Commands.ConfigurationPropertiesReport},
# Alarm
{0x71, 0x01, Commands.AlarmEventSupportedGet},
{0x71, 0x02, Commands.AlarmEventSupportedReport},
{0x71, 0x04, Commands.AlarmGet},
{0x71, 0x05, Commands.AlarmReport},
{0x71, 0x06, Commands.AlarmSet},
{0x71, 0x07, Commands.AlarmTypeSupportedGet},
{0x71, 0x08, Commands.AlarmTypeSupportedReport},
# Manufacturer Specific
{0x72, 0x04, Commands.ManufacturerSpecificGet},
{0x72, 0x05, Commands.ManufacturerSpecificReport},
{0x72, 0x06, Commands.ManufacturerSpecificDeviceSpecificGet},
{0x72, 0x07, Commands.ManufacturerSpecificDeviceSpecificReport},
# Antitheft unlock
{0x7E, 0x01, Commands.AntitheftUnlockGet},
{0x7E, 0x02, Commands.AntitheftUnlockReport},
{0x7E, 0x03, Commands.AntitheftUnlockSet},
# Hail
{0x82, 0x01, Commands.Hail},
# Association (0x85)
{0x85, 0x01, Commands.AssociationSet},
{0x85, 0x02, Commands.AssociationGet},
{0x85, 0x03, Commands.AssociationReport},
{0x85, 0x04, Commands.AssociationRemove},
{0x85, 0x05, Commands.AssociationGroupingsGet},
{0x85, 0x06, Commands.AssociationGroupingsReport},
{0x85, 0x0B, Commands.AssociationSpecificGroupGet},
{0x85, 0x0C, Commands.AssociationSpecificGroupReport},
# Multi Channel Association (0x8E)
{0x8E, 0x01, Commands.MultiChannelAssociationSet},
{0x8E, 0x02, Commands.MultiChannelAssociationGet},
{0x8E, 0x03, Commands.MultiChannelAssociationReport},
{0x8E, 0x04, Commands.MultiChannelAssociationRemove},
{0x8E, 0x05, Commands.MultiChannelAssociationGroupingsGet},
{0x8E, 0x06, Commands.MultiChannelAssociationGroupingsReport},
# Version (0x86)
{0x86, 0x11, Commands.VersionGet},
{0x86, 0x12, Commands.VersionReport},
{0x86, 0x13, Commands.CommandClassGet},
{0x86, 0x14, Commands.CommandClassReport},
# Firmware Update Metadata
{0x7A, 0x01, Commands.FirmwareMDGet},
{0x7A, 0x02, Commands.FirmwareMDReport},
{0x7A, 0x03, Commands.FirmwareUpdateMDRequestGet},
{0x7A, 0x04, Commands.FirmwareUpdateMDRequestReport},
{0x7A, 0x05, Commands.FirmwareUpdateMDGet},
{0x7A, 0x06, Commands.FirmwareUpdateMDReport},
{0x7A, 0x07, Commands.FirmwareUpdateMDStatusReport},
{0x7A, 0x08, Commands.FirmwareUpdateActivationSet},
{0x7A, 0x09, Commands.FirmwareUpdateActivationReport},
# Wake Up
{0x84, 0x04, Commands.WakeUpIntervalSet},
{0x84, 0x05, Commands.WakeUpIntervalSet},
{0x84, 0x06, Commands.WakeUpIntervalReport},
{0x84, 0x07, Commands.WakeUpNotification},
{0x84, 0x08, Commands.WakeUpNoMoreInformation},
{0x84, 0x09, Commands.WakeUpIntervalCapabilitiesGet},
{0x84, 0x0A, Commands.WakeUpIntervalCapabilitiesReport},
# Sensor multilevel
{0x31, 0x01, Commands.SensorMultilevelSupportedSensorGet},
{0x31, 0x02, Commands.SensorMultilevelSupportedSensorReport},
{0x31, 0x04, Commands.SensorMultilevelGet},
{0x31, 0x05, Commands.SensorMultilevelReport},
# Meter
{0x32, 0x01, Commands.MeterGet},
{0x32, 0x02, Commands.MeterReport},
# Thermostat mode
{0x40, 0x01, Commands.ThermostatModeSet},
{0x40, 0x02, Commands.ThermostatModeGet},
{0x40, 0x03, Commands.ThermostatModeReport},
# Thermostat setpoint
{0x43, 0x01, Commands.ThermostatSetpointSet},
{0x43, 0x02, Commands.ThermostatSetpointGet},
{0x43, 0x03, Commands.ThermostatSetpointReport},
# Thermostat fan mode
{0x44, 0x01, Commands.ThermostatFanModeSet},
{0x44, 0x02, Commands.ThermostatFanModeGet},
{0x44, 0x03, Commands.ThermostatFanModeReport},
# Thermostat fan state
{0x45, 0x02, Commands.ThermostatFanStateGet},
{0x45, 0x03, Commands.ThermostatFanStateReport},
# Thermostat setback
{0x47, 0x01, Commands.ThermostatSetbackSet},
{0x47, 0x02, Commands.ThermostatSetbackGet},
{0x47, 0x03, Commands.ThermostatSetbackReport},
# Thermostat operating state
{0x42, 0x02, Commands.ThermostatOperatingStateGet},
{0x42, 0x03, Commands.ThermostatOperatingStateReport},
# Node provisioning
{0x78, 0x01, Commands.NodeProvisioningSet},
{0x78, 0x02, Commands.NodeProvisioningDelete},
{0x78, 0x03, Commands.NodeProvisioningListIterationGet},
{0x78, 0x04, Commands.NodeProvisioningListIterationReport},
{0x78, 0x05, Commands.NodeProvisioningGet},
{0x78, 0x06, Commands.NodeProvisioningReport},
# Node naming and location
{0x77, 0x01, Commands.NodeNameSet},
{0x77, 0x02, Commands.NodeNameGet},
{0x77, 0x03, Commands.NodeNameReport},
{0x77, 0x04, Commands.NodeLocationSet},
{0x77, 0x05, Commands.NodeLocationGet},
{0x77, 0x06, Commands.NodeLocationReport},
# Time parameters
{0x8B, 0x01, Commands.TimeParametersSet},
{0x8B, 0x02, Commands.TimeParametersGet},
{0x8B, 0x03, Commands.TimeParametersReport},
# Device reset locally
{0x5A, 0x01, Commands.DeviceResetLocallyNotification},
    # Time
{0x8A, 0x01, Commands.TimeGet},
{0x8A, 0x02, Commands.TimeReport},
{0x8A, 0x03, Commands.DateGet},
{0x8A, 0x04, Commands.DateReport},
{0x8A, 0x05, Commands.TimeOffsetSet},
{0x8A, 0x06, Commands.TimeOffsetGet},
{0x8A, 0x07, Commands.TimeOffsetReport},
    # Indicator
{0x87, 0x01, Commands.IndicatorSet},
{0x87, 0x02, Commands.IndicatorGet},
{0x87, 0x03, Commands.IndicatorReport},
{0x87, 0x04, Commands.IndicatorSupportedGet},
{0x87, 0x05, Commands.IndicatorSupportedReport},
{0x87, 0x06, Commands.IndicatorDescriptionGet},
{0x87, 0x07, Commands.IndicatorDescriptionReport}
]
defmacro __before_compile__(_) do
# Exceptions
from_binary =
for {command_class_byte, command_byte, command_module} <- @mappings do
quote do
def from_binary(
<<unquote(command_class_byte), unquote(command_byte), params::binary>>
),
do: decode(unquote(command_module), params)
end
end
command_module =
for {command_class_byte, command_byte, command_module} <- @mappings do
quote do
def command_module(unquote(command_class_byte), unquote(command_byte)),
do: {:ok, unquote(command_module)}
end
end
quote do
@spec from_binary(binary) :: {:ok, Command.t()} | {:error, DecodeError.t()}
unquote(from_binary)
# No Operation (0x00) - There is no command byte or args for this command, only the command class byte
def from_binary(<<0x00>>), do: decode(Commands.NoOperation, [])
@spec command_module(byte, byte) :: {:ok, module} | {:error, :unsupported_command}
unquote(command_module)
def command_module(_cc_byte, _c_byte), do: {:error, :unsupported_command}
defp decode(command_impl, params) do
case command_impl.decode_params(params) do
{:ok, decoded_params} ->
command_impl.new(decoded_params)
{:error, %DecodeError{}} = error ->
error
end
end
end
end
end
@before_compile Generate
end
| 43.080986 | 110 | 0.681079 |
9eb4b790b9b9256b16e0e2a579cd47e639b90908 | 1,839 | ex | Elixir | lib/working_with_multiple_processes_5/monitor.ex | mikan/elixir-practice | 624525605eb2324e0c55a4ddcb68388c0d2ecefc | [
"Apache-2.0"
] | null | null | null | lib/working_with_multiple_processes_5/monitor.ex | mikan/elixir-practice | 624525605eb2324e0c55a4ddcb68388c0d2ecefc | [
"Apache-2.0"
] | 1 | 2020-01-28T00:19:53.000Z | 2020-01-28T00:19:53.000Z | lib/working_with_multiple_processes_5/monitor.ex | mikan/elixir-practice | 624525605eb2324e0c55a4ddcb68388c0d2ecefc | [
"Apache-2.0"
] | null | null | null | defmodule WorkingWithMultipleProcesses5 do
import :timer, only: [ sleep: 1 ]
def sad_function_exit(back) do
send back, "message sent"
exit(:boom)
end
def sad_function_raise(back) do
send back, "message sent"
raise "boom"
end
def run_exit do
Process.flag(:trap_exit, true)
res = spawn_monitor(__MODULE__, :sad_function_exit, [self()])
IO.puts inspect res
sleep 500
receiver()
end
def run_raise do
Process.flag(:trap_exit, true)
res = spawn_monitor(__MODULE__, :sad_function_raise, [self()])
IO.puts inspect res
sleep 500
receiver()
end
def receiver do
receive do
msg -> IO.puts "MESSAGE RECEIVED: #{inspect msg}"
receiver()
    after 1500 -> IO.puts "Nothing happened as far as I am concerned"
end
end
end
IO.puts "----- exit -----"
WorkingWithMultipleProcesses5.run_exit()
IO.puts "----- raise -----"
WorkingWithMultipleProcesses5.run_raise()
# Result:
# ----- exit -----
# {#PID<0.78.0>, #Reference<0.3230801546.2264924164.207160>}
# MESSAGE RECEIVED: "message sent"
# MESSAGE RECEIVED: {:DOWN, #Reference<0.3230801546.2264924164.207160>, :process, #PID<0.78.0>, :boom}
# Nothing happened as far as I am concerned
# ----- raise -----
# {#PID<0.79.0>, #Reference<0.3230801546.2264924164.207187>}
#
# 23:11:43.160 [error] Process #PID<0.79.0> raised an exception
# ** (RuntimeError) boom
# lib/working_with_multiple_processes_5/monitor.ex:10: WorkingWithMultipleProcesses5.sad_function_raise/1
# MESSAGE RECEIVED: "message sent"
# MESSAGE RECEIVED: {:DOWN, #Reference<0.3230801546.2264924164.207187>, :process, #PID<0.79.0>, {%RuntimeError{message: "b
# oom"}, [{WorkingWithMultipleProcesses5, :sad_function_raise, 1, [file: 'lib/working_with_multiple_processes_5/monitor.ex
# ', line: 10]}]}}
# Nothing happend as far as I am concerned
| 32.263158 | 122 | 0.693312 |
9eb4bb95ba148317487cb67f3e97ee7a2372009f | 674 | exs | Elixir | test/game/command/crash_test.exs | nomicflux/ex_venture | 3e87dc8802c24067256d99856198c814d0bae4d6 | [
"MIT"
] | null | null | null | test/game/command/crash_test.exs | nomicflux/ex_venture | 3e87dc8802c24067256d99856198c814d0bae4d6 | [
"MIT"
] | null | null | null | test/game/command/crash_test.exs | nomicflux/ex_venture | 3e87dc8802c24067256d99856198c814d0bae4d6 | [
"MIT"
] | null | null | null | defmodule Game.Command.CrashTest do
use ExVenture.CommandCase
alias Game.Command.Crash
doctest Crash
setup do
user = create_user(%{name: "user", password: "password", flags: ["admin"]})
%{state: %{socket: :socket, user: user, save: %{room_id: 10}}}
end
describe "crashing a room" do
test "sends a signal to crash the room you are in", %{state: state} do
:ok = Crash.run({:room}, state)
assert_socket_echo "crash"
end
test "you must be an admin", %{state: state} do
state = %{state | user: %{state.user | flags: []}}
:ok = Crash.run({:room}, state)
assert_socket_echo "must be an admin"
end
end
end
| 23.241379 | 79 | 0.624629 |
9eb4d490242c2069fd2a5f76a2ca9f67eb551145 | 778 | ex | Elixir | lib/yodlee/refreshinfo.ex | copia-wealth-studios/yodlee-elixir | f39ba0f32082e9b2116c2ba0fe7da8c6a100e5ad | [
"MIT"
] | 1 | 2019-11-04T09:56:02.000Z | 2019-11-04T09:56:02.000Z | lib/yodlee/refreshinfo.ex | copia-wealth-studios/yodlee-elixir | f39ba0f32082e9b2116c2ba0fe7da8c6a100e5ad | [
"MIT"
] | 2 | 2018-07-11T10:45:09.000Z | 2018-07-12T17:47:53.000Z | lib/yodlee/refreshinfo.ex | copia-wealth-studios/yodlee-elixir | f39ba0f32082e9b2116c2ba0fe7da8c6a100e5ad | [
"MIT"
] | 1 | 2021-07-19T18:20:27.000Z | 2021-07-19T18:20:27.000Z | defmodule Yodlee.Refreshinfo do
@moduledoc """
Yodlee data structure for provider account refresh information.
"""
defstruct status_code: nil, status_message: nil, next_refresh_scheduled: nil,
last_refreshed: nil, last_refresh_attempt: nil, action_required: nil,
additional_status: nil, message: nil
@type t :: %__MODULE__{status_code: String.t,
status_message: String.t,
next_refresh_scheduled: String.t,
last_refreshed: String.t,
last_refresh_attempt: String.t,
action_required: String.t,
additional_status: String.t,
message: String.t
}
end
| 40.947368 | 81 | 0.564267 |
9eb4d564ad78629dc855a1868b8da8ae93d82a8c | 826 | ex | Elixir | test/support/mocks.ex | eeng/blaine | d91d94034925d64632789925ef90a198c0a1d8ea | [
"MIT"
] | null | null | null | test/support/mocks.ex | eeng/blaine | d91d94034925d64632789925ef90a198c0a1d8ea | [
"MIT"
] | null | null | null | test/support/mocks.ex | eeng/blaine | d91d94034925d64632789925ef90a198c0a1d8ea | [
"MIT"
] | null | null | null | Mox.defmock(Blaine.Services.MockAccountsManager,
for: Blaine.Services.AccountsManager.Behaviour
)
Mox.defmock(Blaine.Services.MockUploadsService,
for: Blaine.Services.UploadsService.Behaviour
)
Mox.defmock(Blaine.Persistance.MockRepository, for: Blaine.Persistance.Repository)
Mox.defmock(Blaine.Google.MockAuthAPI, for: Blaine.Google.AuthAPI.Behaviour)
Mox.defmock(Blaine.Google.MockPeopleAPI, for: Blaine.Google.PeopleAPI.Behaviour)
Mox.defmock(Blaine.Google.MockYouTubeAPI, for: Blaine.Google.YouTubeAPI.Behaviour)
Mox.defmock(Blaine.Util.MockHTTP, for: Blaine.Util.HTTP.Behaviour)
Mox.defmock(Blaine.MockClock, for: Blaine.Util.Clock.Behaviour)
defmodule Blaine.Mocks do
defmacro __using__(_opts) do
quote do
import Mox
setup :verify_on_exit!
setup :set_mox_from_context
end
end
end
| 30.592593 | 82 | 0.799031 |
9eb4de1873f9754a3430ba83a4a595a0c7344044 | 1,048 | ex | Elixir | lib/ecto/query/builder/lock.ex | timgestson/ecto | 1c1eb6d322db04cfa48a4fc81da1332e91adbc1f | [
"Apache-2.0"
] | 1 | 2019-05-07T15:05:52.000Z | 2019-05-07T15:05:52.000Z | lib/ecto/query/builder/lock.ex | timgestson/ecto | 1c1eb6d322db04cfa48a4fc81da1332e91adbc1f | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/builder/lock.ex | timgestson/ecto | 1c1eb6d322db04cfa48a4fc81da1332e91adbc1f | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Query.Builder.Lock do
@moduledoc false
alias Ecto.Query.Builder
@doc """
Escapes the lock code.
iex> escape(quote do: "FOO")
"FOO"
"""
@spec escape(Macro.t) :: Macro.t | no_return
def escape(lock) when is_binary(lock), do: lock
def escape(other) do
Builder.error! "`#{Macro.to_string(other)}` is not a valid lock. " <>
"For security reasons, a lock must always be a literal string"
end
@doc """
Builds a quoted expression.
The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(Macro.t, Macro.t, Macro.Env.t) :: Macro.t
def build(query, expr, env) do
Builder.apply_query(query, __MODULE__, [escape(expr)], env)
end
@doc """
  The callback applied by `build/3` to build the query.
"""
@spec apply(Ecto.Queryable.t, term) :: Ecto.Query.t
def apply(query, value) do
query = Ecto.Queryable.to_query(query)
%{query | lock: value}
end
end
| 24.952381 | 81 | 0.655534 |
9eb50b7964f2f41edd97b38c3b8e378b5b286b90 | 270 | ex | Elixir | apps/api/lib/api/configuration.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 4 | 2020-11-30T17:38:57.000Z | 2021-01-23T21:29:41.000Z | apps/api/lib/api/configuration.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 24 | 2020-11-30T17:32:48.000Z | 2021-02-22T06:25:22.000Z | apps/api/lib/api/configuration.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | null | null | null | defmodule API.Configuration do
@moduledoc false
@app :api
@spec port() :: pos_integer()
def port() do
Application.get_env(@app, :port)
end
@spec cors_enabled?() :: bool()
def cors_enabled?() do
Application.get_env(@app, :cors_enabled)
end
end
| 16.875 | 44 | 0.662963 |
9eb55d053c4d0c92808fa47c84287a4b3c4b6fb7 | 1,365 | ex | Elixir | clients/elixir/generated/lib/cloud_manager_api/model/pipeline_execution.ex | shinesolutions/cloudmanager-api-clients | d73a25878f6cc57af954362ba8dccc90d54e6131 | [
"Apache-2.0"
] | 3 | 2020-06-23T05:31:52.000Z | 2020-11-26T05:34:57.000Z | clients/elixir/generated/lib/cloud_manager_api/model/pipeline_execution.ex | shinesolutions/cloudmanager-api-clients | d73a25878f6cc57af954362ba8dccc90d54e6131 | [
"Apache-2.0"
] | 2 | 2021-01-21T01:19:54.000Z | 2021-12-09T22:30:22.000Z | clients/elixir/generated/lib/cloud_manager_api/model/pipeline_execution.ex | shinesolutions/cloudmanager-api-clients | d73a25878f6cc57af954362ba8dccc90d54e6131 | [
"Apache-2.0"
] | 1 | 2020-11-18T11:48:13.000Z | 2020-11-18T11:48:13.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule CloudManagerAPI.Model.PipelineExecution do
@moduledoc """
Wraps a pipeline execution
"""
@derive [Poison.Encoder]
defstruct [
:"id",
:"programId",
:"pipelineId",
:"artifactsVersion",
:"user",
:"status",
:"trigger",
:"createdAt",
:"updatedAt",
:"finishedAt",
:"_embedded",
:"_links"
]
@type t :: %__MODULE__{
:"id" => String.t | nil,
:"programId" => String.t | nil,
:"pipelineId" => String.t | nil,
:"artifactsVersion" => String.t | nil,
:"user" => String.t | nil,
:"status" => String.t | nil,
:"trigger" => String.t | nil,
:"createdAt" => DateTime.t | nil,
:"updatedAt" => DateTime.t | nil,
:"finishedAt" => DateTime.t | nil,
:"_embedded" => PipelineExecutionEmbedded | nil,
:"_links" => PipelineExecutionLinks | nil
}
end
defimpl Poison.Decoder, for: CloudManagerAPI.Model.PipelineExecution do
import CloudManagerAPI.Deserializer
def decode(value, options) do
value
|> deserialize(:"_embedded", :struct, CloudManagerAPI.Model.PipelineExecutionEmbedded, options)
|> deserialize(:"_links", :struct, CloudManagerAPI.Model.PipelineExecutionLinks, options)
end
end
| 26.764706 | 99 | 0.643956 |
9eb57e3a2652dd841746e803d67b86f0fe139d3c | 8,038 | ex | Elixir | lib/vintage_net/ip/dhcpd_config.ex | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 85 | 2019-05-09T14:54:38.000Z | 2022-02-08T16:52:04.000Z | lib/vintage_net/ip/dhcpd_config.ex | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 132 | 2019-05-09T15:57:59.000Z | 2022-02-28T16:31:22.000Z | lib/vintage_net/ip/dhcpd_config.ex | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 14 | 2019-07-08T19:18:23.000Z | 2022-02-08T16:52:05.000Z | defmodule VintageNet.IP.DhcpdConfig do
@moduledoc """
This is a helper module for VintageNet.Technology implementations that use
a DHCP server.
DHCP server parameters are:
* `:start` - Start of the lease block
* `:end` - End of the lease block
* `:max_leases` - The maximum number of leases
* `:decline_time` - The amount of time that an IP will be reserved (leased to nobody)
  * `:conflict_time` - The amount of time that an IP will be reserved
* `:offer_time` - How long an offered address is reserved (seconds)
* `:min_lease` - If client asks for lease below this value, it will be rounded up to this value (seconds)
  * `:auto_time` - The time period at which udhcpd will write out the leases file.
* `:static_leases` - list of `{mac_address, ip_address}`
* `:options` - a map DHCP response options to set. See below.
DHCP response options are (see RFC 2132 for details):
* `:dns` - IP_LIST
* `:domain` - STRING - [0x0f] client's domain suffix
* `:hostname` - STRING
* `:mtu` - NUM
* `:router` - IP_LIST
* `:search` - STRING_LIST - [0x77] search domains
* `:serverid` - IP (defaults to the interface's IP address)
* `:subnet` - IP
Options may also be passed in as integers. These are passed directly to the DHCP server
and their values are strings that are not interpreted by VintageNet. Use this to support
custom DHCP header options.
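
  As an illustration only (all addresses and option values below are arbitrary
  examples, not defaults of this module), a configuration in this shape is
  accepted:

      %{
        dhcpd: %{
          start: "192.168.24.20",
          end: "192.168.24.250",
          options: %{
            dns: ["192.168.24.1"],
            subnet: {255, 255, 255, 0},
            router: ["192.168.24.1"],
            domain: "example.local"
          }
        }
      }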
"""
alias VintageNet.{Command, IP}
alias VintageNet.Interface.RawConfig
@ip_list_options [:dns, :router]
@ip_options [:serverid, :subnet]
@int_options [:mtu]
@string_options [:hostname, :domain]
@string_list_options [:search]
@list_options @ip_list_options ++ @string_list_options
@doc """
Normalize the DHCPD parameters in a configuration.
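
  For example (illustrative addresses), string addresses are converted to tuples
  and keys outside the known parameter set are dropped:

      normalize(%{dhcpd: %{start: "192.168.0.10", end: "192.168.0.50"}})
      #=> %{dhcpd: %{end: {192, 168, 0, 50}, start: {192, 168, 0, 10}}}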
"""
@spec normalize(map()) :: map()
def normalize(%{dhcpd: dhcpd} = config) do
# Normalize IP addresses
new_dhcpd =
dhcpd
|> Map.update(:start, {192, 168, 0, 20}, &IP.ip_to_tuple!/1)
|> Map.update(:end, {192, 168, 0, 254}, &IP.ip_to_tuple!/1)
|> normalize_static_leases()
|> normalize_options()
|> Map.take([
:start,
:end,
:max_leases,
:decline_time,
:conflict_time,
:offer_time,
:min_lease,
:auto_time,
:static_leases,
:options
])
%{config | dhcpd: new_dhcpd}
end
def normalize(config), do: config
defp normalize_static_leases(%{static_leases: leases} = dhcpd_config) do
new_leases = Enum.map(leases, &normalize_lease/1)
%{dhcpd_config | static_leases: new_leases}
end
defp normalize_static_leases(dhcpd_config), do: dhcpd_config
defp normalize_lease({hwaddr, ipa}) do
{hwaddr, IP.ip_to_tuple!(ipa)}
end
defp normalize_options(%{options: options} = dhcpd_config) do
new_options = for option <- options, into: %{}, do: normalize_option(option)
%{dhcpd_config | options: new_options}
end
defp normalize_options(dhcpd_config), do: dhcpd_config
defp normalize_option({ip_option, ip})
when ip_option in @ip_options do
{ip_option, IP.ip_to_tuple!(ip)}
end
defp normalize_option({ip_list_option, ip_list})
when ip_list_option in @ip_list_options and is_list(ip_list) do
{ip_list_option, Enum.map(ip_list, &IP.ip_to_tuple!/1)}
end
defp normalize_option({string_list_option, string_list})
when string_list_option in @string_list_options and is_list(string_list) do
{string_list_option, Enum.map(string_list, &to_string/1)}
end
defp normalize_option({list_option, one_item})
when list_option in @list_options and not is_list(one_item) do
# Fix super-easy mistake of not passing a list when there's only one item
normalize_option({list_option, [one_item]})
end
defp normalize_option({int_option, value})
when int_option in @int_options and
is_integer(value) do
{int_option, value}
end
defp normalize_option({string_option, string})
when string_option in @string_options do
{string_option, to_string(string)}
end
defp normalize_option({other_option, string})
when is_integer(other_option) and is_binary(string) do
{other_option, to_string(string)}
end
defp normalize_option({bad_option, _value}) do
raise ArgumentError,
"Unknown dhcpd option '#{bad_option}'. Options unknown to VintageNet can be passed in as integers."
end
@doc """
Add udhcpd configuration commands for running a DHCP server
"""
@spec add_config(RawConfig.t(), map(), keyword()) :: RawConfig.t()
def add_config(
%RawConfig{
ifname: ifname,
files: files,
child_specs: child_specs
} = raw_config,
%{dhcpd: dhcpd_config},
opts
) do
tmpdir = Keyword.fetch!(opts, :tmpdir)
udhcpd_conf_path = Path.join(tmpdir, "udhcpd.conf.#{ifname}")
new_files =
files ++
[
{udhcpd_conf_path, udhcpd_contents(ifname, dhcpd_config, tmpdir)}
]
new_child_specs =
child_specs ++
[
Supervisor.child_spec(
{MuonTrap.Daemon,
[
"udhcpd",
[
"-f",
udhcpd_conf_path
],
Command.add_muon_options(
stderr_to_stdout: true,
log_output: :debug,
env: BEAMNotify.env(name: "vintage_net_comm", report_env: true)
)
]},
id: :udhcpd
)
]
%RawConfig{raw_config | files: new_files, child_specs: new_child_specs}
end
def add_config(raw_config, _config_without_dhcpd, _opts), do: raw_config
defp udhcpd_contents(ifname, dhcpd, tmpdir) do
pidfile = Path.join(tmpdir, "udhcpd.#{ifname}.pid")
lease_file = Path.join(tmpdir, "udhcpd.#{ifname}.leases")
initial = """
interface #{ifname}
pidfile #{pidfile}
lease_file #{lease_file}
notify_file #{BEAMNotify.bin_path()}
"""
config = Enum.map(dhcpd, &to_udhcpd_string/1)
IO.iodata_to_binary([initial, "\n", config, "\n"])
end
defp to_udhcpd_string({:start, val}) do
"start #{IP.ip_to_string(val)}\n"
end
defp to_udhcpd_string({:end, val}) do
"end #{IP.ip_to_string(val)}\n"
end
defp to_udhcpd_string({:max_leases, val}) do
"max_leases #{val}\n"
end
defp to_udhcpd_string({:decline_time, val}) do
"decline_time #{val}\n"
end
defp to_udhcpd_string({:conflict_time, val}) do
"conflict_time #{val}\n"
end
defp to_udhcpd_string({:offer_time, val}) do
"offer_time #{val}\n"
end
defp to_udhcpd_string({:min_lease, val}) do
"min_lease #{val}\n"
end
defp to_udhcpd_string({:auto_time, val}) do
"auto_time #{val}\n"
end
defp to_udhcpd_string({:static_leases, leases}) do
Enum.map(leases, fn {mac, ip} ->
"static_lease #{mac} #{IP.ip_to_string(ip)}\n"
end)
end
defp to_udhcpd_string({:options, options}) do
for option <- options do
["opt ", to_udhcpd_option_string(option), "\n"]
end
end
defp to_udhcpd_option_string({option, ip}) when option in @ip_options do
[to_string(option), " ", IP.ip_to_string(ip)]
end
defp to_udhcpd_option_string({option, ip_list}) when option in @ip_list_options do
[to_string(option), " " | ip_list_to_iodata(ip_list)]
end
defp to_udhcpd_option_string({option, string_list}) when option in @string_list_options do
[to_string(option), " " | Enum.intersperse(string_list, " ")]
end
defp to_udhcpd_option_string({option, value}) when option in @int_options do
[to_string(option), " ", to_string(value)]
end
defp to_udhcpd_option_string({option, string}) when option in @string_options do
[to_string(option), " ", string]
end
defp to_udhcpd_option_string({other_option, string}) when is_integer(other_option) do
[to_string(other_option), " ", string]
end
defp ip_list_to_iodata(ip_list) do
ip_list
|> Enum.map(&IP.ip_to_string/1)
|> Enum.intersperse(" ")
end
end
| 29.551471 | 109 | 0.657502 |
9eb58352d1bcd99004d7e46929f49d84e66c17c2 | 1,886 | ex | Elixir | clients/display_video/lib/google_api/display_video/v1/model/bulk_edit_partner_assigned_targeting_options_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/display_video/lib/google_api/display_video/v1/model/bulk_edit_partner_assigned_targeting_options_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/display_video/lib/google_api/display_video/v1/model/bulk_edit_partner_assigned_targeting_options_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.BulkEditPartnerAssignedTargetingOptionsResponse do
@moduledoc """
## Attributes
* `createdAssignedTargetingOptions` (*type:* `list(GoogleApi.DisplayVideo.V1.Model.AssignedTargetingOption.t)`, *default:* `nil`) - The list of assigned targeting options that have been successfully created. This list will be absent if empty.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:createdAssignedTargetingOptions =>
list(GoogleApi.DisplayVideo.V1.Model.AssignedTargetingOption.t()) | nil
}
field(:createdAssignedTargetingOptions,
as: GoogleApi.DisplayVideo.V1.Model.AssignedTargetingOption,
type: :list
)
end
defimpl Poison.Decoder,
for: GoogleApi.DisplayVideo.V1.Model.BulkEditPartnerAssignedTargetingOptionsResponse do
def decode(value, options) do
GoogleApi.DisplayVideo.V1.Model.BulkEditPartnerAssignedTargetingOptionsResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.DisplayVideo.V1.Model.BulkEditPartnerAssignedTargetingOptionsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.678571 | 246 | 0.76193 |
9eb5a644844974ada71d13b906941bc80b04ad5e | 757 | ex | Elixir | lib/ophion_ircv3.ex | ophion-project/ophion_ircv3 | 365f74bb430c1c29ef23c71b21c127f4550a0369 | [
"BSD-3-Clause"
] | null | null | null | lib/ophion_ircv3.ex | ophion-project/ophion_ircv3 | 365f74bb430c1c29ef23c71b21c127f4550a0369 | [
"BSD-3-Clause"
] | null | null | null | lib/ophion_ircv3.ex | ophion-project/ophion_ircv3 | 365f74bb430c1c29ef23c71b21c127f4550a0369 | [
"BSD-3-Clause"
] | null | null | null | defmodule Ophion.IRCv3 do
@moduledoc """
Documentation for `Ophion.IRCv3`.
"""
alias Ophion.IRCv3.Composer
alias Ophion.IRCv3.Message
alias Ophion.IRCv3.Parser
@doc """
Parse an IRCv3 frame into an `%Ophion.IRCv3.Message{}`.
"""
def parse(msg) do
with {:ok, %Message{} = data} <- Parser.parse(msg) do
{:ok, data}
else
{:error, e} ->
{:error, e}
e ->
{:error, {:unknown_error, e}}
end
end
@doc """
Compose an IRCv3 frame from an `%Ophion.IRCv3.Message{}`.
"""
def compose(%Message{} = msg) do
with {:ok, data} <- Composer.compose(msg) do
{:ok, data}
else
{:error, e} ->
{:error, e}
e ->
{:error, {:unknown_error, e}}
end
end
end
| 18.925 | 59 | 0.546896 |
9eb5bd10662dbe5438c329888ab5e85048c62fa5 | 7,621 | exs | Elixir | test/skeleton/elasticsearch_test.exs | gVirtu/skeleton_elasticsearch | b86d0329c28a06fa7aa736c233ac19d828c72b71 | [
"MIT"
] | null | null | null | test/skeleton/elasticsearch_test.exs | gVirtu/skeleton_elasticsearch | b86d0329c28a06fa7aa736c233ac19d828c72b71 | [
"MIT"
] | null | null | null | test/skeleton/elasticsearch_test.exs | gVirtu/skeleton_elasticsearch | b86d0329c28a06fa7aa736c233ac19d828c72b71 | [
"MIT"
] | null | null | null | defmodule Skeleton.ElasticsearchTest do
use Skeleton.Query.TestCase
import Skeleton.App.Elasticsearch
alias Skeleton.App.{User, UserIndex}
describe "index" do
setup ctx do
data = %{
settings: %{index: %{number_of_shards: 1, number_of_replicas: 0}},
mappings: %{properties: %{name: %{type: "keyword"}}}
}
create_index("products", data)
ctx
end
test "get_index" do
{:ok, %{"skeleton_elasticsearch-products-test" => index}} = get_index("products")
assert index["settings"]["index"]["number_of_replicas"] == "0"
assert index["settings"]["index"]["number_of_shards"] == "1"
assert index["mappings"]["properties"]["name"]["type"] == "keyword"
end
test "update_index" do
data = %{properties: %{color: %{type: "keyword"}}}
update_index("products", data, [])
{:ok, %{"skeleton_elasticsearch-products-test" => index}} = get_index("products")
assert index["settings"]["index"]["number_of_replicas"] == "0"
assert index["settings"]["index"]["number_of_shards"] == "1"
assert index["mappings"]["properties"]["name"]["type"] == "keyword"
assert index["mappings"]["properties"]["color"]["type"] == "keyword"
end
    # TODO: Check the problem that occurs when the index name is given without using *
test "truncate_index" do
create_document("products", 123, %{name: "product name"})
[res] = search("products", %{query: %{match_all: %{}}})["hits"]["hits"]
assert res["_id"] == "123"
truncate_index("*products*")
{:ok, %{"skeleton_elasticsearch-products-test" => index}} = get_index("products")
assert index["settings"]["index"]["number_of_replicas"] == "0"
assert index["settings"]["index"]["number_of_shards"] == "1"
assert index["mappings"]["properties"]["name"]["type"] == "keyword"
assert search("products", %{query: %{match_all: %{}}})["hits"]["total"]["value"] == 0
assert search("schema_migrations", %{query: %{match_all: %{}}})["hits"]["total"]["value"] ==
1
end
test "drop_index" do
{:ok, %{"skeleton_elasticsearch-products-test" => _}} = get_index("products")
drop_index("*products*", [])
assert {:error, "no such index [skeleton_elasticsearch-products-test]"} =
get_index("products")
end
end
describe "documents" do
setup ctx do
user = create_user()
ctx |> Map.put(:user, user)
end
test "create document" do
{:ok, _user} = create_document("users", "123", %User{name: "user test"})
[res] = search("users", %{query: %{term: %{_id: 123}}})["hits"]["hits"]
assert res["_source"]["name"] == "user test"
end
test "update document", ctx do
{:ok, _user} = update_document("users", ctx.user.id, %{ctx.user | name: "user updated"})
[res] = search("users", %{query: %{term: %{_id: ctx.user.id}}})["hits"]["hits"]
assert res["_source"]["name"] == "user updated"
assert res["_source"]["email"] == ctx.user.email
end
test "update partial document", ctx do
{:ok, _user} = update_document("users", ctx.user.id, %{name: "user updated"})
[res] = search("users", %{query: %{term: %{_id: ctx.user.id}}})["hits"]["hits"]
assert res["_source"]["name"] == "user updated"
assert res["_source"]["email"] == ctx.user.email
end
test "update document by script", ctx do
data = %{
source: """
ctx._source.name = params.name;
""",
lang: "painless",
params: %{
name: "name updated"
}
}
{:ok, _user} = update_document_by_script("users", ctx.user.id, data)
[res] = search("users", %{query: %{term: %{_id: ctx.user.id}}})["hits"]["hits"]
assert res["_source"]["name"] == "name updated"
assert res["_source"]["email"] == ctx.user.email
end
test "update documents by query", ctx do
term = %{term: %{_id: ctx.user.id}}
script = %{
source: """
ctx._source = params;
""",
lang: "painless",
params: %{ctx.user | name: "name updated"}
}
{:ok, _user} = update_documents_by_query("users", term, script)
[res] = search("users", %{query: term})["hits"]["hits"]
assert res["_source"]["name"] == "name updated"
assert res["_source"]["email"] == ctx.user.email
end
test "delete document", ctx do
{:ok, _user} = delete_document("users", ctx.user.id)
assert [] = search("users", %{query: %{term: %{_id: ctx.user.id}}})["hits"]["hits"]
end
test "delete documents by query", ctx do
term = %{term: %{_id: ctx.user.id}}
{:ok, _user} = delete_documents_by_query("users", term)
assert [] = search("users", %{query: term})["hits"]["hits"]
end
end
describe "bulk" do
test "bulk" do
data = [
%{index: %{_id: "1"}},
%{name: "user 1"},
%{index: %{_id: "2"}},
%{name: "user 2"}
]
bulk("users", data)
[res] = search("users", %{query: %{term: %{_id: 1}}})["hits"]["hits"]
assert res["_source"]["name"] == "user 1"
[res] = search("users", %{query: %{term: %{_id: 2}}})["hits"]["hits"]
assert res["_source"]["name"] == "user 2"
end
test "sync" do
user1 = create_user(name: "User 1")
user2 = create_user(name: "User 2")
create_user(name: "User 3")
:ok = sync("users", User, [], :id, &%{&1 | name: "#{&1.name} updated"})
users = search("users", %{query: %{match_all: %{}}})["hits"]["hits"]
assert Enum.all?(users, fn user -> String.contains?(user["_source"]["name"], "updated") end)
assert length(users) == 3
Repo.delete(user1)
:ok = sync("users", User, [], :id, & &1)
users = search("users", %{query: %{match_all: %{}}})["hits"]["hits"]
assert hd(users)["_source"]["name"] == user2.name
assert length(users) == 2
end
test "sync without deleting outdated" do
user1 = create_user(name: "User 1")
create_user(name: "User 2")
create_user(name: "User 3")
:ok = sync("users", User, [], :id, & &1, [], delete_outdated: false)
users = search("users", %{query: %{match_all: %{}}})["hits"]["hits"]
assert length(users) == 3
Repo.delete(user1)
:ok = sync("users", User, [], :id, & &1, [], delete_outdated: false)
users = search("users", %{query: %{match_all: %{}}})["hits"]["hits"]
assert length(users) == 3
end
end
describe "search" do
test "search" do
user1 = create_user(name: "User 1")
create_user(name: "User 2")
[res] = search("users", %{query: %{term: %{_id: user1.id}}})["hits"]["hits"]
assert res["_source"]["name"] == "User 1"
end
end
describe "refres" do
test "refresh" do
{:ok, %{status_code: 200}} = refresh("users", force: true)
{:ok, %{status_code: 404}} = refresh("inexistent", force: true)
end
end
defp create_user(params \\ %{}) do
user =
%User{
id: params[:id],
name: params[:name] || "Name #{params[:id]}",
email: "email-#{params[:id]}@email.com",
admin: false
}
|> change(params)
|> Repo.insert!()
UserIndex.create(user)
user
end
describe "Index name" do
test "returns index name without prefix" do
assert index_name("users") == "skeleton_elasticsearch-users-test"
end
test "returns index name with prefix" do
assert index_name("users", prefix: "tenant") == "skeleton_elasticsearch-tenant-users-test"
end
end
end
| 30.484 | 98 | 0.559507 |
9eb61d59ee4be2ef3372b50de7469dea9dd2c631 | 4,950 | ex | Elixir | lib/google/rpc/error_details.pb.ex | coingaming/grpc | 33b8b3ceec007ae530a33b49a7b302a1ee356b05 | [
"Apache-2.0"
] | null | null | null | lib/google/rpc/error_details.pb.ex | coingaming/grpc | 33b8b3ceec007ae530a33b49a7b302a1ee356b05 | [
"Apache-2.0"
] | null | null | null | lib/google/rpc/error_details.pb.ex | coingaming/grpc | 33b8b3ceec007ae530a33b49a7b302a1ee356b05 | [
"Apache-2.0"
] | 1 | 2022-01-19T22:21:04.000Z | 2022-01-19T22:21:04.000Z | defmodule Google.Rpc.RetryInfo do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
retry_delay: Google.Protobuf.Duration.t() | nil
}
defstruct [:retry_delay]
field :retry_delay, 1, type: Google.Protobuf.Duration
end
defmodule Google.Rpc.DebugInfo do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
stack_entries: [String.t()],
detail: String.t()
}
defstruct [:stack_entries, :detail]
field :stack_entries, 1, repeated: true, type: :string
field :detail, 2, type: :string
end
defmodule Google.Rpc.QuotaFailure.Violation do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
subject: String.t(),
description: String.t()
}
defstruct [:subject, :description]
field :subject, 1, type: :string
field :description, 2, type: :string
end
defmodule Google.Rpc.QuotaFailure do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
violations: [Google.Rpc.QuotaFailure.Violation.t()]
}
defstruct [:violations]
field :violations, 1, repeated: true, type: Google.Rpc.QuotaFailure.Violation
end
defmodule Google.Rpc.ErrorInfo.MetadataEntry do
@moduledoc false
use Protobuf, map: true, syntax: :proto3
@type t :: %__MODULE__{
key: String.t(),
value: String.t()
}
defstruct [:key, :value]
field :key, 1, type: :string
field :value, 2, type: :string
end
defmodule Google.Rpc.ErrorInfo do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
reason: String.t(),
domain: String.t(),
metadata: %{String.t() => String.t()}
}
defstruct [:reason, :domain, :metadata]
field :reason, 1, type: :string
field :domain, 2, type: :string
field :metadata, 3, repeated: true, type: Google.Rpc.ErrorInfo.MetadataEntry, map: true
end
defmodule Google.Rpc.PreconditionFailure.Violation do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
type: String.t(),
subject: String.t(),
description: String.t()
}
defstruct [:type, :subject, :description]
field :type, 1, type: :string
field :subject, 2, type: :string
field :description, 3, type: :string
end
defmodule Google.Rpc.PreconditionFailure do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
violations: [Google.Rpc.PreconditionFailure.Violation.t()]
}
defstruct [:violations]
field :violations, 1, repeated: true, type: Google.Rpc.PreconditionFailure.Violation
end
defmodule Google.Rpc.BadRequest.FieldViolation do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
field: String.t(),
description: String.t()
}
defstruct [:field, :description]
field :field, 1, type: :string
field :description, 2, type: :string
end
defmodule Google.Rpc.BadRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
field_violations: [Google.Rpc.BadRequest.FieldViolation.t()]
}
defstruct [:field_violations]
field :field_violations, 1, repeated: true, type: Google.Rpc.BadRequest.FieldViolation
end
defmodule Google.Rpc.RequestInfo do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
request_id: String.t(),
serving_data: String.t()
}
defstruct [:request_id, :serving_data]
field :request_id, 1, type: :string
field :serving_data, 2, type: :string
end
defmodule Google.Rpc.ResourceInfo do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
resource_type: String.t(),
resource_name: String.t(),
owner: String.t(),
description: String.t()
}
defstruct [:resource_type, :resource_name, :owner, :description]
field :resource_type, 1, type: :string
field :resource_name, 2, type: :string
field :owner, 3, type: :string
field :description, 4, type: :string
end
defmodule Google.Rpc.Help.Link do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
description: String.t(),
url: String.t()
}
defstruct [:description, :url]
field :description, 1, type: :string
field :url, 2, type: :string
end
defmodule Google.Rpc.Help do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
links: [Google.Rpc.Help.Link.t()]
}
defstruct [:links]
field :links, 1, repeated: true, type: Google.Rpc.Help.Link
end
defmodule Google.Rpc.LocalizedMessage do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
locale: String.t(),
message: String.t()
}
defstruct [:locale, :message]
field :locale, 1, type: :string
field :message, 2, type: :string
end
| 22.197309 | 89 | 0.650101 |
9eb61ee78c72c899e8cabe77a11c4b2347c8c1bc | 2,527 | exs | Elixir | config/dev.exs | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | 12 | 2019-07-02T14:30:06.000Z | 2022-03-12T08:22:18.000Z | config/dev.exs | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | 9 | 2020-03-16T20:10:50.000Z | 2021-06-17T17:45:44.000Z | config/dev.exs | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :webapp, WebappWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :webapp, WebappWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/webapp_web/views/.*(ex)$},
~r{lib/webapp_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger,
:console, format: "[$level] $message\n",
level: System.get_env("WEBAPP_LOGLEVEL") || :debug
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
# Configure your database
config :webapp, Webapp.Repo,
database: System.get_env("POSTGRES_DB") || "webapp_dev",
username: System.get_env("POSTGRES_USER") || "postgres",
password: System.get_env("POSTGRES_PASSWORD") || "postgres",
hostname: System.get_env("POSTGRES_HOST") || "localhost",
pool_size: 10
config :sentry,
dsn: System.get_env("SENTRY_DSN") || "https://[email protected]/account_id",
environment_name: :dev,
enable_source_code_context: true,
root_source_code_path: File.cwd!,
tags: %{
env: "dev"
},
included_environments: [:dev] | 29.045977 | 77 | 0.688959 |
9eb63bab7655068b9ecdfa822fd3305bf5ec6bf1 | 1,452 | exs | Elixir | benchs/decompress_bench.exs | sezaru/r_zstd | a4b308b37df2f5212a5ed4dd077b3ecdebbd9726 | [
"MIT"
] | 2 | 2021-04-28T17:20:44.000Z | 2021-12-21T08:18:41.000Z | benchs/decompress_bench.exs | sezaru/rzstd | a4b308b37df2f5212a5ed4dd077b3ecdebbd9726 | [
"MIT"
] | null | null | null | benchs/decompress_bench.exs | sezaru/rzstd | a4b308b37df2f5212a5ed4dd077b3ecdebbd9726 | [
"MIT"
] | 1 | 2021-11-12T16:24:39.000Z | 2021-11-12T16:24:39.000Z | data = """
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
"""
dict = File.read!(Path.join(__DIR__, "dict.zst"))
compressed_data = RZstd.compress(data, 5)
load_decompressor = fn _ -> RZstd.Decompressor.new() end
load_decompressor_with_dict = fn _ -> RZstd.Decompressor.with_dict(dict) end
Benchee.run(%{
"decompress/1" => fn -> RZstd.decompress(compressed_data) end,
"decompress_dirty/1" => fn -> RZstd.decompress(compressed_data) end,
"decompress/2" =>
{fn decompressor -> RZstd.decompress(compressed_data, decompressor) end,
before_scenario: load_decompressor},
"decompress_dirty/2" =>
{fn decompressor -> RZstd.decompress(compressed_data, decompressor) end,
before_scenario: load_decompressor},
"decompress/2 with dict" =>
{fn decompressor -> RZstd.decompress(compressed_data, decompressor) end,
before_scenario: load_decompressor_with_dict},
"decompress_dirty/2 with dict" =>
{fn decompressor -> RZstd.decompress(compressed_data, decompressor) end,
before_scenario: load_decompressor_with_dict}
})
| 51.857143 | 445 | 0.764463 |
9eb645ae416404d3335c2ee700bd79a3509db89f | 3,010 | exs | Elixir | apps/robby_web/mix.exs | puppetlabs/openrobby | a4b70939ee1b878d44cb09d757b7f72e7109ac5d | [
"Apache-2.0"
] | 3 | 2021-04-16T21:54:55.000Z | 2021-04-30T22:15:41.000Z | apps/robby_web/mix.exs | puppetlabs/openrobby | a4b70939ee1b878d44cb09d757b7f72e7109ac5d | [
"Apache-2.0"
] | 1 | 2021-06-29T15:54:19.000Z | 2021-06-29T15:54:19.000Z | apps/robby_web/mix.exs | puppetlabs/openrobby | a4b70939ee1b878d44cb09d757b7f72e7109ac5d | [
"Apache-2.0"
] | 2 | 2021-04-16T22:23:16.000Z | 2021-05-26T15:52:55.000Z | defmodule RobbyWeb.Mixfile do
use Mix.Project
def project do
[
app: :robby_web,
version: get_version(),
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.1",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
dialyzer: [
plt_add_deps: true,
plt_file: ".local.plt",
flags: []
],
deps: deps()
]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[
mod: {RobbyWeb, []},
applications: [
:phoenix,
:phoenix_html,
:cowboy,
:logger,
:gettext,
:bbmustache,
:erlware_commons,
:exactor,
:getopt,
:ibrowse,
:providers,
:phoenix_ecto,
:postgrex,
:con_cache,
:eldap,
:earmark,
:conform,
:mailman,
:eiconv,
:observer,
:runtime_tools,
:logger_file_backend,
:db_connection,
:ldap_search,
:ldap_write,
:sms_code,
:ecto_ldap,
:timex_ecto,
:sweet_xml,
:timex,
:tzdata,
:httpoison,
:mogrify,
:ex_aws,
:ex_aws_s3
]
]
end
defp get_version do
case System.cmd("git", ["describe", "--always", "--tags"]) do
{output, 0} -> String.trim(output)
end
end
# Specifies which paths to compile per environment
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies
#
# Type `mix help deps` for examples and options
defp deps do
[
{:phoenix, "~> 1.3"},
{:phoenix_ecto, "~> 3.3"},
{:phoenix_html, "~> 2.10"},
{:phoenix_live_reload, "~> 1.1", only: :dev},
{:postgrex, "~> 0.13"},
{:ecto, "~> 2.2"},
{:cowboy, "~> 1.0"},
{:gettext, "~> 0.14"},
{:con_cache, "~> 0.12.1"},
{:logger_file_backend, "~> 0.0.10"},
{:mailman, "~> 0.3"},
# this dependency of mailman isn't tracked down for some reason
{:eiconv, github: "zotonic/eiconv", override: true},
{:distillery, "~> 1.5", runtime: false},
{:conform, "~> 2.5"},
{:ldap_search, in_umbrella: true},
{:ldap_write, in_umbrella: true},
{:sms_code, in_umbrella: true},
{:ecto_ldap, "~> 0.4"},
{:timex, "~> 3.1"},
{:timex_ecto, "~> 3.2"},
{:plug, "~> 1.4"},
{:dialyxir, "~> 0.5", only: :dev},
{:poison, "~> 3.1", override: true},
{:erlware_commons, "~> 1.0"},
{:getopt, "~> 1.0"},
{:bbmustache, "~> 1.5"},
{:providers, "~> 1.7"},
{:mogrify, "~> 0.5"},
{:ex_aws, "~> 2.0"},
{:ex_aws_s3, "~> 2.0"},
{:hackney, "~> 1.9"},
{:sweet_xml, "~> 0.6"}
]
end
end
| 24.876033 | 69 | 0.495681 |
9eb67a46cf07f1c44c41321d28e688ae0b564850 | 2,722 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta3_document_page_anchor_page_ref.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta3_document_page_anchor_page_ref.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta3_document_page_anchor_page_ref.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3DocumentPageAnchorPageRef do
@moduledoc """
Represents a weak reference to a page element within a document.
## Attributes
* `boundingPoly` (*type:* `GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BoundingPoly.t`, *default:* `nil`) - Optional. Identifies the bounding polygon of a layout element on the page.
* `confidence` (*type:* `number()`, *default:* `nil`) - Optional. Confidence of detected page element, if applicable. Range [0, 1].
* `layoutId` (*type:* `String.t`, *default:* `nil`) - Optional. Deprecated. Use PageRef.bounding_poly instead.
* `layoutType` (*type:* `String.t`, *default:* `nil`) - Optional. The type of the layout element that is being referenced if any.
* `page` (*type:* `String.t`, *default:* `nil`) - Required. Index into the Document.pages element, for example using Document.pages to locate the related page element.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:boundingPoly =>
GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BoundingPoly.t() | nil,
:confidence => number() | nil,
:layoutId => String.t() | nil,
:layoutType => String.t() | nil,
:page => String.t() | nil
}
field(:boundingPoly,
as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3BoundingPoly
)
field(:confidence)
field(:layoutId)
field(:layoutType)
field(:page)
end
defimpl Poison.Decoder,
for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3DocumentPageAnchorPageRef do
def decode(value, options) do
GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3DocumentPageAnchorPageRef.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3DocumentPageAnchorPageRef do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.029412 | 205 | 0.728508 |
9eb69f897ba42ee38b40e94ac0ff046264cef703 | 1,960 | ex | Elixir | apps/omg_eth/lib/omg_eth/encoding.ex | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | apps/omg_eth/lib/omg_eth/encoding.ex | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | apps/omg_eth/lib/omg_eth/encoding.ex | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | 2 | 2020-06-07T11:14:54.000Z | 2020-08-02T07:36:32.000Z | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Eth.Encoding do
@moduledoc """
Internal encoding helpers to talk to ethereum.
For use in `OMG.Eth` and `OMG.Eth.DevHelper`
"""
@doc """
  Ethereum JSONRPC and Ethereumex's specific encoding and decoding of binaries and ints.
We are enforcing the users of Eth and Eth.<Contract> APIs to always use integers and raw decoded binaries,
when interacting.
Configuration entries are expected to be written in "0xhex-style"
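
  For example (illustrative values):

      iex> OMG.Eth.Encoding.to_hex(<<1, 255>>)
      "0x01ff"

      iex> OMG.Eth.Encoding.to_hex(97)
      "0x61"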
"""
@spec to_hex(binary | non_neg_integer) :: binary
def to_hex(non_hex)
def to_hex(raw) when is_binary(raw), do: "0x" <> Base.encode16(raw, case: :lower)
def to_hex(int) when is_integer(int), do: "0x" <> Integer.to_string(int, 16)
@doc """
Decodes to a raw binary, see `to_hex`
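
  For example (illustrative value):

      iex> OMG.Eth.Encoding.from_hex("0x01ff")
      <<1, 255>>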
"""
# because https://github.com/rrrene/credo/issues/583, we need to:
# credo:disable-for-next-line Credo.Check.Consistency.SpaceAroundOperators
@spec from_hex(<<_::16, _::_*8>>) :: binary
def from_hex("0x" <> encoded), do: Base.decode16!(encoded, case: :lower)
@doc """
Decodes to an integer, see `to_hex`
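
  For example (illustrative value):

      iex> OMG.Eth.Encoding.int_from_hex("0x61")
      97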
"""
# because https://github.com/rrrene/credo/issues/583, we need to:
# credo:disable-for-next-line Credo.Check.Consistency.SpaceAroundOperators
@spec int_from_hex(<<_::16, _::_*8>>) :: non_neg_integer
def int_from_hex("0x" <> encoded) do
{return, ""} = Integer.parse(encoded, 16)
return
end
end
| 36.296296 | 108 | 0.715306 |
9eb7001cbe9a0481615982a3b9109bf7b7d06900 | 1,332 | exs | Elixir | apps/merkle_patricia_tree/test/merkle_patricia_tree_test.exs | atoulme/mana | cff3fd96c23feaaeb9fe32df3c0d35ee6dc548a5 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | apps/merkle_patricia_tree/test/merkle_patricia_tree_test.exs | atoulme/mana | cff3fd96c23feaaeb9fe32df3c0d35ee6dc548a5 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | apps/merkle_patricia_tree/test/merkle_patricia_tree_test.exs | atoulme/mana | cff3fd96c23feaaeb9fe32df3c0d35ee6dc548a5 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | defmodule MerklePatriciaTreeTest do
use ExUnit.Case
alias ExthCrypto.Math
alias MerklePatriciaTree.Trie
@ethereum_common_tests_path "../../ethereum_common_tests"
@passing_tests %{
anyorder: :all,
test: :all
}
test "Ethereum Common Tests" do
for {test_type, test_group} <- @passing_tests do
for {test_name, test} <- read_test_file(test_type),
test_group == :all or Enum.member?(test_group, String.to_atom(test_name)) do
db = MerklePatriciaTree.Test.random_ets_db()
test_in = test["in"]
input =
if is_map(test_in) do
test_in
|> Enum.into([])
|> Enum.map(fn {a, b} -> [a, b] end)
|> Enum.shuffle()
else
test_in
end
trie =
Enum.reduce(input, Trie.new(db), fn [k, v], trie ->
Trie.update(trie, hex_to_bin(k), hex_to_bin(v))
end)
assert trie.root_hash == hex_to_bin(test["root"])
end
end
end
def hex_to_bin(hex = "0x" <> _str), do: Math.hex_to_bin(hex)
def hex_to_bin(x), do: x
def read_test_file(type) do
{:ok, body} = File.read(test_file_name(type))
Poison.decode!(body)
end
def test_file_name(type) do
"#{@ethereum_common_tests_path}/TrieTests/trie#{Atom.to_string(type)}.json"
end
end
| 25.615385 | 86 | 0.600601 |
9eb7063af35b49079d391be7ba1ea6baa5e62eec | 1,266 | ex | Elixir | lib/rumbl/info_sys/wolfram.ex | GArmane/rumbl | 6dbc0c4515601b7fdfe2bed54ef39abfcd8bcb5e | [
"MIT"
] | null | null | null | lib/rumbl/info_sys/wolfram.ex | GArmane/rumbl | 6dbc0c4515601b7fdfe2bed54ef39abfcd8bcb5e | [
"MIT"
] | 2 | 2021-03-09T19:04:16.000Z | 2021-05-10T16:20:10.000Z | lib/rumbl/info_sys/wolfram.ex | GArmane/rumbl | 6dbc0c4515601b7fdfe2bed54ef39abfcd8bcb5e | [
"MIT"
] | 1 | 2020-07-17T14:48:52.000Z | 2020-07-17T14:48:52.000Z | defmodule Rumbl.InfoSys.Wolfram do
import SweetXml
alias Rumbl.InfoSys.Result
defp send_results(nil, query_ref, owner) do
send(owner, {:results, query_ref, []})
end
defp send_results(answer, query_ref, owner) do
results = [
%Result{
backend: "wolfram",
score: 95,
text: to_string(answer),
},
]
send(owner, {:results, query_ref, results})
end
@http Application.get_env(:rumbl, :wolfram)[:http_client] || :httpc
defp fetch_xml(query_str) do
{:ok, {_, _, body}} = @http.request(
String.to_charlist(
"http://api.wolframalpha.com/v2/query" <>
"?appid=#{app_id()}" <>
"&input=#{URI.encode(query_str)}&format=plaintext"
)
)
body
end
defp app_id, do: Application.get_env(:rumbl, :wolfram)[:app_id]
def start_link(query, query_ref, owner, limit) do
Task.start_link(__MODULE__, :fetch, [query, query_ref, owner, limit])
end
def fetch(query_str, query_ref, owner, _limit) do
query_str
|> fetch_xml()
|> xpath(~x"/queryresult/pod[contains(@title, 'Result') or
contains(@title, 'Definitions')]
/subpod/plaintext/text()")
|> send_results(query_ref, owner)
end
end
| 26.375 | 73 | 0.605845 |
9eb7198d32545021aa02a72095a0921b02588600 | 196 | exs | Elixir | priv/repo/migrations/20180405122434_create_teachers.exs | nliechti/termDirectory | 267b3025c14e26575c7a9483692e94a7bf29e5fe | [
"MIT"
] | 1 | 2019-03-15T15:40:24.000Z | 2019-03-15T15:40:24.000Z | priv/repo/migrations/20180405122434_create_teachers.exs | nliechti/termDirectory | 267b3025c14e26575c7a9483692e94a7bf29e5fe | [
"MIT"
] | null | null | null | priv/repo/migrations/20180405122434_create_teachers.exs | nliechti/termDirectory | 267b3025c14e26575c7a9483692e94a7bf29e5fe | [
"MIT"
] | null | null | null | defmodule TermDirectory.Repo.Migrations.CreateTeachers do
use Ecto.Migration
def change do
create table(:teachers) do
add :firstName, :string
timestamps()
end
end
end
| 15.076923 | 57 | 0.69898 |
9eb733d06f50aa8e0747e75667dba2b510b5e494 | 6,434 | ex | Elixir | lib/exdis/database.ex | g-andrade/exdis | 6cb6775b16a237718f10dc5177d9369279e2d528 | [
"MIT"
] | 2 | 2020-03-20T17:25:59.000Z | 2020-03-22T14:17:30.000Z | lib/exdis/database.ex | g-andrade/exdis | 6cb6775b16a237718f10dc5177d9369279e2d528 | [
"MIT"
] | null | null | null | lib/exdis/database.ex | g-andrade/exdis | 6cb6775b16a237718f10dc5177d9369279e2d528 | [
"MIT"
] | null | null | null | defmodule Exdis.Database do
require Logger
require Record
## ------------------------------------------------------------------
## Macro-like Attribute Definitions
## ------------------------------------------------------------------
@server __MODULE__
## ------------------------------------------------------------------
## Type and Record Definitions
## ------------------------------------------------------------------
Record.defrecord(:state,
key_owners: %{},
pids_to_keys: %{},
global_committer_pid: nil,
lock_epoch: nil
)
## ------------------------------------------------------------------
## API Functions
## ------------------------------------------------------------------
def child_spec([]) do
%{
id: @server,
start: {__MODULE__, :start_link, []}
}
end
def start_link() do
:proc_lib.start_link(__MODULE__, :init, [self()])
end
def pid_and_monitor() do
pid = Process.whereis(@server)
monitor = Process.monitor(pid)
{:ok, pid, monitor}
end
def async_lock_keys(pid, monitor, ref, keys, streams_reader \\ :lock_owner) do
reverse_keys = Enum.reverse(keys)
call(pid, monitor, &dispatch_key_lock_requests(&1, &2, ref, streams_reader, reverse_keys))
end
## ------------------------------------------------------------------
## OTP Process Functions
## ------------------------------------------------------------------
# https://gist.github.com/marcelog/97708058cd17f86326c82970a7f81d40#file-simpleproc-erl
def init(parent) do
_ = Process.flag(:trap_exit, true)
debug = :sys.debug_options([])
Process.register(self(), @server)
{:ok, global_committer_pid, committed_epoch} = Exdis.GlobalCommitter.start_link()
state = state(global_committer_pid: global_committer_pid, lock_epoch: committed_epoch + 1)
:proc_lib.init_ack(parent, {:ok, self()})
loop(parent, debug, state)
end
def write_debug(dev, event, name) do
:io.format(dev, '~p event = ~p~n', [name, event])
end
def system_continue(parent, debug, state) do
loop(parent, debug, state)
end
def system_terminate(reason, _parent, _debug, _state) do
exit(reason)
end
def system_code_change(state() = state, _module, _old_vsn, _extra) do
{:ok, state}
end
## ------------------------------------------------------------------
## Private Functions
## ------------------------------------------------------------------
defp call(pid, monitor, handler) do
reply_ref = monitor
send(pid, {:call, self(), reply_ref, handler})
receive do
{^reply_ref, reply} ->
reply
{:"DOWN", ^reply_ref, _, _, reason} ->
exit({reason, {__MODULE__, :call, [pid, monitor, handler]}})
end
end
defp loop(parent, debug, state) do
receive do
msg ->
handle_msg(parent, debug, state, msg)
end
end
defp handle_msg(parent, debug, state, msg) do
case msg do
{:call, reply_pid, reply_ref, handler} ->
{reply, state} = handler.(state, reply_pid)
send(reply_pid, {reply_ref, reply})
loop(parent, debug, state)
{:system, from, request} ->
:sys.handle_system_msg(request, from, parent, __MODULE__, debug, state)
{:"EXIT", pid, reason} ->
state = deal_with_linked_process_death(parent, state, pid, reason)
loop(parent, debug, state)
end
end
## ------------------------------------------------------------------
## Private Functions - Lock Keys
## ------------------------------------------------------------------
defp dispatch_key_lock_requests(state, pid, ref, streams_reader, keys) do
state(key_owners: key_owners, lock_epoch: epoch) = state
dispatch_key_lock_requests_recur(state, key_owners, epoch, pid, ref, streams_reader, keys, [])
end
defp dispatch_key_lock_requests_recur(
state, key_owners, epoch, pid, ref, streams_reader, [key | next_keys], reverse_notified_pids)
do
case Map.get(key_owners, key) do
key_owner_pid when is_pid(key_owner_pid) ->
Exdis.Database.KeyOwner.async_lock(key_owner_pid, epoch, pid, ref, streams_reader)
reverse_notified_pids = [key_owner_pid | reverse_notified_pids]
dispatch_key_lock_requests_recur(
state, key_owners, epoch, pid, ref, streams_reader, next_keys, reverse_notified_pids)
nil ->
state(pids_to_keys: pids_to_keys) = state
{:ok, key_owner_pid} = Exdis.Database.KeyOwner.start_link_and_lock(epoch, pid, ref, streams_reader)
key_owners = Map.put(key_owners, key, key_owner_pid)
pids_to_keys = Map.put(pids_to_keys, key_owner_pid, key)
state = state(state, key_owners: key_owners, pids_to_keys: pids_to_keys)
reverse_notified_pids = [key_owner_pid | reverse_notified_pids]
dispatch_key_lock_requests_recur(
state, key_owners, epoch, pid, ref, streams_reader, next_keys, reverse_notified_pids)
end
end
defp dispatch_key_lock_requests_recur(
state, _key_owners, epoch, _pid, ref, _streams_reader, [], reverse_notified_pids)
do
state(global_committer_pid: global_committer_pid) = state
Exdis.GlobalCommitter.notify_of_new_key_locks(global_committer_pid, epoch, ref, reverse_notified_pids)
reply = reverse_notified_pids # to be reversed on caller
state = state(state, lock_epoch: epoch + 1)
{reply, state}
end
## ------------------------------------------------------------------
## Private Functions - Death of Linked Processes
## ------------------------------------------------------------------
defp deal_with_linked_process_death(parent, _state, pid, reason) when pid === parent do
exit(reason)
end
defp deal_with_linked_process_death(_parent, state, pid, reason) do
case state do
state(pids_to_keys: %{^pid => key} = pids_to_keys, key_owners: key_owners) ->
Logger.warn("Owner #{inspect pid} of key #{inspect key} has stopped unexpectedly")
state(global_committer_pid: global_committer_pid) = state
Exdis.GlobalCommitter.notify_of_key_owner_death(global_committer_pid, pid)
pids_to_keys = Map.delete(pids_to_keys, pid)
key_owners = Map.delete(key_owners, key)
state(state, key_owners: key_owners, pids_to_keys: pids_to_keys)
state(global_committer_pid: ^pid) ->
exit({:global_committer_stopped, %{pid: pid, reason: reason}})
end
end
end
| 35.744444 | 107 | 0.58253 |
9eb73c3585db28d0840743e6299e2546ae976a6b | 687 | exs | Elixir | exercise_1.43.exs | bschmeck/sicp-elixir | 4e6f959f506d0cb4e2692177cbdad1c87779cf7d | [
"MIT"
] | 2 | 2019-08-26T13:17:27.000Z | 2020-09-24T13:16:07.000Z | exercise_1.43.exs | bschmeck/sicp-elixir | 4e6f959f506d0cb4e2692177cbdad1c87779cf7d | [
"MIT"
] | null | null | null | exercise_1.43.exs | bschmeck/sicp-elixir | 4e6f959f506d0cb4e2692177cbdad1c87779cf7d | [
"MIT"
] | null | null | null | defmodule Composition do
def of(f, g), do: fn x -> f.(g.(x)) end
end
defmodule Repeat do
def n_times(f, 1), do: f
def n_times(f, n) when rem(n, 2) == 0 do
g = n_times(f, div(n, 2))
Composition.of(g, g)
end
def n_times(f, n), do: fn x -> f.(n_times(f, n - 1).(x)) end
end
ExUnit.start
defmodule RepeatTests do
use ExUnit.Case, async: true
test "it repeats function application" do
square = &(&1 * &1)
assert Repeat.n_times(square, 2).(5) == 625
end
test "it repeats more than twice" do
inc = &(&1 + 1)
assert Repeat.n_times(inc, 4).(5) == 9
assert Repeat.n_times(inc, 10).(5) == 15
assert Repeat.n_times(inc, 27).(5) == 32
end
end
| 22.16129 | 62 | 0.60262 |
9eb755693f9807ac5e9fd45e45ac2f48c233ca23 | 1,584 | ex | Elixir | clients/storage/lib/google_api/storage/v1/model/notifications.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/storage/lib/google_api/storage/v1/model/notifications.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/storage/lib/google_api/storage/v1/model/notifications.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Storage.V1.Model.Notifications do
@moduledoc """
A list of notification subscriptions.
## Attributes
- items ([Notification]): The list of items. Defaults to: `null`.
- kind (String.t): The kind of item this is. For lists of notifications, this is always storage#notifications. Defaults to: `null`.
"""
defstruct [
:items,
:kind
]
end
defimpl Poison.Decoder, for: GoogleApi.Storage.V1.Model.Notifications do
import GoogleApi.Storage.V1.Deserializer
def decode(value, options) do
value
|> deserialize(:items, :list, GoogleApi.Storage.V1.Model.Notification, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Storage.V1.Model.Notifications do
def encode(value, options) do
GoogleApi.Storage.V1.Deserializer.serialize_non_nil(value, options)
end
end
| 32.326531 | 133 | 0.748106 |
9eb75a83fc23778d5d5e12f14e53ac7759b9f7e5 | 11,144 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1/model/database_instance.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1/model/database_instance.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1/model/database_instance.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SQLAdmin.V1.Model.DatabaseInstance do
@moduledoc """
A Cloud SQL instance resource.
## Attributes
* `ipv6Address` (*type:* `String.t`, *default:* `nil`) - The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.
* `etag` (*type:* `String.t`, *default:* `nil`) - This field is deprecated and will be removed from a future version of the API. Use the `settings.settingsVersion` field instead.
* `databaseVersion` (*type:* `String.t`, *default:* `nil`) - The database engine type and version. The `databaseVersion` field cannot be changed after instance creation.
* `serverCaCert` (*type:* `GoogleApi.SQLAdmin.V1.Model.SslCert.t`, *default:* `nil`) - SSL configuration.
* `suspensionReason` (*type:* `list(String.t)`, *default:* `nil`) - If the instance state is SUSPENDED, the reason for the suspension.
* `maintenanceVersion` (*type:* `String.t`, *default:* `nil`) - The current software version on the instance.
* `serviceAccountEmailAddress` (*type:* `String.t`, *default:* `nil`) - The service account email address assigned to the instance.\\This property is read-only.
* `masterInstanceName` (*type:* `String.t`, *default:* `nil`) - The name of the instance which will act as primary in the replication setup.
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The time when the instance was created in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`.
* `satisfiesPzs` (*type:* `boolean()`, *default:* `nil`) - The status indicating if instance satisfiesPzs. Reserved for future use.
* `databaseInstalledVersion` (*type:* `String.t`, *default:* `nil`) - Output only. Stores the current database version running on the instance including minor version such as `MYSQL_8_0_18`.
* `kind` (*type:* `String.t`, *default:* `nil`) - This is always `sql#instance`.
* `gceZone` (*type:* `String.t`, *default:* `nil`) - The Compute Engine zone that the instance is currently serving from. This value could be different from the zone that was specified when the instance was created if the instance has failed over to its secondary zone. WARNING: Changing this might restart the instance.
* `project` (*type:* `String.t`, *default:* `nil`) - The project ID of the project containing the Cloud SQL instance. The Google apps domain is prefixed if applicable.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of the Cloud SQL instance. This does not include the project ID.
* `state` (*type:* `String.t`, *default:* `nil`) - The current serving state of the Cloud SQL instance.
* `region` (*type:* `String.t`, *default:* `nil`) - The geographical region. Can be: * `us-central` (`FIRST_GEN` instances only) * `us-central1` (`SECOND_GEN` instances only) * `asia-east1` or `europe-west1`. Defaults to `us-central` or `us-central1` depending on the instance type. The region cannot be changed after instance creation.
* `replicaConfiguration` (*type:* `GoogleApi.SQLAdmin.V1.Model.ReplicaConfiguration.t`, *default:* `nil`) - Configuration specific to failover replicas and read replicas.
* `diskEncryptionStatus` (*type:* `GoogleApi.SQLAdmin.V1.Model.DiskEncryptionStatus.t`, *default:* `nil`) - Disk encryption status specific to an instance.
* `connectionName` (*type:* `String.t`, *default:* `nil`) - Connection name of the Cloud SQL instance used in connection strings.
* `onPremisesConfiguration` (*type:* `GoogleApi.SQLAdmin.V1.Model.OnPremisesConfiguration.t`, *default:* `nil`) - Configuration specific to on-premises instances.
* `availableMaintenanceVersions` (*type:* `list(String.t)`, *default:* `nil`) - List all maintenance versions applicable on the instance
* `instanceType` (*type:* `String.t`, *default:* `nil`) - The instance type.
* `ipAddresses` (*type:* `list(GoogleApi.SQLAdmin.V1.Model.IpMapping.t)`, *default:* `nil`) - The assigned IP addresses for the instance.
* `replicaNames` (*type:* `list(String.t)`, *default:* `nil`) - The replicas of the instance.
* `currentDiskSize` (*type:* `String.t`, *default:* `nil`) - The current disk usage of the instance in bytes. This property has been deprecated. Use the "cloudsql.googleapis.com/database/disk/bytes_used" metric in Cloud Monitoring API instead. Please see [this announcement](https://groups.google.com/d/msg/google-cloud-sql-announce/I_7-F9EBhT0/BtvFtdFeAgAJ) for details.
* `secondaryGceZone` (*type:* `String.t`, *default:* `nil`) - The Compute Engine zone that the failover instance is currently serving from for a regional instance. This value could be different from the zone that was specified when the instance was created if the instance has failed over to its secondary/failover zone.
* `settings` (*type:* `GoogleApi.SQLAdmin.V1.Model.Settings.t`, *default:* `nil`) - The user settings.
* `rootPassword` (*type:* `String.t`, *default:* `nil`) - Initial root password. Use only on creation.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - The URI of this resource.
* `backendType` (*type:* `String.t`, *default:* `nil`) - The backend type. `SECOND_GEN`: Cloud SQL database instance. `EXTERNAL`: A database server that is not managed by Google. This property is read-only; use the `tier` property in the `settings` object to determine the database type.
* `scheduledMaintenance` (*type:* `GoogleApi.SQLAdmin.V1.Model.SqlScheduledMaintenance.t`, *default:* `nil`) - The start time of any upcoming scheduled maintenance for this instance.
* `maxDiskSize` (*type:* `String.t`, *default:* `nil`) - The maximum disk size of the instance in bytes.
* `failoverReplica` (*type:* `GoogleApi.SQLAdmin.V1.Model.DatabaseInstanceFailoverReplica.t`, *default:* `nil`) - The name and status of the failover replica.
* `diskEncryptionConfiguration` (*type:* `GoogleApi.SQLAdmin.V1.Model.DiskEncryptionConfiguration.t`, *default:* `nil`) - Disk encryption configuration specific to an instance.
* `outOfDiskReport` (*type:* `GoogleApi.SQLAdmin.V1.Model.SqlOutOfDiskReport.t`, *default:* `nil`) - This field represents the report generated by the proactive database wellness job for OutOfDisk issues. * Writers: * the proactive database wellness job for OOD. * Readers: * the proactive database wellness job
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:ipv6Address => String.t() | nil,
:etag => String.t() | nil,
:databaseVersion => String.t() | nil,
:serverCaCert => GoogleApi.SQLAdmin.V1.Model.SslCert.t() | nil,
:suspensionReason => list(String.t()) | nil,
:maintenanceVersion => String.t() | nil,
:serviceAccountEmailAddress => String.t() | nil,
:masterInstanceName => String.t() | nil,
:createTime => DateTime.t() | nil,
:satisfiesPzs => boolean() | nil,
:databaseInstalledVersion => String.t() | nil,
:kind => String.t() | nil,
:gceZone => String.t() | nil,
:project => String.t() | nil,
:name => String.t() | nil,
:state => String.t() | nil,
:region => String.t() | nil,
:replicaConfiguration => GoogleApi.SQLAdmin.V1.Model.ReplicaConfiguration.t() | nil,
:diskEncryptionStatus => GoogleApi.SQLAdmin.V1.Model.DiskEncryptionStatus.t() | nil,
:connectionName => String.t() | nil,
:onPremisesConfiguration =>
GoogleApi.SQLAdmin.V1.Model.OnPremisesConfiguration.t() | nil,
:availableMaintenanceVersions => list(String.t()) | nil,
:instanceType => String.t() | nil,
:ipAddresses => list(GoogleApi.SQLAdmin.V1.Model.IpMapping.t()) | nil,
:replicaNames => list(String.t()) | nil,
:currentDiskSize => String.t() | nil,
:secondaryGceZone => String.t() | nil,
:settings => GoogleApi.SQLAdmin.V1.Model.Settings.t() | nil,
:rootPassword => String.t() | nil,
:selfLink => String.t() | nil,
:backendType => String.t() | nil,
:scheduledMaintenance => GoogleApi.SQLAdmin.V1.Model.SqlScheduledMaintenance.t() | nil,
:maxDiskSize => String.t() | nil,
:failoverReplica =>
GoogleApi.SQLAdmin.V1.Model.DatabaseInstanceFailoverReplica.t() | nil,
:diskEncryptionConfiguration =>
GoogleApi.SQLAdmin.V1.Model.DiskEncryptionConfiguration.t() | nil,
:outOfDiskReport => GoogleApi.SQLAdmin.V1.Model.SqlOutOfDiskReport.t() | nil
}
field(:ipv6Address)
field(:etag)
field(:databaseVersion)
field(:serverCaCert, as: GoogleApi.SQLAdmin.V1.Model.SslCert)
field(:suspensionReason, type: :list)
field(:maintenanceVersion)
field(:serviceAccountEmailAddress)
field(:masterInstanceName)
field(:createTime, as: DateTime)
field(:satisfiesPzs)
field(:databaseInstalledVersion)
field(:kind)
field(:gceZone)
field(:project)
field(:name)
field(:state)
field(:region)
field(:replicaConfiguration, as: GoogleApi.SQLAdmin.V1.Model.ReplicaConfiguration)
field(:diskEncryptionStatus, as: GoogleApi.SQLAdmin.V1.Model.DiskEncryptionStatus)
field(:connectionName)
field(:onPremisesConfiguration, as: GoogleApi.SQLAdmin.V1.Model.OnPremisesConfiguration)
field(:availableMaintenanceVersions, type: :list)
field(:instanceType)
field(:ipAddresses, as: GoogleApi.SQLAdmin.V1.Model.IpMapping, type: :list)
field(:replicaNames, type: :list)
field(:currentDiskSize)
field(:secondaryGceZone)
field(:settings, as: GoogleApi.SQLAdmin.V1.Model.Settings)
field(:rootPassword)
field(:selfLink)
field(:backendType)
field(:scheduledMaintenance, as: GoogleApi.SQLAdmin.V1.Model.SqlScheduledMaintenance)
field(:maxDiskSize)
field(:failoverReplica, as: GoogleApi.SQLAdmin.V1.Model.DatabaseInstanceFailoverReplica)
field(:diskEncryptionConfiguration, as: GoogleApi.SQLAdmin.V1.Model.DiskEncryptionConfiguration)
field(:outOfDiskReport, as: GoogleApi.SQLAdmin.V1.Model.SqlOutOfDiskReport)
end
defimpl Poison.Decoder, for: GoogleApi.SQLAdmin.V1.Model.DatabaseInstance do
def decode(value, options) do
GoogleApi.SQLAdmin.V1.Model.DatabaseInstance.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.SQLAdmin.V1.Model.DatabaseInstance do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 71.896774 | 375 | 0.702889 |
9eb770c060bac9d7660ebad8799b3c132fe3f9f9 | 642 | ex | Elixir | implementations/elixir/ockam/ockam/lib/ockam/examples/messaging/filter.ex | jared-s/ockam | a1d482550aeafbc2a6040a5efb3f5effc9974d51 | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/lib/ockam/examples/messaging/filter.ex | jared-s/ockam | a1d482550aeafbc2a6040a5efb3f5effc9974d51 | [
"Apache-2.0"
] | 119 | 2021-07-20T15:12:29.000Z | 2022-03-01T10:31:53.000Z | implementations/elixir/ockam/ockam/lib/ockam/examples/messaging/filter.ex | jared-s/ockam | a1d482550aeafbc2a6040a5efb3f5effc9974d51 | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Examples.Messaging.Filter do
@moduledoc """
Filter worker
Randomly drops 5% of messages, forwards the rest to onward_route
Adds itself to return_route
"""
use Ockam.Worker
alias Ockam.Message
@impl true
def handle_message(message, state) do
if :rand.uniform(100) > 5 do
forward_message(message)
end
{:ok, state}
end
def forward_message(message) do
[me | onward_route] = Message.onward_route(message)
Ockam.Router.route(%{
onward_route: onward_route,
return_route: [me | Message.return_route(message)],
payload: Message.payload(message)
})
end
end
| 20.0625 | 66 | 0.688474 |
9eb770e1fc6a7d573747e729e83de53dc24349c4 | 5,271 | ex | Elixir | lib/aws/generated/transcribe_streaming.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/transcribe_streaming.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/transcribe_streaming.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.TranscribeStreaming do
@moduledoc """
Operations and objects for transcribing streaming speech to text.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-10-26",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "transcribestreaming",
global?: false,
protocol: "rest-json",
service_id: "Transcribe Streaming",
signature_version: "v4",
signing_name: "transcribe",
target_prefix: nil
}
end
@doc """
Starts a bidirectional HTTP/2 stream where audio is streamed to Amazon
Transcribe Medical and the transcription results are streamed to your
application.
"""
def start_medical_stream_transcription(%Client{} = client, input, options \\ []) do
url_path = "/medical-stream-transcription"
{headers, input} =
[
{"ContentIdentificationType", "x-amzn-transcribe-content-identification-type"},
{"EnableChannelIdentification", "x-amzn-transcribe-enable-channel-identification"},
{"LanguageCode", "x-amzn-transcribe-language-code"},
{"MediaEncoding", "x-amzn-transcribe-media-encoding"},
{"MediaSampleRateHertz", "x-amzn-transcribe-sample-rate"},
{"NumberOfChannels", "x-amzn-transcribe-number-of-channels"},
{"SessionId", "x-amzn-transcribe-session-id"},
{"ShowSpeakerLabel", "x-amzn-transcribe-show-speaker-label"},
{"Specialty", "x-amzn-transcribe-specialty"},
{"Type", "x-amzn-transcribe-type"},
{"VocabularyName", "x-amzn-transcribe-vocabulary-name"}
]
|> Request.build_params(input)
query_params = []
options =
Keyword.put(
options,
:response_header_parameters,
[
{"x-amzn-transcribe-content-identification-type", "ContentIdentificationType"},
{"x-amzn-transcribe-enable-channel-identification", "EnableChannelIdentification"},
{"x-amzn-transcribe-language-code", "LanguageCode"},
{"x-amzn-transcribe-media-encoding", "MediaEncoding"},
{"x-amzn-transcribe-sample-rate", "MediaSampleRateHertz"},
{"x-amzn-transcribe-number-of-channels", "NumberOfChannels"},
{"x-amzn-request-id", "RequestId"},
{"x-amzn-transcribe-session-id", "SessionId"},
{"x-amzn-transcribe-show-speaker-label", "ShowSpeakerLabel"},
{"x-amzn-transcribe-specialty", "Specialty"},
{"x-amzn-transcribe-type", "Type"},
{"x-amzn-transcribe-vocabulary-name", "VocabularyName"}
]
)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Starts a bidirectional HTTP2 stream where audio is streamed to Amazon Transcribe
and the transcription results are streamed to your application.
The following are encoded as HTTP2 headers:
* x-amzn-transcribe-language-code
* x-amzn-transcribe-media-encoding
* x-amzn-transcribe-sample-rate
* x-amzn-transcribe-session-id
"""
def start_stream_transcription(%Client{} = client, input, options \\ []) do
url_path = "/stream-transcription"
{headers, input} =
[
{"EnableChannelIdentification", "x-amzn-transcribe-enable-channel-identification"},
{"LanguageCode", "x-amzn-transcribe-language-code"},
{"MediaEncoding", "x-amzn-transcribe-media-encoding"},
{"MediaSampleRateHertz", "x-amzn-transcribe-sample-rate"},
{"NumberOfChannels", "x-amzn-transcribe-number-of-channels"},
{"SessionId", "x-amzn-transcribe-session-id"},
{"ShowSpeakerLabel", "x-amzn-transcribe-show-speaker-label"},
{"VocabularyFilterMethod", "x-amzn-transcribe-vocabulary-filter-method"},
{"VocabularyFilterName", "x-amzn-transcribe-vocabulary-filter-name"},
{"VocabularyName", "x-amzn-transcribe-vocabulary-name"}
]
|> Request.build_params(input)
query_params = []
options =
Keyword.put(
options,
:response_header_parameters,
[
{"x-amzn-transcribe-enable-channel-identification", "EnableChannelIdentification"},
{"x-amzn-transcribe-language-code", "LanguageCode"},
{"x-amzn-transcribe-media-encoding", "MediaEncoding"},
{"x-amzn-transcribe-sample-rate", "MediaSampleRateHertz"},
{"x-amzn-transcribe-number-of-channels", "NumberOfChannels"},
{"x-amzn-request-id", "RequestId"},
{"x-amzn-transcribe-session-id", "SessionId"},
{"x-amzn-transcribe-show-speaker-label", "ShowSpeakerLabel"},
{"x-amzn-transcribe-vocabulary-filter-method", "VocabularyFilterMethod"},
{"x-amzn-transcribe-vocabulary-filter-name", "VocabularyFilterName"},
{"x-amzn-transcribe-vocabulary-name", "VocabularyName"}
]
)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
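  # Illustrative call sketch (not part of the original module); the input keys
  # mirror the header mappings above, and the client/value details are assumptions:
  #
  #     AWS.TranscribeStreaming.start_stream_transcription(client, %{
  #       "LanguageCode" => "en-US",
  #       "MediaEncoding" => "pcm",
  #       "MediaSampleRateHertz" => 16000
  #     })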
end
| 34.45098 | 93 | 0.642762 |
9eb792b6dfdfde10bb3fed25a56ec4c99293f05b | 916 | ex | Elixir | lib/mix/tasks/google_apis.convert.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/google_apis.convert.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/google_apis.convert.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Mix.Tasks.GoogleApis.Convert do
use Mix.Task
@shortdoc "Convert GoogleApi discover definitions to OpenApi"
def run([only]) do
only
|> GoogleApis.ApiConfig.load()
|> Enum.each(&GoogleApis.convert_spec/1)
end
def run(_) do
Enum.each(GoogleApis.ApiConfig.load_all(), &GoogleApis.convert_spec/1)
end
end
| 31.586207 | 74 | 0.744541 |
9eb7a90f6e67c99b498ca6e20341486fd7dfa8f9 | 1,021 | exs | Elixir | rumbl/config/config.exs | arilsonsouza/programming_phoenix | 71a2a44e8cf84b6b133422899324363a09ccc07c | [
"MIT"
] | null | null | null | rumbl/config/config.exs | arilsonsouza/programming_phoenix | 71a2a44e8cf84b6b133422899324363a09ccc07c | [
"MIT"
] | null | null | null | rumbl/config/config.exs | arilsonsouza/programming_phoenix | 71a2a44e8cf84b6b133422899324363a09ccc07c | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :rumbl,
ecto_repos: [Rumbl.Repo]
# Configures the endpoint
config :rumbl, RumblWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "8lwzEgc0dzmef1912k2R9K2h17MI2/PH95f9zTp/lLB6XLkU1CUlvoyDY4MW2FHU",
render_errors: [view: RumblWeb.ErrorView, accepts: ~w(html json), layout: false],
pubsub_server: Rumbl.PubSub,
live_view: [signing_salt: "AiJHZfql"]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 31.90625 | 86 | 0.766895 |
9eb7ac5a0abc3e7a95d9ca681a1d1a79faf42f2f | 2,479 | ex | Elixir | clients/cloud_error_reporting/lib/google_api/cloud_error_reporting/v1beta1/model/error_event.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/cloud_error_reporting/lib/google_api/cloud_error_reporting/v1beta1/model/error_event.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/cloud_error_reporting/lib/google_api/cloud_error_reporting/v1beta1/model/error_event.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorEvent do
@moduledoc """
An error event which is returned by the Error Reporting system.
## Attributes
* `context` (*type:* `GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorContext.t`, *default:* `nil`) - Data about the context in which the error occurred.
* `eventTime` (*type:* `DateTime.t`, *default:* `nil`) - Time when the event occurred as provided in the error report. If the report did not contain a timestamp, the time the error was received by the Error Reporting system is used.
* `message` (*type:* `String.t`, *default:* `nil`) - The stack trace that was reported or logged by the service.
* `serviceContext` (*type:* `GoogleApi.CloudErrorReporting.V1beta1.Model.ServiceContext.t`, *default:* `nil`) - The `ServiceContext` for which this error was reported.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:context => GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorContext.t(),
:eventTime => DateTime.t(),
:message => String.t(),
:serviceContext => GoogleApi.CloudErrorReporting.V1beta1.Model.ServiceContext.t()
}
field(:context, as: GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorContext)
field(:eventTime, as: DateTime)
field(:message)
field(:serviceContext, as: GoogleApi.CloudErrorReporting.V1beta1.Model.ServiceContext)
end
defimpl Poison.Decoder, for: GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorEvent do
def decode(value, options) do
GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorEvent.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorEvent do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 44.267857 | 236 | 0.742638 |
9eb7b22b02d2f84c4493083ab0eef294c13c2a9a | 3,129 | ex | Elixir | lib/tz_world/backend/ets_with_index_cache.ex | LostKobrakai/tz_world | 65e138301adc0c9e6d6135897d2607ef6c361bca | [
"MIT"
] | null | null | null | lib/tz_world/backend/ets_with_index_cache.ex | LostKobrakai/tz_world | 65e138301adc0c9e6d6135897d2607ef6c361bca | [
"MIT"
] | null | null | null | lib/tz_world/backend/ets_with_index_cache.ex | LostKobrakai/tz_world | 65e138301adc0c9e6d6135897d2607ef6c361bca | [
"MIT"
] | null | null | null | defmodule TzWorld.Backend.EtsWithIndexCache do
@moduledoc false
@behaviour TzWorld.Backend
use GenServer
alias Geo.Point
@timeout 10_000
@tz_world_version :tz_world_version
@doc false
@options [:named_table, :compressed, read_concurrency: true]
def start_link(options \\ @options) do
options = if options == [], do: @options, else: options
GenServer.start_link(__MODULE__, options, name: __MODULE__)
end
def init(options) do
{:ok, [], {:continue, {:load_data, options}}}
end
def version do
GenServer.call(__MODULE__, :version, @timeout)
end
@spec timezone_at(Geo.Point.t()) :: {:ok, String.t()} | {:error, atom}
def timezone_at(%Point{} = point) do
GenServer.call(__MODULE__, {:timezone_at, point}, @timeout)
end
@spec all_timezones_at(Geo.Point.t()) :: {:ok, [String.t()]} | {:error, atom}
def all_timezones_at(%Point{} = point) do
GenServer.call(__MODULE__, {:all_timezones_at, point}, @timeout)
end
@spec reload_timezone_data :: {:ok, term}
def reload_timezone_data do
GenServer.call(__MODULE__, :reload_data, @timeout)
end
def load_geodata do
{:ok, t} = TzWorld.Backend.Dets.get_geodata_table()
:dets.to_ets(t, __MODULE__)
end
# --- Server callback implementation
@doc false
def handle_continue({:load_data, options}, _state) do
__MODULE__ = :ets.new(__MODULE__, options)
__MODULE__ = load_geodata()
{:noreply, get_index_cache()}
end
@doc false
def handle_call({:timezone_at, %Geo.Point{} = point}, _from, state) do
{:reply, find_zone(point, state), state}
end
@doc false
def handle_call({:all_timezones_at, %Geo.Point{} = point}, _from, state) do
{:reply, find_zones(point, state), state}
end
@doc false
def handle_call(:version, _from, state) do
[{_, version}] = :ets.lookup(__MODULE__, @tz_world_version)
{:reply, version, state}
end
@doc false
def handle_call(:reload_data, _from, _state) do
{:reply, {:ok, load_geodata()}, get_index_cache()}
end
defp find_zone(%Geo.Point{} = point, state) do
point
|> select_candidates(state)
|> Enum.find(&TzWorld.contains?(&1, point))
|> case do
%Geo.MultiPolygon{properties: %{tzid: tzid}} -> {:ok, tzid}
%Geo.Polygon{properties: %{tzid: tzid}} -> {:ok, tzid}
nil -> {:error, :time_zone_not_found}
end
end
defp find_zones(%Geo.Point{} = point, state) do
point
|> select_candidates(state)
|> Enum.filter(&TzWorld.contains?(&1, point))
|> Enum.map(&(&1.properties.tzid))
|> wrap(:ok)
end
defp wrap(term, atom) do
{atom, term}
end
defp select_candidates(%{coordinates: {lng, lat}}, state) do
Enum.filter(state, fn {x_min, x_max, y_min, y_max} ->
lng >= x_min && lng <= x_max && lat >= y_min && lat <= y_max
end)
|> Enum.map(fn bounding_box ->
[{_key, value}] = :ets.lookup(__MODULE__, bounding_box)
value
end)
end
def get_index_cache do
:ets.select(__MODULE__, index_spec())
end
def index_spec do
[{{{:"$1", :"$2", :"$3", :"$4"}, :"$5"}, [], [{{:"$1", :"$2", :"$3", :"$4"}}]}]
end
end
| 26.294118 | 83 | 0.639821 |
9eb7bdddc9c27cb1c58e9adf1bb7111c0abd5810 | 3,237 | ex | Elixir | lib/azurex/authorization/shared_key.ex | Zinoex/azurex | ba6e56434d0ff6f5cda7332718fd3080b800cc4b | [
"MIT"
] | null | null | null | lib/azurex/authorization/shared_key.ex | Zinoex/azurex | ba6e56434d0ff6f5cda7332718fd3080b800cc4b | [
"MIT"
] | 1 | 2021-05-05T23:09:55.000Z | 2021-05-05T23:09:55.000Z | lib/azurex/authorization/shared_key.ex | workplacearcade/azurex | 9e9b42870db4c2fe28893b604a178e0ce1e8fdd7 | [
"MIT"
] | null | null | null | defmodule Azurex.Authorization.SharedKey do
@moduledoc """
  Implements the Azure REST API Shared Key authorization method.
  It is based on: https://docs.microsoft.com/en-us/rest/api/storageservices/authorize-with-shared-key
  as defined on 26 November 2019.
"""
@spec sign(HTTPoison.Request.t(), keyword) :: HTTPoison.Request.t()
def sign(request, opts \\ []) do
storage_account_name = Keyword.fetch!(opts, :storage_account_name)
storage_account_key = Keyword.fetch!(opts, :storage_account_key)
content_type = Keyword.get(opts, :content_type)
request = put_standard_headers(request, content_type)
method = get_method(request)
size = get_size(request)
headers = get_headers_signature(request)
uri_signature = get_uri_signature(request, storage_account_name)
signature =
[
# HTTP Verb
method,
# Content-Encoding
"",
# Content-Language
"",
# Content-Length
size,
# Content-MD5
"",
# Content-Type
content_type || "",
# Date
"",
# If-Modified-Since
"",
# If-Match
"",
# If-None-Match
"",
# If-Unmodified-Since
"",
# Range
"",
# CanonicalizedHeaders
headers,
# CanonicalizedResource
uri_signature
]
|> Enum.join("\n")
put_signature(request, signature, storage_account_name, storage_account_key)
end
defp put_standard_headers(request, content_type) do
now =
Timex.now("GMT") |> Timex.format!("{WDshort}, {0D} {Mshort} {YYYY} {h24}:{m}:{s} {Zname}")
headers =
if content_type,
do: [{"content-type", content_type} | request.headers],
else: request.headers
headers = [
{"x-ms-version", "2019-12-12"},
{"x-ms-date", now}
| headers
]
struct(request, headers: headers)
end
defp get_method(request), do: request.method |> Atom.to_string() |> String.upcase()
defp get_size(request) do
size = request.body |> byte_size()
if size != 0, do: size, else: ""
end
defp get_headers_signature(request) do
request.headers
|> Enum.map(fn {k, v} -> {String.downcase(k), v} end)
|> Enum.filter(fn {k, _v} -> String.starts_with?(k, "x-ms-") end)
|> Enum.group_by(fn {k, _v} -> k end, fn {_k, v} -> v end)
|> Enum.sort_by(fn {k, _v} -> k end)
|> Enum.map(fn {k, v} ->
v = v |> Enum.sort() |> Enum.join(",")
k <> ":" <> v
end)
|> Enum.join("\n")
end
defp get_uri_signature(request, storage_account_name) do
uri = URI.parse(request.url)
path = uri.path || "/"
query = URI.query_decoder(uri.query || "")
[
"/",
storage_account_name,
path
| Enum.map(query, fn {k, v} ->
["\n", k, ":", v]
end)
]
end
defp put_signature(request, signature, storage_account_name, storage_account_key) do
signature =
:crypto.mac(:hmac, :sha256, storage_account_key, signature)
|> Base.encode64()
authorization = {"authorization", "SharedKey #{storage_account_name}:#{signature}"}
headers = [authorization | request.headers]
struct(request, headers: headers)
end
end
| 26.532787 | 101 | 0.591597 |
9eb7bff0489ec391c8151a2040ebc23026177f12 | 1,709 | ex | Elixir | clients/content/lib/google_api/content/v21/model/free_listings_program_status_review_ineligibility_reason_details.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v21/model/free_listings_program_status_review_ineligibility_reason_details.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v21/model/free_listings_program_status_review_ineligibility_reason_details.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.FreeListingsProgramStatusReviewIneligibilityReasonDetails do
@moduledoc """
Additional details for review ineligibility reasons.
## Attributes
    * `cooldownTime` (*type:* `DateTime.t`, *default:* `nil`) - This timestamp represents the end of the cooldown period for review ineligibility reason `IN_COOLDOWN_PERIOD`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:cooldownTime => DateTime.t() | nil
}
field(:cooldownTime, as: DateTime)
end
defimpl Poison.Decoder,
for: GoogleApi.Content.V21.Model.FreeListingsProgramStatusReviewIneligibilityReasonDetails do
def decode(value, options) do
GoogleApi.Content.V21.Model.FreeListingsProgramStatusReviewIneligibilityReasonDetails.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Content.V21.Model.FreeListingsProgramStatusReviewIneligibilityReasonDetails do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.865385 | 165 | 0.761264 |
9eb7c1cdd476296be79b91e03e602028dd23eebb | 788 | ex | Elixir | lib/ex_prometheus_io.ex | kennyballou/ex_prometheus_io | da2a3bf02ee083ba1caef3673ebef0e708b9b9ee | [
"Apache-2.0"
] | 2 | 2016-03-16T12:47:48.000Z | 2016-03-16T19:16:04.000Z | lib/ex_prometheus_io.ex | kennyballou/ex_prometheus_io | da2a3bf02ee083ba1caef3673ebef0e708b9b9ee | [
"Apache-2.0"
] | null | null | null | lib/ex_prometheus_io.ex | kennyballou/ex_prometheus_io | da2a3bf02ee083ba1caef3673ebef0e708b9b9ee | [
"Apache-2.0"
] | null | null | null | defmodule ExPrometheusIo do
use Application
def start(_, _) do
ExPrometheusIo.Supervisor.start_link
end
def query(query, _opts \\ []) do
query_opts = [query]
spawn_query(:query, query_opts)
end
def range(query, start_ts, end_ts, step, _opts \\ []) do
query_opts = {query, start_ts, end_ts, step}
spawn_query(:range, query_opts)
end
def series(matches, _opts \\ []) when is_list(matches) do
spawn_query(:series, {matches})
end
defp spawn_query(query, query_opts, _opts \\ []) do
query_ref = make_ref()
query_opts = [query, query_opts, query_ref, self()]
{:ok, pid} = Task.Supervisor.start_child(
ExPrometheusIo.QuerySupervisor,
ExPrometheusIo.Query,
:process,
query_opts)
{pid, query_ref}
end
end
| 23.176471 | 59 | 0.666244 |
9eb7f7853b47762b399853873416b4456631502c | 150 | ex | Elixir | test/support/models/channel/sms_result.ex | daveli/polymorphic_embed | 8f6c8180a682f26aec98c6e9cbd84c20b6895be9 | [
"Apache-2.0"
] | null | null | null | test/support/models/channel/sms_result.ex | daveli/polymorphic_embed | 8f6c8180a682f26aec98c6e9cbd84c20b6895be9 | [
"Apache-2.0"
] | null | null | null | test/support/models/channel/sms_result.ex | daveli/polymorphic_embed | 8f6c8180a682f26aec98c6e9cbd84c20b6895be9 | [
"Apache-2.0"
] | 1 | 2020-11-26T01:50:50.000Z | 2020-11-26T01:50:50.000Z | defmodule PolymorphicEmbed.Channel.SMSResult do
use Ecto.Schema
@primary_key false
embedded_schema do
field(:success, :boolean)
end
end
| 15 | 47 | 0.76 |
9eb813544ddf5fed3cc9ea90536dcf8429c4dafc | 8,734 | ex | Elixir | lib/livebook_web/helpers.ex | Fudoshiki/livebook | 0b30fd02d9a50b84873725f3e05974d62fee398f | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/helpers.ex | Fudoshiki/livebook | 0b30fd02d9a50b84873725f3e05974d62fee398f | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/helpers.ex | Fudoshiki/livebook | 0b30fd02d9a50b84873725f3e05974d62fee398f | [
"Apache-2.0"
] | null | null | null | defmodule LivebookWeb.Helpers do
use Phoenix.Component
alias Phoenix.LiveView.JS
alias LivebookWeb.Router.Helpers, as: Routes
alias Livebook.FileSystem
@doc """
Wraps the given content in a modal dialog.
When closed, the modal redirects to the given `:return_to` URL.
## Example
      <.modal return_to={...}>
        <.live_component module={MyComponent} />
      </.modal>
"""
def modal(assigns) do
assigns =
assigns
|> assign_new(:class, fn -> "" end)
~H"""
<div class="fixed z-[10000] inset-0 fade-in" phx-remove={JS.transition("fade-out")}>
<!-- Modal container -->
<div class="h-screen flex items-center justify-center p-4">
<!-- Overlay -->
<div class="absolute inset-0 bg-gray-500 opacity-75 z-0" aria-hidden="true"></div>
<!-- Modal box -->
<div class={"relative max-h-full overflow-y-auto bg-white rounded-lg shadow-xl #{@class}"}
role="dialog"
aria-modal="true"
phx-window-keydown={click_modal_close()}
phx-click-away={click_modal_close()}
phx-key="escape">
<%= live_patch to: @return_to,
class: "absolute top-6 right-6 text-gray-400 flex space-x-1 items-center",
aria_label: "close modal",
id: "close-modal-button" do %>
<span class="text-sm">(esc)</span>
<.remix_icon icon="close-line" class="text-2xl" />
<% end %>
<%= render_slot(@inner_block) %>
</div>
</div>
</div>
"""
end
defp click_modal_close(js \\ %JS{}) do
JS.dispatch(js, "click", to: "#close-modal-button")
end
@doc """
Determines user platform based on the given *User-Agent* header.
"""
@spec platform_from_user_agent(String.t()) :: :linux | :mac | :windows | :other
def platform_from_user_agent(user_agent) when is_binary(user_agent) do
cond do
linux?(user_agent) -> :linux
mac?(user_agent) -> :mac
windows?(user_agent) -> :windows
true -> :other
end
end
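  # For example (illustrative user-agent fragment):
  #
  #     platform_from_user_agent("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) ...")
  #     #=> :mac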
defp linux?(user_agent), do: String.match?(user_agent, ~r/Linux/)
defp mac?(user_agent), do: String.match?(user_agent, ~r/Mac OS X/)
defp windows?(user_agent), do: String.match?(user_agent, ~r/Windows/)
@doc """
  Returns the path to a specific process dialog within LiveDashboard.
"""
def live_dashboard_process_path(socket, pid) do
pid_str = Phoenix.LiveDashboard.PageBuilder.encode_pid(pid)
Routes.live_dashboard_path(socket, :page, node(), "processes", info: pid_str)
end
@doc """
  Converts human-readable strings to strings that can be used
  as HTML element IDs (compatible with HTML5).
  Duplicate IDs are suffixed with a counter to keep the results unique.
"""
@spec names_to_html_ids(list(String.t())) :: list(String.t())
def names_to_html_ids(names) do
names
|> Enum.map(&name_to_html_id/1)
|> Enum.map_reduce(%{}, fn html_id, counts ->
counts = Map.update(counts, html_id, 1, &(&1 + 1))
case counts[html_id] do
1 -> {html_id, counts}
count -> {"#{html_id}-#{count}", counts}
end
end)
|> elem(0)
end
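  # For example (illustrative):
  #
  #     names_to_html_ids(["My Section", "My Section", "Final Notes"])
  #     #=> ["my-section", "my-section-2", "final-notes"]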
defp name_to_html_id(name) do
name
|> String.trim()
|> String.downcase()
|> String.replace(~r/\s+/u, "-")
end
@doc """
  Renders a [Remix](https://remixicon.com) icon.
## Examples
<.remix_icon icon="cpu-line" />
<.remix_icon icon="cpu-line" class="align-middle mr-1" />
"""
def remix_icon(assigns) do
assigns =
assigns
|> assign_new(:class, fn -> "" end)
|> assign(:attrs, assigns_to_attributes(assigns, [:icon, :class]))
~H"""
<i class={"ri-#{@icon} #{@class}"} aria-hidden="true" {@attrs}></i>
"""
end
@doc """
Renders a list of select input options with the given one selected.
## Examples
<.select
name="language"
selected={@language}
options={[en: "English", pl: "Polski", fr: "Français"]} />
"""
def select(assigns) do
~H"""
<select class="input" name={@name}>
<%= for {value, label} <- @options do %>
<option value={value} selected={value == @selected}>
<%= label %>
</option>
<% end %>
</select>
"""
end
@doc """
Renders a checkbox input styled as a switch.
Also, a hidden input with the same name is rendered
alongside the checkbox, so the submitted value is
always either `"true"` or `"false"`.
## Examples
<.switch_checkbox
name="likes_cats"
label="I very much like cats"
checked={@likes_cats} />
"""
def switch_checkbox(assigns) do
assigns =
assigns
|> assign_new(:label, fn -> nil end)
|> assign_new(:disabled, fn -> false end)
|> assign_new(:class, fn -> "" end)
|> assign(
:attrs,
assigns_to_attributes(assigns, [:label, :name, :checked, :disabled, :class])
)
~H"""
<div class="flex space-x-3 items-center justify-between">
<%= if @label do %>
<span class="text-gray-700"><%= @label %></span>
<% end %>
<label class={"switch-button #{if(@disabled, do: "switch-button--disabled")}"}>
<input type="hidden" value="false" name={@name} />
<input
type="checkbox"
value="true"
class={"switch-button__checkbox #{@class}"}
name={@name}
checked={@checked}
{@attrs} />
<div class="switch-button__bg"></div>
</label>
</div>
"""
end
@doc """
Renders a choice button that is either active or not.
## Examples
<.choice_button active={@tab == "my_tab"} phx-click="set_my_tab">
My tab
</.choice_button>
"""
def choice_button(assigns) do
assigns =
assigns
|> assign_new(:class, fn -> "" end)
|> assign_new(:disabled, fn -> assigns.active end)
|> assign(:attrs, assigns_to_attributes(assigns, [:active, :class, :disabled]))
~H"""
<button class={"choice-button #{if(@active, do: "active")} #{@class}"} disabled={@disabled} {@attrs}>
<%= render_slot(@inner_block) %>
</button>
"""
end
@doc """
Renders a highlighted code snippet.
## Examples
<.code_preview
source_id="my-snippet"
language="elixir"
source="System.version()" />
"""
def code_preview(assigns) do
~H"""
<div class="markdown">
<pre><code
class="tiny-scrollbar"
id={"#{@source_id}-highlight"}
phx-hook="Highlight"
data-language={@language}><div id={@source_id} data-source><%= @source %></div><div data-target></div></code></pre>
</div>
"""
end
@doc """
Renders text with a tiny label.
## Examples
<.labeled_text label="Name" text="Sherlock Holmes" />
"""
def labeled_text(assigns) do
~H"""
<div class="flex flex-col space-y-1">
<span class="text-xs text-gray-500">
<%= @label %>
</span>
<span class="text-gray-800 text-sm font-semibold">
<%= @text %>
</span>
</div>
"""
end
@doc """
  Renders a wrapper around a password input
with an added visibility toggle button.
The toggle switches the input's type between `password`
and `text`.
## Examples
<.with_password_toggle id="input-id">
<input type="password" ...>
</.with_password_toggle>
"""
def with_password_toggle(assigns) do
~H"""
<div id={"password-toggle-#{@id}"} class="relative inline w-min" phx-hook="PasswordToggle">
<!-- render password input -->
<%= render_slot(@inner_block) %>
<button
class="bg-gray-50 p-1 icon-button absolute inset-y-0 right-1"
type="button"
aria-label="toggle password visibility"
phx-change="ignore">
<.remix_icon icon="eye-line" class="text-xl" />
</button>
</div>
"""
end
defdelegate ansi_string_to_html(string), to: LivebookWeb.Helpers.ANSI
defdelegate ansi_string_to_html_lines(string), to: LivebookWeb.Helpers.ANSI
@doc """
Renders an icon representing the given file system.
"""
def file_system_icon(assigns)
def file_system_icon(%{file_system: %FileSystem.Local{}} = assigns) do
~H"""
<.remix_icon icon="hard-drive-2-line leading-none" />
"""
end
def file_system_icon(%{file_system: %FileSystem.S3{}} = assigns) do
~H"""
<i class="not-italic">
<span class="text-[0.75em] font-semibold align-middle">S3</span>
</i>
"""
end
@doc """
Formats the given file system into a descriptive label.
"""
def file_system_label(file_system)
def file_system_label(%FileSystem.Local{}), do: "Local disk"
def file_system_label(%FileSystem.S3{} = fs), do: fs.bucket_url
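  # For example (illustrative):
  #
  #     file_system_label(%FileSystem.Local{})
  #     #=> "Local disk"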
end
| 26.95679 | 123 | 0.592283 |
9eb818ca10f3dfa9f7406c07cf782575e82fcdfa | 703 | ex | Elixir | lib/conduit/plug/created_by.ex | peek-travel/conduit | f37b96513769bdf70c89eebd77bb03023d248158 | [
"MIT"
] | 119 | 2016-11-21T13:19:22.000Z | 2021-11-07T17:29:05.000Z | lib/conduit/plug/created_by.ex | peek-travel/conduit | f37b96513769bdf70c89eebd77bb03023d248158 | [
"MIT"
] | 104 | 2018-02-02T20:42:46.000Z | 2021-08-03T05:36:09.000Z | lib/conduit/plug/created_by.ex | peek-travel/conduit | f37b96513769bdf70c89eebd77bb03023d248158 | [
"MIT"
] | 21 | 2018-08-03T02:38:21.000Z | 2022-03-16T18:26:58.000Z | defmodule Conduit.Plug.CreatedBy do
use Conduit.Plug.Builder
@moduledoc """
  Assigns the name of the app to the `created_by` field of the message.
## Examples
iex> defmodule MyPipeline do
iex> use Conduit.Plug.Builder
iex>
iex> plug Conduit.Plug.CreatedBy, app: "myapp"
iex> end
iex> message = MyPipeline.run(%Conduit.Message{})
iex> message.created_by
"myapp"
"""
def init(opts) do
_ = Keyword.fetch!(opts, :app)
opts
end
@doc """
Assigns created_by.
"""
def call(message, next, opts) do
created_by =
opts
|> Keyword.get(:app)
|> to_string
message
|> put_created_by(created_by)
|> next.()
end
end
| 17.575 | 55 | 0.603129 |
9eb81b79e35d2e65d0c8d7acecad2a56ffb13d05 | 3,569 | ex | Elixir | lib/elixir/lib/map_set.ex | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/map_set.ex | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/map_set.ex | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | defmodule MapSet do
@moduledoc """
A set store.
The `MapSet` is represented internally as a struct, therefore
`%MapSet{}` can be used whenever there is a need to match
  on any `MapSet`. Note, though, that the struct fields are private and
  must not be accessed directly. Instead, use the functions in this
  module or in the `Set` module.
  `MapSet` is implemented on top of the `Map` data type.
For more information about the functions
and their APIs, please consult the `Set` module.
"""
@behaviour Set
@opaque t :: %__MODULE__{map: map}
defstruct map: %{}
@spec new :: t
def new(), do: %MapSet{}
@doc """
Creates a mapset from an enumerable.
## Examples
iex> MapSet.new([:b, :a, 3])
#MapSet<[3, :a, :b]>
iex> MapSet.new([3, 3, 3, 2, 2, 1])
#MapSet<[1, 2, 3]>
"""
@spec new(Enum.t) :: t
def new(enumerable) do
Enum.reduce(enumerable, %MapSet{}, &put(&2, &1))
end
@doc """
Creates a mapset from an enumerable via the transformation function.
## Examples
iex> MapSet.new([1, 2, 1], fn x -> 2 * x end)
#MapSet<[2, 4]>
"""
@spec new(Enum.t, (term -> term)) :: t
def new(enumerable, transform) do
Enum.reduce(enumerable, %MapSet{}, &put(&2, transform.(&1)))
end
def delete(%MapSet{map: map} = set, term) do
%{set | map: Map.delete(map, term)}
end
def difference(%MapSet{} = set1, %MapSet{} = set2) do
reduce(set2, {:cont, set1}, fn value, acc ->
{:cont, delete(acc, value)}
end) |> elem(1)
end
def disjoint?(%MapSet{} = set1, %MapSet{} = set2) do
    {set1, set2} = if size(set1) > size(set2), do: {set2, set1}, else: {set1, set2}
reduce(set1, {:cont, true}, fn value, _ ->
if member?(set2, value) do
{:halt, false}
else
{:cont, true}
end
end) |> elem(1)
end
def equal?(%MapSet{map: map1}, %MapSet{map: map2}) do
Map.equal?(map1, map2)
end
def intersection(%MapSet{} = set1, %MapSet{} = set2) do
    {set1, set2} = if size(set1) > size(set2), do: {set2, set1}, else: {set1, set2}
reduce(set1, {:cont, new}, fn value, acc ->
if member?(set2, value) do
{:cont, put(acc, value)}
else
{:cont, acc}
end
end) |> elem(1)
end
def member?(%MapSet{map: map}, value) do
Map.has_key?(map, value)
end
def put(%MapSet{map: map} = set, value) do
%{set | map: Map.put(map, value, nil)}
end
def size(%MapSet{map: map}) do
map_size(map)
end
def subset?(%MapSet{} = set1, %MapSet{} = set2) do
if size(set1) <= size(set2) do
reduce(set1, {:cont, true}, fn value, _ ->
if member?(set2, value), do: {:cont, true}, else: {:halt, false}
end) |> elem(1)
else
false
end
end
@doc false
def reduce(%MapSet{} = set, acc, fun) do
Enumerable.List.reduce(to_list(set), acc, fun)
end
def to_list(%MapSet{map: map}) do
Map.keys(map)
end
def union(%MapSet{map: map1}, %MapSet{map: map2}) do
%MapSet{map: Map.merge(map1, map2)}
end
defimpl Enumerable do
def reduce(set, acc, fun), do: MapSet.reduce(set, acc, fun)
def member?(set, val), do: {:ok, MapSet.member?(set, val)}
def count(set), do: {:ok, MapSet.size(set)}
end
defimpl Collectable do
def into(original) do
{original, fn
set, {:cont, x} -> MapSet.put(set, x)
set, :done -> set
_, :halt -> :ok
end}
end
end
defimpl Inspect do
import Inspect.Algebra
def inspect(set, opts) do
concat ["#MapSet<", Inspect.List.inspect(MapSet.to_list(set), opts), ">"]
end
end
end
| 23.793333 | 79 | 0.579994 |
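A short illustrative IEx session for the set operations defined in the MapSet module above; the values are made up, and the #MapSet<...> output follows the Inspect implementation at the end of that module:

    iex> a = MapSet.new([1, 2, 3])
    iex> b = MapSet.new([3, 4])
    iex> MapSet.union(a, b)
    #MapSet<[1, 2, 3, 4]>
    iex> MapSet.intersection(a, b)
    #MapSet<[3]>
    iex> MapSet.difference(a, b)
    #MapSet<[1, 2]>
    iex> MapSet.subset?(b, a)
    false
    iex> MapSet.disjoint?(MapSet.new([9]), a)
    true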
9eb82494d459c51ca2570670337d7a032bc28c05 | 831 | exs | Elixir | test/gcs_signed_url/iso_date_time_test.exs | code-supply/gcs_signed_url | e3e72b8b11e80c47899713189f03d94e28b2a831 | [
"MIT"
] | 16 | 2019-07-25T16:05:20.000Z | 2022-02-09T06:24:55.000Z | test/gcs_signed_url/iso_date_time_test.exs | code-supply/gcs_signed_url | e3e72b8b11e80c47899713189f03d94e28b2a831 | [
"MIT"
] | 53 | 2020-04-21T16:35:49.000Z | 2022-03-28T13:04:07.000Z | test/gcs_signed_url/iso_date_time_test.exs | code-supply/gcs_signed_url | e3e72b8b11e80c47899713189f03d94e28b2a831 | [
"MIT"
] | 10 | 2020-04-21T12:19:47.000Z | 2021-11-19T12:00:06.000Z | defmodule GcsSignedUrl.ISODateTimeTest do
use ExUnit.Case
alias GcsSignedUrl.ISODateTime, as: MUT
describe "generate/0" do
test "returns a DateTime Struct" do
iso_date_time = MUT.generate()
assert MUT == iso_date_time.__struct__
end
end
describe "generate/1" do
test "returns correct data" do
date_time = %DateTime{
year: 2000,
month: 2,
day: 29,
zone_abbr: "AMT",
hour: 23,
minute: 0,
second: 7,
microsecond: {0, 0},
utc_offset: 7200,
std_offset: 0,
time_zone: "Europe/Zurich"
}
iso_date_time = MUT.generate(date_time)
assert MUT == iso_date_time.__struct__
assert "20000229T210007Z" == iso_date_time.datetime
assert "20000229" == iso_date_time.date
end
end
end
| 23.083333 | 57 | 0.614922 |
9eb85d91f6808df7cfe516f094f27cb86e72540d | 1,484 | ex | Elixir | node_modules/@snyk/snyk-hex-plugin/elixirsrc/lib/mix_project.ex | muhamarief/cobafrappe | 9f4c787338873e774d73779a8f1cee168daa2b62 | [
"MIT"
] | null | null | null | node_modules/@snyk/snyk-hex-plugin/elixirsrc/lib/mix_project.ex | muhamarief/cobafrappe | 9f4c787338873e774d73779a8f1cee168daa2b62 | [
"MIT"
] | null | null | null | node_modules/@snyk/snyk-hex-plugin/elixirsrc/lib/mix_project.ex | muhamarief/cobafrappe | 9f4c787338873e774d73779a8f1cee168daa2b62 | [
"MIT"
] | null | null | null | import Snyk.MixProject.Common
defmodule Snyk.MixProject.Mix.Project do
def load_mix_project(""), do: error("Please provide a valid path for the project")
def load_mix_project(path) do
manifest = load_manifest(path)
apps = get_apps(manifest[:apps_path], path)
lock_file_name = get_lock_file_name(manifest[:lock_file])
lock_file_path = Path.join(path, lock_file_name)
lock_file = read_file(lock_file_path)
%{
manifest: manifest,
lock: lock_file,
apps: apps
}
end
defp read_file(path) do
Path.expand(path)
|> Code.eval_file()
end
defp get_lock_file_name(nil), do: get_lock_file_name("")
defp get_lock_file_name(""), do: "mix.lock"
defp get_lock_file_name(filename), do: filename
defp load_manifest(path), do: load_manifest(path, "root_app")
defp load_manifest(path, app) do
Mix.Project.in_project(String.to_atom(app), path, fn module ->
module.project ++ [module_name: inspect(module)]
end)
end
defp get_apps(nil, _), do: nil
defp get_apps(apps_path, path) do
Path.join([path, apps_path, "/*/mix.exs"])
|> Path.absname
|> Path.wildcard
|> Enum.map(fn path -> Path.dirname(path) end)
|> Enum.reduce(
%{},
fn full_project_path, acc ->
relative_project_path = Path.relative_to(full_project_path, path)
Map.put(acc, relative_project_path, load_manifest(full_project_path, relative_project_path))
end
)
end
end
| 28 | 103 | 0.677224 |
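A hedged sketch of how the loader above might be invoked; the project path is hypothetical, and the shape of `lock` follows from read_file/1 delegating to Code.eval_file/1, which returns a {terms, bindings} tuple:

    # Hypothetical path; assumes a Mix project (or umbrella) lives there.
    %{manifest: manifest, lock: {locked_deps, _bindings}, apps: apps} =
      Snyk.MixProject.Mix.Project.load_mix_project("/path/to/project")

    manifest[:module_name]  # the project's MixProject module name, as a string
    apps                    # nil for a plain project, a map of app manifests for an umbrella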
9eb85e92cf07bc0bad7f4b30da83e7a3024beab7 | 1,407 | exs | Elixir | test/still/compiler/incremental/node_test.exs | fmterrorf/still | fd316bcc38cdf618444dbd36ec9d259fe5256a3e | [
"0BSD"
] | null | null | null | test/still/compiler/incremental/node_test.exs | fmterrorf/still | fd316bcc38cdf618444dbd36ec9d259fe5256a3e | [
"0BSD"
] | null | null | null | test/still/compiler/incremental/node_test.exs | fmterrorf/still | fd316bcc38cdf618444dbd36ec9d259fe5256a3e | [
"0BSD"
] | null | null | null | defmodule Still.Compiler.Incremental.NodeTest do
use Still.Case, async: false
alias Still.Compiler.Incremental.{Registry, Node}
alias Still.Preprocessor.{Frontmatter, Slime, AddLayout, AddContent, Save, OutputPath}
@preprocessors [
AddContent,
Frontmatter,
Slime,
AddLayout,
OutputPath,
Save
]
setup do
Application.put_env(:still, :preprocessors, %{
".slime" => @preprocessors
})
end
describe "compile" do
test "compiles a file" do
pid = Registry.get_or_create_file_process("about.slime")
Node.compile(pid)
assert File.exists?(get_output_path("about.html"))
end
test "notifies subscribers" do
file_pid = Registry.get_or_create_file_process("about.slime")
:erlang.trace(file_pid, true, [:receive])
other_pid = Registry.get_or_create_file_process("_includes/header.slime")
Node.render(other_pid, %{dependency_chain: ["about.slime"]}, "about.slime")
Node.compile(other_pid)
assert_receive {:trace, ^file_pid, :receive, {:"$gen_call", _, :compile}}, 500
end
end
describe "render" do
test "renders a file" do
pid = Registry.get_or_create_file_process("_includes/header.slime")
content = Node.render(pid, %{dependency_chain: ["about.slime"]}, "about.slime")
assert %{content: "<header><p>This is a header</p></header>"} = content
end
end
end
| 25.125 | 88 | 0.675195 |
9eb87cd24a391f52a69417621c3d1eac99070d5e | 2,332 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/absolute_date_range.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/absolute_date_range.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/absolute_date_range.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.AbsoluteDateRange do
@moduledoc """
An absolute date range, specified by its start date and end date. The supported range of dates begins 30 days before today and ends today. Validity checked upon filter set creation. If a filter set with an absolute date range is run at a later date more than 30 days after start_date, it will fail.
## Attributes
* `endDate` (*type:* `GoogleApi.AdExchangeBuyer.V2beta1.Model.Date.t`, *default:* `nil`) - The end date of the range (inclusive). Must be within the 30 days leading up to current date, and must be equal to or after start_date.
* `startDate` (*type:* `GoogleApi.AdExchangeBuyer.V2beta1.Model.Date.t`, *default:* `nil`) - The start date of the range (inclusive). Must be within the 30 days leading up to current date, and must be equal to or before end_date.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:endDate => GoogleApi.AdExchangeBuyer.V2beta1.Model.Date.t() | nil,
:startDate => GoogleApi.AdExchangeBuyer.V2beta1.Model.Date.t() | nil
}
field(:endDate, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.Date)
field(:startDate, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.Date)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.AbsoluteDateRange do
def decode(value, options) do
GoogleApi.AdExchangeBuyer.V2beta1.Model.AbsoluteDateRange.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.AbsoluteDateRange do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 46.64 | 300 | 0.754288 |
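A construction sketch for the AbsoluteDateRange model above. It assumes the companion GoogleApi.AdExchangeBuyer.V2beta1.Model.Date struct exposes year/month/day fields; the dates are made up and would have to fall inside the 30-day window the moduledoc describes:

    alias GoogleApi.AdExchangeBuyer.V2beta1.Model.{AbsoluteDateRange, Date}

    range = %AbsoluteDateRange{
      startDate: %Date{year: 2019, month: 6, day: 1},
      endDate: %Date{year: 2019, month: 6, day: 7}
    }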
9eb90072b7eba14735a454dcf84a7da565248140 | 1,918 | exs | Elixir | test/conduit/blog/aggregates/comment_test.exs | rudyyazdi/conduit | 8defa60962482fb81f5093ea5d58b71a160db3c4 | [
"MIT"
] | null | null | null | test/conduit/blog/aggregates/comment_test.exs | rudyyazdi/conduit | 8defa60962482fb81f5093ea5d58b71a160db3c4 | [
"MIT"
] | 2 | 2022-01-15T02:09:30.000Z | 2022-01-22T10:18:43.000Z | test/conduit/blog/aggregates/comment_test.exs | rudyyazdi/conduit | 8defa60962482fb81f5093ea5d58b71a160db3c4 | [
"MIT"
] | null | null | null | defmodule Conduit.Blog.CommentTest do
use Conduit.AggregateCase, aggregate: Conduit.Blog.Aggregates.Comment
alias Conduit.Blog.Commands.{
DeleteComment,
}
alias Conduit.Blog.Events.{
ArticleCommented,
CommentDeleted,
}
describe "comment on article" do
@tag :unit
test "should succeed when valid" do
comment_uuid = UUID.uuid4()
article_uuid = UUID.uuid4()
author_uuid = UUID.uuid4()
assert_events build(:comment_on_article, comment_uuid: comment_uuid, article_uuid: article_uuid, author_uuid: author_uuid), [
%ArticleCommented{
comment_uuid: comment_uuid,
body: "It takes a Jacobian",
article_uuid: article_uuid,
author_uuid: author_uuid,
},
]
end
end
describe "delete comment" do
@tag :unit
test "should succeed when deleted by comment author" do
comment_uuid = UUID.uuid4()
article_uuid = UUID.uuid4()
user_uuid = UUID.uuid4()
assert_events [
build(:comment_on_article, comment_uuid: comment_uuid, article_uuid: article_uuid, author_uuid: user_uuid),
%DeleteComment{comment_uuid: comment_uuid, deleted_by_author_uuid: user_uuid},
], [
%CommentDeleted{
comment_uuid: comment_uuid,
article_uuid: article_uuid,
author_uuid: user_uuid,
},
]
end
@tag :unit
test "should fail when delete attempted by another user" do
comment_uuid = UUID.uuid4()
article_uuid = UUID.uuid4()
author_uuid = UUID.uuid4()
deleted_by_author_uuid = UUID.uuid4()
assert_error [
build(:comment_on_article, comment_uuid: comment_uuid, article_uuid: article_uuid, author_uuid: author_uuid),
%DeleteComment{comment_uuid: comment_uuid, deleted_by_author_uuid: deleted_by_author_uuid},
], {:error, :only_comment_author_can_delete}
end
end
end
| 29.96875 | 131 | 0.67414 |
9eb96743038fbbe2f655871bd6d0f1347ba49fdc | 214 | ex | Elixir | lib/retrospectivex_web/channels/user_socket.ex | dreamingechoes/retrospectivex | cad0df6cfde5376121d841f4a8b36861b6ec5d45 | [
"MIT"
] | 5 | 2018-06-27T17:51:51.000Z | 2020-10-05T09:59:04.000Z | lib/retrospectivex_web/channels/user_socket.ex | dreamingechoes/retrospectivex | cad0df6cfde5376121d841f4a8b36861b6ec5d45 | [
"MIT"
] | 1 | 2018-10-08T11:33:12.000Z | 2018-10-08T11:33:12.000Z | lib/retrospectivex_web/channels/user_socket.ex | dreamingechoes/retrospectivex | cad0df6cfde5376121d841f4a8b36861b6ec5d45 | [
"MIT"
] | 2 | 2018-10-08T11:31:55.000Z | 2020-10-05T09:59:05.000Z | defmodule RetrospectivexWeb.UserSocket do
use Phoenix.Socket
## Channels
channel("frankt", RetrospectivexWeb.FranktChannel)
def connect(_params, socket), do: {:ok, socket}
def id(_socket), do: nil
end
| 19.454545 | 52 | 0.738318 |
9eb9b77a1f598328d493a65c2a043d190b0405dd | 1,394 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/http_health_check.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/http_health_check.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/http_health_check.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.HttpHealthCheck do
@moduledoc """
An HttpHealthCheck resource. This resource defines a template for how individual instances should be checked for health, via HTTP.
"""
@derive [Poison.Encoder]
defstruct [
:"host",
:"checkIntervalSec",
:"creationTimestamp",
:"description",
:"healthyThreshold",
:"id",
:"kind",
:"name",
:"port",
:"requestPath",
:"selfLink",
:"timeoutSec",
:"unhealthyThreshold"
]
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.HttpHealthCheck do
def decode(value, _options) do
value
end
end
| 28.44898 | 132 | 0.718077 |
9eb9c8eac3e5515611515c4fa0d42315925d6d72 | 1,567 | ex | Elixir | clients/manufacturers/lib/google_api/manufacturers/v1/model/destination_status.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/manufacturers/lib/google_api/manufacturers/v1/model/destination_status.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/manufacturers/lib/google_api/manufacturers/v1/model/destination_status.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Manufacturers.V1.Model.DestinationStatus do
@moduledoc """
The destination status.
## Attributes
* `destination` (*type:* `String.t`, *default:* `nil`) - The name of the destination.
* `status` (*type:* `String.t`, *default:* `nil`) - The status of the destination.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:destination => String.t() | nil,
:status => String.t() | nil
}
field(:destination)
field(:status)
end
defimpl Poison.Decoder, for: GoogleApi.Manufacturers.V1.Model.DestinationStatus do
def decode(value, options) do
GoogleApi.Manufacturers.V1.Model.DestinationStatus.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Manufacturers.V1.Model.DestinationStatus do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 31.34 | 89 | 0.723038 |
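Because the DestinationStatus model above wires its Poison encoder and decoder through GoogleApi.Gax.ModelBase, a JSON payload can be decoded into it with Poison's standard `as:` option; the payload below is invented for illustration:

    json = ~s({"destination": "Shopping", "status": "ACTIVE"})

    %GoogleApi.Manufacturers.V1.Model.DestinationStatus{destination: destination, status: status} =
      Poison.decode!(json, as: %GoogleApi.Manufacturers.V1.Model.DestinationStatus{})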
9eb9ca78d34a0acb661be1a793efc0ab2b9167bf | 1,435 | ex | Elixir | lib/packet/utils.ex | qwexvf/McProtocol | a9e243e2d6de7b51e049881c8ea8f12bff94feb1 | [
"MIT"
] | 21 | 2016-04-23T03:54:33.000Z | 2021-07-08T12:03:44.000Z | lib/packet/utils.ex | qwexvf/McProtocol | a9e243e2d6de7b51e049881c8ea8f12bff94feb1 | [
"MIT"
] | 2 | 2016-04-06T07:01:18.000Z | 2017-04-15T11:09:23.000Z | lib/packet/utils.ex | hansihe/elixir_mc_protocol | 4b3011338af573c6f583f541c410fb23574f4c10 | [
"MIT"
] | 9 | 2016-04-09T21:05:48.000Z | 2021-07-27T12:42:49.000Z | defmodule McProtocol.Packet.Utils do
@moduledoc false
def state_name_to_ident("play"), do: :Play
def state_name_to_ident("handshaking"), do: :Handshake
def state_name_to_ident("status"), do: :Status
def state_name_to_ident("login"), do: :Login
def direction_name_to_ident("toClient"), do: :Server
def direction_name_to_ident("toServer"), do: :Client
def extract_packet_mappings(typ) do
["container", [id_mapper, name_switch]] = typ
%{"name" => "name", "type" => ["mapper", %{"mappings" => id_mappings}]} = id_mapper
%{"name" => "params", "type" => ["switch", %{"fields" => name_fields}]} = name_switch
Enum.map(id_mappings, fn {hex_id, packet_name} ->
id = parse_hex_num(hex_id)
name = packet_name
|> Macro.camelize
|> McProtocol.Packet.Overrides.packet_name
|> String.to_atom
type_name = name_fields[packet_name]
{id, name, type_name}
end)
end
def parse_hex_num("0x" <> num) do
{parsed, ""} = Integer.parse(num, 16)
parsed
end
def make_module_name(direction, state, ident) do
Module.concat([
McProtocol.Packet,
direction,
state,
ident
])
end
def pmap(collection, fun) do
    me = self()
    collection
    |> Enum.map(fn (elem) ->
      spawn_link fn -> send(me, {self(), fun.(elem)}) end
end)
|> Enum.map(fn (pid) ->
receive do { ^pid, result } -> result end
end)
end
end
| 26.090909 | 89 | 0.626481 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.