hexsha stringlengths 40..40 | size int64 2..991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4..208 | max_stars_repo_name stringlengths 6..106 | max_stars_repo_head_hexsha stringlengths 40..40 | max_stars_repo_licenses sequence | max_stars_count int64 1..33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24..24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24..24 ⌀ | max_issues_repo_path stringlengths 4..208 | max_issues_repo_name stringlengths 6..106 | max_issues_repo_head_hexsha stringlengths 40..40 | max_issues_repo_licenses sequence | max_issues_count int64 1..16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24..24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24..24 ⌀ | max_forks_repo_path stringlengths 4..208 | max_forks_repo_name stringlengths 6..106 | max_forks_repo_head_hexsha stringlengths 40..40 | max_forks_repo_licenses sequence | max_forks_count int64 1..6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24..24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24..24 ⌀ | content stringlengths 2..991k | avg_line_length float64 1..36k | max_line_length int64 1..977k | alphanum_fraction float64 0..1
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
731d3b1c47741081e3b8533bb7efc5588f6bc665 | 460 | exs | Elixir | test/test_helper.exs | knewter/hound | 49ca7d71f7fb0d9a9de14afa86ca1a1fe5ae1278 | [
"MIT"
] | 1 | 2015-07-08T04:32:02.000Z | 2015-07-08T04:32:02.000Z | test/test_helper.exs | knewter/hound | 49ca7d71f7fb0d9a9de14afa86ca1a1fe5ae1278 | [
"MIT"
] | null | null | null | test/test_helper.exs | knewter/hound | 49ca7d71f7fb0d9a9de14afa86ca1a1fe5ae1278 | [
"MIT"
] | null | null | null | :application.start :inets
test_server_config = [
port: 9090,
server_name: 'hound_test_server',
server_root: Path.absname('test/sample_pages'),
document_root: Path.absname('test/sample_pages'),
bind_address: {127,0,0,1}
]
{:ok, pid} = :inets.start(:httpd, test_server_config)
{:ok, _hound_pid} = Hound.start([driver: System.get_env("WEBDRIVER")])
System.at_exit fn(_exit_status) ->
:ok = :inets.stop(:httpd, pid)
end
ExUnit.start [max_cases: 5]
| 24.210526 | 70 | 0.715217 |
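A minimal companion sketch for the test helper above, assuming Hound's stock helpers (hound_session/0, navigate_to/1, current_url/0) and a hypothetical fixture name under test/sample_pages — with the inets httpd server bound to 127.0.0.1:9090, a session-backed test can exercise the sample pages:

defmodule SamplePageTest do
  use ExUnit.Case
  use Hound.Helpers

  # Starts and tears down a WebDriver session around each test.
  hound_session()

  test "the sample pages are served by the local inets httpd" do
    # some_page.html is a placeholder fixture name, not taken from the repo.
    navigate_to("http://localhost:9090/some_page.html")
    assert current_url() =~ "localhost:9090"
  end
end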
731d70718b992893ea7fd815d40201e2feb6e943 | 2,501 | exs | Elixir | apps/banking_api/test/accounts_test.exs | ckoliveira/banking_api | 3a4077a97e2cfcb2475bafd582a836bd03f6c6cc | [
"Apache-2.0"
] | null | null | null | apps/banking_api/test/accounts_test.exs | ckoliveira/banking_api | 3a4077a97e2cfcb2475bafd582a836bd03f6c6cc | [
"Apache-2.0"
] | null | null | null | apps/banking_api/test/accounts_test.exs | ckoliveira/banking_api | 3a4077a97e2cfcb2475bafd582a836bd03f6c6cc | [
"Apache-2.0"
] | null | null | null | defmodule BankingApi.AccountsTest do
use BankingApi.DataCase, async: true
alias BankingApi.User
alias BankingApi.Account
describe "test withdraws" do
setup do
password = "8ji898jdjav"
params = %{
name: "user",
cpf: "12332112331",
password_hash: Argon2.hash_pwd_salt(password)
}
user = User.create(params)
{:ok, params: params, user: user, password: password}
end
test "try to withdraw from account with enough balance", ctx do
params = ctx.params
assert {:ok, _account} = Account.withdraw(params.cpf, ctx.password, 500)
end
test "try to withdraw from account with not enough balance", ctx do
params = ctx.params
assert {:error, :not_enough_balance} = Account.withdraw(params.cpf, ctx.password, 1001)
end
test "try to withdraw from non existent account" do
pwd = "8h598thuhf4"
cpf = "11122433345"
assert {:error, :user_not_found} = Account.withdraw(cpf, pwd, 500)
end
end
describe "test transferences" do
setup do
pwd1 = "7yhf9h85g8h8"
pwd2 = "nij09j48j8j92"
params1 = %{
cpf: "11122233345",
name: "maurizio",
password_hash: Argon2.hash_pwd_salt(pwd1)
}
params2 = %{
cpf: "55544433321",
name: "julia",
password_hash: Argon2.hash_pwd_salt(pwd2)
}
User.create(params1)
User.create(params2)
{:ok, params1: params1, params2: params2, pwd1: pwd1, pwd2: pwd2}
end
test "try to transfer from account with enough balance", ctx do
params1 = ctx.params1
params2 = ctx.params2
assert {:ok, _account1, _account2} =
Account.transfer(params1.cpf, ctx.pwd1, params2.cpf, 100)
end
test "try to transfer from account with not enough balance", ctx do
params1 = ctx.params1
params2 = ctx.params2
assert {:error, :not_enough_balance} =
Account.transfer(params1.cpf, ctx.pwd1, params2.cpf, 1001)
end
test "try to transfer to non existent account", ctx do
params1 = ctx.params1
assert {:error, :user_not_found} =
Account.transfer(params1.cpf, ctx.pwd1, "99988877700", 200)
end
test "try to to transfer passing wrong password", ctx do
params1 = ctx.params1
params2 = ctx.params2
assert {:error, :invalid_password} =
Account.transfer(params1.cpf, "wrong password", params2.cpf, 200)
end
end
end
| 26.606383 | 93 | 0.628948 |
731db053cd8239629defef38086cbae0afc44638 | 3,635 | exs | Elixir | test/books_web/live/page_live_test.exs | nickagliano/books | eec595ed4add9d678278785d9ab10106e1e426d1 | [
"MIT"
] | null | null | null | test/books_web/live/page_live_test.exs | nickagliano/books | eec595ed4add9d678278785d9ab10106e1e426d1 | [
"MIT"
] | null | null | null | test/books_web/live/page_live_test.exs | nickagliano/books | eec595ed4add9d678278785d9ab10106e1e426d1 | [
"MIT"
] | null | null | null | defmodule BooksWeb.PageLiveTest do
use BooksWeb.ConnCase
import Phoenix.LiveViewTest
alias Books.Pages
@create_attrs %{passive_ability: 42, serial_number: 42, title: "some title", type: 42}
@update_attrs %{passive_ability: 43, serial_number: 43, title: "some updated title", type: 43}
@invalid_attrs %{passive_ability: nil, serial_number: nil, title: nil, type: nil}
defp fixture(:page) do
{:ok, page} = Pages.create_page(@create_attrs)
page
end
defp create_page(_) do
page = fixture(:page)
%{page: page}
end
describe "Index" do
setup [:create_page]
test "lists all pages", %{conn: conn, page: page} do
{:ok, _index_live, html} = live(conn, Routes.page_index_path(conn, :index))
assert html =~ "Listing Pages"
assert html =~ page.title
end
test "saves new page", %{conn: conn} do
{:ok, index_live, _html} = live(conn, Routes.page_index_path(conn, :index))
assert index_live |> element("a", "New Page") |> render_click() =~
"New Page"
assert_patch(index_live, Routes.page_index_path(conn, :new))
assert index_live
|> form("#page-form", page: @invalid_attrs)
|> render_change() =~ "can't be blank"
{:ok, _, html} =
index_live
|> form("#page-form", page: @create_attrs)
|> render_submit()
|> follow_redirect(conn, Routes.page_index_path(conn, :index))
assert html =~ "Page created successfully"
assert html =~ "some title"
end
test "updates page in listing", %{conn: conn, page: page} do
{:ok, index_live, _html} = live(conn, Routes.page_index_path(conn, :index))
assert index_live |> element("#page-#{page.id} a", "Edit") |> render_click() =~
"Edit Page"
assert_patch(index_live, Routes.page_index_path(conn, :edit, page))
assert index_live
|> form("#page-form", page: @invalid_attrs)
|> render_change() =~ "can't be blank"
{:ok, _, html} =
index_live
|> form("#page-form", page: @update_attrs)
|> render_submit()
|> follow_redirect(conn, Routes.page_index_path(conn, :index))
assert html =~ "Page updated successfully"
assert html =~ "some updated title"
end
test "deletes page in listing", %{conn: conn, page: page} do
{:ok, index_live, _html} = live(conn, Routes.page_index_path(conn, :index))
assert index_live |> element("#page-#{page.id} a", "Delete") |> render_click()
refute has_element?(index_live, "#page-#{page.id}")
end
end
describe "Show" do
setup [:create_page]
test "displays page", %{conn: conn, page: page} do
{:ok, _show_live, html} = live(conn, Routes.page_show_path(conn, :show, page))
assert html =~ "Show Page"
assert html =~ page.title
end
test "updates page within modal", %{conn: conn, page: page} do
{:ok, show_live, _html} = live(conn, Routes.page_show_path(conn, :show, page))
assert show_live |> element("a", "Edit") |> render_click() =~
"Edit Page"
assert_patch(show_live, Routes.page_show_path(conn, :edit, page))
assert show_live
|> form("#page-form", page: @invalid_attrs)
|> render_change() =~ "can't be blank"
{:ok, _, html} =
show_live
|> form("#page-form", page: @update_attrs)
|> render_submit()
|> follow_redirect(conn, Routes.page_show_path(conn, :show, page))
assert html =~ "Page updated successfully"
assert html =~ "some updated title"
end
end
end
| 31.068376 | 96 | 0.611279 |
731dc4d987876553432ce0497f3385a39f3d7918 | 1,610 | exs | Elixir | mix.exs | venndr/open_exchange_rates | 1e679ac05d8ac924a7329543ec61bd92a0a646ba | [
"MIT"
] | null | null | null | mix.exs | venndr/open_exchange_rates | 1e679ac05d8ac924a7329543ec61bd92a0a646ba | [
"MIT"
] | null | null | null | mix.exs | venndr/open_exchange_rates | 1e679ac05d8ac924a7329543ec61bd92a0a646ba | [
"MIT"
] | null | null | null | defmodule OpenExchangeRates.Mixfile do
use Mix.Project
def project do
[
app: :open_exchange_rates,
      description: "A library that can convert currencies and uses automatic updating of the currency rates using openexchangerates.org",
package: package(),
version: "0.3.0",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
docs: [extras: ["README.md", "CHANGELOG.md"]],
preferred_cli_env: [coveralls: :test, "coveralls.detail": :test, "coveralls.post": :test, "coveralls.html": :test],
test_coverage: [tool: ExCoveralls]
]
end
def application do
[
mod: {OpenExchangeRates, []},
applications: [:logger, :httpoison, :currency_formatter]
]
end
defp deps do
[
{:httpoison, "~> 1.6"},
{:poison, "~> 3.1"},
{:exvcr, "~> 0.11", only: [:dev, :test]},
{:credo, "~> 1.4", only: [:dev, :test]},
{:mix_test_watch, "~> 1.0", only: [:dev, :test]},
{:ex_doc, "~> 0.22", only: [:dev, :test]},
{:excoveralls, "~> 0.13", only: :test},
{:currency_formatter, "~> 0.8"}
]
end
defp package do
[
maintainers: ["Gerard de Brieder"],
licenses: ["MIT"],
files: ["lib", "priv", "mix.exs", "README*", "LICENSE*", "CHANGELOG*"],
links: %{
"GitHub" => "https://github.com/smeevil/open_exchange_rates",
"Docs" => "https://smeevil.github.com/open_exchange_rates",
"Changelog" => "https://smeevil.github.com/open_exchange_rates/changelog.html",
}
]
end
end
| 30.377358 | 138 | 0.571429 |
731dcb0b2bed49ead98bbcb1d6840923b66245a4 | 1,131 | exs | Elixir | vivaldi/test/coordinate_stash_test.exs | pixyj/vivaldi | 771f8d84eae9ad285b3b4f92de832b9a165d17fa | [
"MIT"
] | 13 | 2017-02-06T11:37:22.000Z | 2022-01-31T20:09:27.000Z | vivaldi/test/coordinate_stash_test.exs | pixyj/vivaldi | 771f8d84eae9ad285b3b4f92de832b9a165d17fa | [
"MIT"
] | null | null | null | vivaldi/test/coordinate_stash_test.exs | pixyj/vivaldi | 771f8d84eae9ad285b3b4f92de832b9a165d17fa | [
"MIT"
] | 1 | 2020-02-26T13:44:12.000Z | 2020-02-26T13:44:12.000Z | defmodule CoordinateStashTest do
use ExUnit.Case
alias Vivaldi.Peer.{Config, CoordinateStash}
test "Test get and set" do
conf_one = [
node_id: :a,
node_name: :"[email protected]",
session_id: 1,
peers: [],
vivaldi_ce: 0.5
]
conf_two = [
node_id: :b,
node_name: :"[email protected]",
session_id: 1,
peers: [],
vector_dimension: 3,
vivaldi_ce: 0.5
]
conf_one = Config.new(conf_one)
conf_two = Config.new(conf_two)
zero_one = %{vector: [0, 0], height: 10.0e-6, error: 1.5}
zero_two = %{vector: [0, 0, 0], height: 10.0e-6, error: 1.5}
CoordinateStash.start_link(conf_one)
CoordinateStash.start_link(conf_two)
assert CoordinateStash.get_coordinate(:a) == zero_one
assert CoordinateStash.get_coordinate(:b) == zero_two
one = %{vector: [1, 2], height: 100.0e-6}
two = %{vector: [3, 2, 1], height: 100.0e-6}
CoordinateStash.set_coordinate(:a, one)
CoordinateStash.set_coordinate(:b, two)
assert CoordinateStash.get_coordinate(:a) == one
assert CoordinateStash.get_coordinate(:b) == two
end
end | 24.06383 | 64 | 0.624226 |
731dd1d41ad4d1dfded0a13960ef8ba95ecf4e9a | 572 | ex | Elixir | lib/mipha_web/controllers/admin/company_controller.ex | ZPVIP/mipha | a7df054f72eec7de88b60d94c501488375bdff6a | [
"MIT"
] | 156 | 2018-06-01T19:52:32.000Z | 2022-02-03T10:58:10.000Z | lib/mipha_web/controllers/admin/company_controller.ex | ZPVIP/mipha | a7df054f72eec7de88b60d94c501488375bdff6a | [
"MIT"
] | 139 | 2018-07-10T01:57:23.000Z | 2021-08-02T21:29:24.000Z | lib/mipha_web/controllers/admin/company_controller.ex | ZPVIP/mipha | a7df054f72eec7de88b60d94c501488375bdff6a | [
"MIT"
] | 29 | 2018-07-17T08:43:45.000Z | 2021-12-14T13:45:30.000Z | defmodule MiphaWeb.Admin.CompanyController do
use MiphaWeb, :controller
alias Mipha.Accounts
alias Mipha.Accounts.Queries
def index(conn, params) do
result = Queries.list_companies() |> Turbo.Ecto.turbo(params)
render(conn, :index, companies: result.datas, paginate: result.paginate)
end
def delete(conn, %{"id" => id}) do
company = Accounts.get_company!(id)
{:ok, _company} = Accounts.delete_company(company)
conn
|> put_flash(:info, "Company deleted successfully.")
|> redirect(to: admin_company_path(conn, :index))
end
end
| 27.238095 | 76 | 0.706294 |
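A hedged illustration of the Turbo.Ecto call in index/2 above; the "page"/"per_page" parameter names follow Turbo.Ecto's conventions and are assumptions here, while the datas/paginate fields come straight from the controller code:

# e.g. GET /admin/companies?page=2&per_page=20
result = Turbo.Ecto.turbo(Queries.list_companies(), %{"page" => "2", "per_page" => "20"})
result.datas    # the requested page of companies, passed to the template as :companies
result.paginate # pagination metadata (current page, total entries, ...)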
731de3c4a65e3e5db0d6e886f46145b3230a764e | 615 | exs | Elixir | test/integration/unit_test/import_test.exs | elbow-jason/zigler | 3de4d6fe4def265b689bd21d3e0abad551bd2d50 | [
"MIT"
] | null | null | null | test/integration/unit_test/import_test.exs | elbow-jason/zigler | 3de4d6fe4def265b689bd21d3e0abad551bd2d50 | [
"MIT"
] | null | null | null | test/integration/unit_test/import_test.exs | elbow-jason/zigler | 3de4d6fe4def265b689bd21d3e0abad551bd2d50 | [
"MIT"
] | null | null | null | defmodule ZiglerTest.Integration.ZigTest.TransitiveTest do
use ExUnit.Case, async: true
use Zigler
import Zigler.Unit
alias ZiglerTest.ZigTest.Transitive
@moduletag :zigtest
zigtest Transitive
test "transitive zigtest inclusion via import" do
assert function_exported?(__MODULE__, :"imported test", 0)
end
test "transitive zigtest inclusion via usingnamespace" do
assert function_exported?(__MODULE__, :"namespaced test", 0)
end
test "no inclusion for non-pub imports" do
refute function_exported?(__MODULE__, :"non-pub test", 0)
assert Transitive.foo() == 47
end
end
| 23.653846 | 64 | 0.746341 |
731df7a136e98cb7e5c9297a9050a50f1aef236c | 1,801 | ex | Elixir | elixir/ocr-numbers/lib/ocr_numbers.ex | paulfioravanti/exercism | b3bf15814b89b0ef50b9bf062b3c800e80e37e75 | [
"MIT"
] | 6 | 2019-06-19T15:43:20.000Z | 2020-07-17T19:46:09.000Z | elixir/ocr-numbers/lib/ocr_numbers.ex | paulfioravanti/exercism | b3bf15814b89b0ef50b9bf062b3c800e80e37e75 | [
"MIT"
] | 10 | 2021-05-10T21:02:55.000Z | 2021-05-11T20:29:41.000Z | elixir/ocr-numbers/lib/ocr_numbers.ex | paulfioravanti/exercism | b3bf15814b89b0ef50b9bf062b3c800e80e37e75 | [
"MIT"
] | 1 | 2019-06-25T10:42:14.000Z | 2019-06-25T10:42:14.000Z | defmodule OcrNumbers do
  # Each key is a digit's four 3-column rows concatenated into one 12-char string.
  @conversions %{
    " _ | ||_|   " => "0",
    "     |  |   " => "1",
    " _  _||_    " => "2",
    " _  _| _|   " => "3",
    "   |_|  |   " => "4",
    " _ |_  _|   " => "5",
    " _ |_ |_|   " => "6",
    " _   |  |   " => "7",
    " _ |_||_|   " => "8",
    " _ |_| _|   " => "9"
  }
@height 4
@width 3
@unrecognised "?"
@doc """
Given a 3 x 4 grid of pipes, underscores, and spaces, determine which number is represented, or
whether it is garbled.
"""
  @spec convert([String.t()]) :: {:ok, String.t()} | {:error, charlist()}
def convert(input) do
with :ok <- check_height(input),
:ok <- check_width(input) do
{:ok, to_number(input)}
end
end
defp check_height(input) do
if valid_height?(input) do
:ok
else
{:error, 'invalid line count'}
end
end
defp valid_height?(input) do
input
|> length()
|> rem(@height)
|> Kernel.==(0)
end
defp check_width(input) do
if Enum.all?(input, &valid_width?/1) do
:ok
else
{:error, 'invalid column count'}
end
end
defp valid_width?(input) do
input
|> String.length()
|> rem(@width)
|> Kernel.==(0)
end
defp to_number(input) do
input
|> Enum.chunk_every(@height)
|> Enum.map(&convert_row/1)
|> Enum.join(",")
end
defp convert_row(row) do
row
|> Enum.map(&split_by_digits/1)
|> transpose()
|> Enum.map(&Enum.join/1)
|> Enum.map(&convert_to_number/1)
|> Enum.join()
end
defp split_by_digits(line) do
line
|> String.graphemes()
|> Enum.chunk_every(@width)
|> Enum.map(&Enum.join/1)
end
defp transpose(row) do
row
|> Enum.zip()
|> Enum.map(&Tuple.to_list/1)
end
def convert_to_number(ocr), do: Map.get(@conversions, ocr, @unrecognised)
end
| 19.791209 | 97 | 0.522488 |
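A short usage sketch for the OcrNumbers module above, with input rows constructed here for illustration — each digit spans three columns and four rows, so a lone zero converts like this:

rows = [
  " _ ",
  "| |",
  "|_|",
  "   "
]
{:ok, "0"} = OcrNumbers.convert(rows)

# A grid whose height is not a multiple of four fails fast:
{:error, 'invalid line count'} = OcrNumbers.convert([" _ ", "| |", "|_|"])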
731e0347c49803a6bc628453fc6b9341cdfaa7c7 | 63 | ex | Elixir | lib/karroake_web/views/song_view.ex | ontanj/karroake | b3346707df9d2378a06ee3bb7d218d0962fe7d86 | [
"MIT"
] | null | null | null | lib/karroake_web/views/song_view.ex | ontanj/karroake | b3346707df9d2378a06ee3bb7d218d0962fe7d86 | [
"MIT"
] | 1 | 2021-05-10T10:36:59.000Z | 2021-05-10T10:36:59.000Z | lib/karroake_web/views/song_view.ex | ontanj/karroake | b3346707df9d2378a06ee3bb7d218d0962fe7d86 | [
"MIT"
] | null | null | null | defmodule KarroakeWeb.SongView do
use KarroakeWeb, :view
end
| 15.75 | 33 | 0.809524 |
731e2e900aca97085affaf0081dcc09204a6607b | 1,401 | exs | Elixir | test/beanstix_test.exs | fitronics/beanstix | 8e0da4596a83b71981adda8c5ee76ff76aaa877c | [
"MIT"
] | 1 | 2018-09-02T21:34:47.000Z | 2018-09-02T21:34:47.000Z | test/beanstix_test.exs | fitronics/beanstix | 8e0da4596a83b71981adda8c5ee76ff76aaa877c | [
"MIT"
] | null | null | null | test/beanstix_test.exs | fitronics/beanstix | 8e0da4596a83b71981adda8c5ee76ff76aaa877c | [
"MIT"
] | 1 | 2019-11-13T16:46:47.000Z | 2019-11-13T16:46:47.000Z | defmodule BeanstixTest do
use ExUnit.Case
doctest Beanstix
@moduletag :simple
@data "simple"
setup context do
Beanstix.TestHelpers.setup_connection(context)
end
test "put", %{pid: pid} do
assert {:ok, job_id} = Beanstix.put(pid, @data)
assert {:ok, {^job_id, @data}} = Beanstix.reserve(pid)
assert {:ok, :deleted} = Beanstix.delete(pid, job_id)
end
test "put!", %{pid: pid} do
job_id = Beanstix.put!(pid, @data)
assert {^job_id, @data} = Beanstix.reserve!(pid)
assert :deleted = Beanstix.delete!(pid, job_id)
end
test "put_in_tube", %{pid: pid, tube: tube} do
assert {:ok, job_id} = Beanstix.put_in_tube(pid, tube, @data)
assert {:ok, {^job_id, @data}} = Beanstix.reserve(pid)
assert {:ok, :deleted} = Beanstix.delete(pid, job_id)
end
test "put_in_tube!", %{pid: pid, tube: tube} do
job_id = Beanstix.put_in_tube!(pid, tube, @data)
assert {^job_id, @data} = Beanstix.reserve!(pid)
assert :deleted = Beanstix.delete!(pid, job_id)
end
test "release", %{pid: pid} do
assert {:ok, job_id} = Beanstix.put(pid, @data)
assert {:ok, {^job_id, @data}} = Beanstix.reserve(pid)
assert {:ok, :timed_out} = Beanstix.reserve(pid, 0)
assert {:ok, :released} = Beanstix.release(pid, job_id)
assert {:ok, {^job_id, @data}} = Beanstix.reserve(pid)
assert :deleted = Beanstix.delete!(pid, job_id)
end
end
| 31.133333 | 65 | 0.645253 |
731e3d27e675ef95c0c67e4375e1f644df060166 | 14,324 | exs | Elixir | test/bitcrowd_ecto/assertions_test.exs | bitcrowd/bitcrowd_ecto | 6e2d41d7dbe1fa1398b545c12385cde2db94285c | [
"Apache-2.0"
] | 2 | 2022-01-11T08:03:49.000Z | 2022-02-11T10:25:30.000Z | test/bitcrowd_ecto/assertions_test.exs | bitcrowd/bitcrowd_ecto | 6e2d41d7dbe1fa1398b545c12385cde2db94285c | [
"Apache-2.0"
] | 3 | 2022-02-03T21:28:14.000Z | 2022-03-25T09:03:15.000Z | test/bitcrowd_ecto/assertions_test.exs | bitcrowd/bitcrowd_ecto | 6e2d41d7dbe1fa1398b545c12385cde2db94285c | [
"Apache-2.0"
] | null | null | null | # SPDX-License-Identifier: Apache-2.0
defmodule BitcrowdEcto.AssertionsTest do
use BitcrowdEcto.TestCase, async: true
import BitcrowdEcto.{Assertions, TestSchema}
doctest BitcrowdEcto.Assertions, import: true
describe "flat_errors_on/2" do
test "flattens all errors and their validation metadata into a list" do
cs =
changeset()
|> add_error(:some_string, "is wrong", validation: :wrong)
|> add_error(:some_string, "is really wrong", validation: :really_wrong)
|> add_error(:some_integer, "is also wrong", validation: :also_wrong)
assert flat_errors_on(cs, :some_string) == [
"is really wrong",
:really_wrong,
"is wrong",
:wrong
]
assert flat_errors_on(cs, :some_integer) == ["is also wrong", :also_wrong]
end
test "can fetch metadata by a given key" do
cs = changeset() |> add_error(:some_string, "is wrong", foo: :bar)
assert :bar in flat_errors_on(cs, :some_string, metadata: :foo)
assert :bar in flat_errors_on(cs, :some_string, metadata: [:foo])
end
end
describe "assert_error_on/4" do
test "asserts that a given error is present on a field" do
cs =
changeset()
|> validate_required(:some_string)
|> assert_error_on(:some_string, :required)
assert_raise ExUnit.AssertionError, fn ->
assert assert_error_on(cs, :some_integer, :length)
end
end
test "can assert on multiple errors" do
cs =
%{some_string: "foo", some_integer: 1}
|> changeset()
|> validate_length(:some_string, min: 10)
|> validate_inclusion(:some_string, ["bar"])
|> validate_inclusion(:some_integer, [5])
|> assert_error_on(:some_string, [:length, :inclusion])
assert_raise ExUnit.AssertionError, fn ->
assert_error_on(cs, :some_integer, [:inclusion, :number])
end
end
end
describe "assert_required_error_on/2" do
test "asserts on the :required error on a field" do
cs = changeset() |> validate_required(:some_string)
assert assert_required_error_on(cs, :some_string) == cs
assert_raise ExUnit.AssertionError, fn ->
assert_required_error_on(cs, :some_integer)
end
end
end
describe "assert_format_error_on/2" do
test "asserts on the :format error on a field" do
cs =
%{some_string: "foo"}
|> changeset()
|> validate_format(:some_string, ~r/bar/)
assert assert_format_error_on(cs, :some_string) == cs
assert_raise ExUnit.AssertionError, fn ->
assert_format_error_on(cs, :some_integer)
end
end
end
describe "assert_number_error_on/2" do
test "asserts on the :number error on a field" do
cs =
%{some_integer: 5}
|> changeset()
|> validate_number(:some_integer, greater_than: 5)
assert assert_number_error_on(cs, :some_integer) == cs
assert_raise ExUnit.AssertionError, fn ->
assert_number_error_on(cs, :some_string)
end
end
end
describe "assert_inclusion_error_on/2" do
test "asserts on the :inclusion error on a field" do
cs =
%{some_string: "foo"}
|> changeset()
|> validate_inclusion(:some_string, ["bar", "baz"])
assert assert_inclusion_error_on(cs, :some_string) == cs
assert_raise ExUnit.AssertionError, fn ->
assert_inclusion_error_on(cs, :some_integer)
end
end
end
describe "assert_acceptance_error_on/2" do
test "asserts on the :acceptance error on a field" do
cs =
%{"some_boolean" => false}
|> changeset()
|> validate_acceptance(:some_boolean)
assert assert_acceptance_error_on(cs, :some_boolean) == cs
assert_raise ExUnit.AssertionError, fn ->
assert_acceptance_error_on(cs, :some_integer)
end
end
end
# We don't have a constraints on the "test_schema" table, so we add the errors ourselves.
describe "assert_unique_constraint_error_on/2" do
test "asserts on the :unique error on a field" do
cs =
changeset()
|> add_error(:some_string, "has already been taken", constraint: :unique)
assert assert_unique_constraint_error_on(cs, :some_string) == cs
assert_raise ExUnit.AssertionError, fn ->
assert_unique_constraint_error_on(cs, :some_integer)
end
end
end
describe "assert_foreign_key_constraint_error_on/2" do
test "asserts on the :foreign error on a field" do
cs =
changeset()
|> add_error(:some_string, "does not exist", constraint: :foreign)
assert assert_foreign_key_constraint_error_on(cs, :some_string) == cs
assert_raise ExUnit.AssertionError, fn ->
assert_foreign_key_constraint_error_on(cs, :some_integer)
end
end
end
describe "assert_no_assoc_constraint_error_on/2" do
test "asserts on the :no_assoc error on a field" do
cs =
changeset()
|> add_error(:children, "is still associated with this entry", constraint: :no_assoc)
assert assert_no_assoc_constraint_error_on(cs, :children) == cs
assert_raise ExUnit.AssertionError, fn ->
assert_no_assoc_constraint_error_on(cs, :parent)
end
end
end
describe "assert_foreign_key_constraint_on/3" do
test "asserts on the :foreign_key/:foreign constraint on a field" do
cs =
%TestSchema{}
|> change(%{})
|> foreign_key_constraint(:some_string, name: "some-name")
assert assert_foreign_key_constraint_on(cs, :some_string) == cs
assert assert_foreign_key_constraint_on(cs, :some_string, constraint: "some-name") == cs
assert_raise ExUnit.AssertionError, fn ->
assert_foreign_key_constraint_on(cs, :some_integer)
end
assert_raise ExUnit.AssertionError, fn ->
assert_foreign_key_constraint_on(cs, :some_string, constraint: "some-other-name")
end
end
end
describe "assert_no_assoc_constraint_on/3" do
test "asserts on the :foreign_key/:no_assoc constraint on a field" do
cs =
%TestSchema{}
|> change(%{})
|> no_assoc_constraint(:children, name: "some-name")
assert assert_no_assoc_constraint_on(cs, :children) == cs
assert assert_no_assoc_constraint_on(cs, :children, constraint: "some-name") == cs
assert_raise ExUnit.AssertionError, fn ->
assert_no_assoc_constraint_on(cs, :parent)
end
assert_raise ExUnit.AssertionError, fn ->
assert_no_assoc_constraint_on(cs, :children, constraint: "some-other-name")
end
end
end
describe "assert_unique_constraint_on/3" do
test "asserts on the :unique constraint on a field" do
cs =
%TestSchema{}
|> change(%{})
|> unique_constraint(:some_string, name: "some-name")
assert assert_unique_constraint_on(cs, :some_string) == cs
assert assert_unique_constraint_on(cs, :some_string, constraint: "some-name") == cs
assert_raise ExUnit.AssertionError, fn ->
assert_unique_constraint_on(cs, :some_integer)
end
assert_raise ExUnit.AssertionError, fn ->
assert_unique_constraint_on(cs, :some_string, constraint: "some-other-name")
end
end
end
describe "refute_errors_on/2" do
test "asserts that a field does not have errors" do
cs = changeset() |> validate_required(:some_string)
assert refute_errors_on(cs, :some_integer) == cs
assert_raise ExUnit.AssertionError, fn ->
refute_errors_on(cs, :some_string)
end
end
end
describe "assert_changes/2" do
test "asserts that a field is changed" do
cs = changeset(%{some_string: "foo"})
assert assert_changes(cs, :some_string) == cs
assert_raise ExUnit.AssertionError, fn ->
assert_changes(cs, :some_integer)
end
end
end
describe "assert_changes/3" do
test "asserts that a field is changed to a specific valud" do
cs = changeset(%{some_string: "foo"})
assert assert_changes(cs, :some_string, "foo") == cs
assert_raise ExUnit.AssertionError, fn ->
assert_changes(cs, :some_string, "bar")
end
end
end
describe "refute_changes/2" do
test "asserts that a field is not changed" do
cs = changeset(%{some_string: "foo"})
assert refute_changes(cs, :some_integer) == cs
assert_raise ExUnit.AssertionError, fn ->
refute_changes(cs, :some_string)
end
end
end
describe "assert_difference/4" do
setup do
%{agent: start_supervised!({Agent, fn -> 0 end})}
end
test "asserts that a given function changes the integer fetched by another function by a delta",
%{agent: agent} do
assert_difference(
fn -> Agent.get(agent, & &1) end,
1,
fn -> Agent.update(agent, fn x -> x + 1 end) end
)
assert_raise ExUnit.AssertionError, ~r/hasn't changed by 2/, fn ->
assert_difference(
fn -> Agent.get(agent, & &1) end,
2,
fn -> Agent.update(agent, fn x -> x + 1 end) end
)
end
end
test "accepts a message option to configure the error message", %{agent: agent} do
assert_raise ExUnit.AssertionError, ~r/boom/, fn ->
assert_difference(
fn -> Agent.get(agent, & &1) end,
2,
fn -> Agent.update(agent, fn x -> x + 1 end) end,
message: "boom"
)
end
end
end
describe "refute_difference/4" do
setup do
%{agent: start_supervised!({Agent, fn -> 0 end})}
end
test "asserts that a given function does not change the integer fetched by another function",
%{agent: agent} do
refute_difference(
fn -> Agent.get(agent, & &1) end,
fn -> nil end
)
assert_raise ExUnit.AssertionError, ~r/has changed/, fn ->
refute_difference(
fn -> Agent.get(agent, & &1) end,
fn -> Agent.update(agent, fn x -> x + 1 end) end
)
end
end
test "accepts a message option to configure the error message", %{agent: agent} do
assert_raise ExUnit.AssertionError, ~r/boom/, fn ->
refute_difference(
fn -> Agent.get(agent, & &1) end,
fn -> Agent.update(agent, fn x -> x + 1 end) end,
message: "boom"
)
end
end
end
describe "assert_count_difference/4" do
test "asserts that a given function changes the count of a given database table" do
assert_count_difference(TestRepo, TestSchema, 1, fn ->
insert(:test_schema)
end)
assert_raise ExUnit.AssertionError, ~r/TestSchema hasn't changed by 3/, fn ->
assert_count_difference(TestRepo, TestSchema, 3, fn ->
insert(:test_schema)
insert(:test_schema)
end)
end
end
end
describe "assert_count_differences/4" do
test "asserts that a given function changes the count of multiple database tables" do
assert_count_differences(TestRepo, [{TestSchema, 1}], fn ->
insert(:test_schema)
end)
assert_raise ExUnit.AssertionError, ~r/TestSchema hasn't changed by 3/, fn ->
assert_count_differences(TestRepo, [{TestSchema, 3}], fn ->
insert(:test_schema)
insert(:test_schema)
end)
end
end
end
describe "assert_preloaded/2" do
setup do
%{test_schema: insert(:test_schema)}
end
test "asserts that an Ecto struct has a preloaded nested struct at a given path", %{
test_schema: %{id: id}
} do
{:ok, test_schema} = TestRepo.fetch(TestSchema, id, preload: :children)
assert_preloaded(test_schema, [:children])
assert_preloaded(test_schema, :children)
assert_raise ExUnit.AssertionError, ~r/TestSchema has not loaded association/, fn ->
{:ok, test_schema} = TestRepo.fetch(TestSchema, id)
assert_preloaded(test_schema, [:children])
end
end
end
describe "refute_preloaded/2" do
setup do
%{test_schema: insert(:test_schema)}
end
test "asserts that an Ecto struct does not have a preloaded nested struct at a given path", %{
test_schema: %{id: id}
} do
{:ok, test_schema} = TestRepo.fetch(TestSchema, id)
refute_preloaded(test_schema, [:children])
refute_preloaded(test_schema, :children)
assert_raise ExUnit.AssertionError, ~r/TestSchema has preloaded association/, fn ->
{:ok, test_schema} = TestRepo.fetch(TestSchema, id, preload: :children)
refute_preloaded(test_schema, [:children])
end
end
end
describe "assert_change_to_almost_now/2" do
test "asserts that the given field changed to the present time" do
%{datetime: DateTime.utc_now()}
|> changeset()
|> assert_change_to_almost_now(:datetime)
assert_raise ExUnit.AssertionError, fn ->
%{datetime: DateTime.add(DateTime.utc_now(), -60_000_000)}
|> changeset()
|> assert_change_to_almost_now(:datetime)
end
end
test "fails if the given field is not a timestamp" do
assert_raise ExUnit.AssertionError, ~r/not a timestamp/, fn ->
%{some_integer: 1}
|> changeset()
|> assert_change_to_almost_now(:some_integer)
end
end
test "fails if the given field does not change" do
assert_raise ExUnit.AssertionError, ~r/didn't change/, fn ->
%{some_integer: 1}
|> changeset()
|> assert_change_to_almost_now(:datetime)
end
end
end
describe "assert_changeset_valid/1" do
test "asserts that a changesets 'valid?' flag is true" do
%{some_string: "Yuju!"}
|> changeset()
|> assert_changeset_valid()
assert_raise ExUnit.AssertionError, fn ->
%{some_string: 1_000}
|> changeset()
|> assert_changeset_valid()
end
end
end
describe "refute_changeset_valid/1" do
test "asserts that a changeset's 'valid?' flag is false" do
%{some_string: 1_000}
|> changeset()
|> refute_changeset_valid()
assert_raise ExUnit.AssertionError, fn ->
%{some_string: "Yuju!"}
|> changeset()
|> refute_changeset_valid()
end
end
end
end
| 30.02935 | 100 | 0.641022 |
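A hedged sketch of how these assertions read from a consuming test suite; a schemaless {data, types} changeset keeps the example self-contained, and the MyApp names are hypothetical:

defmodule MyApp.UserChangesetTest do
  use ExUnit.Case, async: true

  import BitcrowdEcto.Assertions
  import Ecto.Changeset

  test "email is required" do
    # Build a schemaless changeset, then chain the pipeable assertions.
    {%{}, %{email: :string}}
    |> cast(%{}, [:email])
    |> validate_required(:email)
    |> assert_required_error_on(:email)
    |> refute_changeset_valid()
  end
end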
731e6bc42ccf95e2f0b8a59eae3aa1ae4936f714 | 638 | ex | Elixir | lib/opensource_challenge_web/auth_error_handler.ex | topaxi/opensource-challenge-api | 27e5da9c5162a8f0f4533f9f7f14ca1d145bac31 | [
"MIT"
] | 3 | 2016-11-29T10:58:11.000Z | 2020-04-29T12:17:23.000Z | lib/opensource_challenge_web/auth_error_handler.ex | topaxi/opensource-challenge-api | 27e5da9c5162a8f0f4533f9f7f14ca1d145bac31 | [
"MIT"
] | 50 | 2016-12-04T15:02:31.000Z | 2021-07-23T04:21:55.000Z | lib/opensource_challenge_web/auth_error_handler.ex | topaxi/opensource-challenge-api | 27e5da9c5162a8f0f4533f9f7f14ca1d145bac31 | [
"MIT"
] | 5 | 2019-11-24T11:17:39.000Z | 2020-04-29T12:19:36.000Z | defmodule OpensourceChallengeWeb.AuthErrorHandler do
use OpensourceChallengeWeb, :controller
@behaviour Guardian.Plug.ErrorHandler
def unauthenticated(conn, _params \\ %{}) do
conn
|> put_status(401)
|> render(OpensourceChallengeWeb.ErrorView, "401.json-api")
|> halt
end
def unauthorized(conn, _params \\ %{}) do
conn
|> put_status(403)
|> render(OpensourceChallengeWeb.ErrorView, "403.json-api")
|> halt
end
@impl Guardian.Plug.ErrorHandler
def auth_error(conn, {type, _reason}, _opts) do
body = Jason.encode!(%{message: to_string(type)})
send_resp(conn, 401, body)
end
end
| 24.538462 | 63 | 0.69279 |
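A minimal wiring sketch for the handler above, assuming a Guardian 1.x setup; OpensourceChallengeWeb.Guardian is a hypothetical implementation-module name, not taken from the repo:

defmodule OpensourceChallengeWeb.AuthPipeline do
  use Guardian.Plug.Pipeline,
    otp_app: :opensource_challenge,
    module: OpensourceChallengeWeb.Guardian,
    error_handler: OpensourceChallengeWeb.AuthErrorHandler

  # On a missing or invalid token, Guardian invokes auth_error/3 above.
  plug Guardian.Plug.VerifyHeader
  plug Guardian.Plug.EnsureAuthenticated
end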
731e7059c4a0f4ad1e9eb367280218a80c62cfef | 241 | ex | Elixir | b1/lib/b1.ex | lajeryl/hangman | ad90b6114ac98414b4bddf69bf88b1601b5b1799 | [
"MIT"
] | 1 | 2022-02-22T04:56:25.000Z | 2022-02-22T04:56:25.000Z | b1/lib/b1.ex | lajeryl/hangman | ad90b6114ac98414b4bddf69bf88b1601b5b1799 | [
"MIT"
] | null | null | null | b1/lib/b1.ex | lajeryl/hangman | ad90b6114ac98414b4bddf69bf88b1601b5b1799 | [
"MIT"
] | null | null | null | defmodule B1 do
@moduledoc """
B1 keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 24.1 | 66 | 0.742739 |
731e7aab49695bd13e9853d63384f4c3489ed8c9 | 634 | exs | Elixir | elixir/mix.exs | ChrisWilding/canvas | 58023e1a3d25882f1f7e6aeb6fc932ede3ce9c18 | [
"Apache-2.0"
] | null | null | null | elixir/mix.exs | ChrisWilding/canvas | 58023e1a3d25882f1f7e6aeb6fc932ede3ce9c18 | [
"Apache-2.0"
] | null | null | null | elixir/mix.exs | ChrisWilding/canvas | 58023e1a3d25882f1f7e6aeb6fc932ede3ce9c18 | [
"Apache-2.0"
] | null | null | null | defmodule Canvas.MixProject do
use Mix.Project
def project do
[
app: :canvas,
version: "0.1.0",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
{:dialyxir, "~> 1.0.0-rc.6", only: [:dev], runtime: false}
]
end
end
| 21.133333 | 87 | 0.567823 |
731eb99ef81ab93f3fcc6c427e2aa8f4519a22ec | 9,744 | ex | Elixir | lib/ash_json_api/error/schema_errors.ex | peillis/ash_json_api | f63ccacebc049eba8d37b8b58181fb46a4a0ea8c | [
"MIT"
] | 11 | 2020-09-21T22:03:42.000Z | 2022-02-02T23:48:11.000Z | lib/ash_json_api/error/schema_errors.ex | peillis/ash_json_api | f63ccacebc049eba8d37b8b58181fb46a4a0ea8c | [
"MIT"
] | 44 | 2020-05-02T04:37:42.000Z | 2021-06-25T14:38:44.000Z | lib/ash_json_api/error/schema_errors.ex | peillis/ash_json_api | f63ccacebc049eba8d37b8b58181fb46a4a0ea8c | [
"MIT"
] | 9 | 2020-08-25T20:23:34.000Z | 2022-02-14T04:40:10.000Z | defmodule AshJsonApi.Error.SchemaErrors do
@moduledoc false
def all_errors(%{reason: reason}, format \\ :parameter) do
reason
|> JsonXema.ValidationError.travers_errors([], fn error, path, acc ->
error
|> error_messages(path)
|> Enum.reduce(acc, fn message, acc ->
[%{path: format_path_name(format, path), message: message} | acc]
end)
end)
|> List.flatten()
end
defp format_path_name(:parameter, [path | rest]) do
Enum.join([path | Enum.map(rest, fn elem -> "[#{elem}]" end)], "")
end
defp format_path_name(:json_pointer, path) do
Enum.join(path, "/")
end
defp error_messages(reason, path, acc \\ [])
defp error_messages(%{exclusiveMinimum: minimum, value: value}, path, acc)
when minimum == value do
msg = "Value #{inspect(minimum)} equals exclusive minimum value of #{inspect(minimum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{minimum: minimum, exclusiveMinimum: true, value: value}, path, acc)
when minimum == value do
msg = "Value #{inspect(value)} equals exclusive minimum value of #{inspect(minimum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{minimum: minimum, exclusiveMinimum: true, value: value}, path, acc) do
msg = "Value #{inspect(value)} is less than minimum value of #{inspect(minimum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{exclusiveMinimum: minimum, value: value}, path, acc) do
msg = "Value #{inspect(value)} is less than minimum value of #{inspect(minimum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{minimum: minimum, value: value}, path, acc) do
msg = "Value #{inspect(value)} is less than minimum value of #{inspect(minimum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{exclusiveMaximum: maximum, value: value}, path, acc)
when maximum == value do
msg = "Value #{inspect(maximum)} equals exclusive maximum value of #{inspect(maximum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{maximum: maximum, exclusiveMaximum: true, value: value}, path, acc)
when maximum == value do
msg = "Value #{inspect(value)} equals exclusive maximum value of #{inspect(maximum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{maximum: maximum, exclusiveMaximum: true, value: value}, path, acc) do
msg = "Value #{inspect(value)} exceeds maximum value of #{inspect(maximum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{exclusiveMaximum: maximum, value: value}, path, acc) do
msg = "Value #{inspect(value)} exceeds maximum value of #{inspect(maximum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{maximum: maximum, value: value}, path, acc) do
msg = "Value #{inspect(value)} exceeds maximum value of #{inspect(maximum)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{maxLength: max, value: value}, path, acc) do
msg = "Expected maximum length of #{inspect(max)}, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{minLength: min, value: value}, path, acc) do
msg = "Expected minimum length of #{inspect(min)}, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{multipleOf: multiple_of, value: value}, path, acc) do
msg = "Value #{inspect(value)} is not a multiple of #{inspect(multiple_of)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{enum: _enum, value: value}, path, acc) do
msg = "Value #{inspect(value)} is not defined in enum"
[msg <> at_path(path) | acc]
end
defp error_messages(%{minProperties: min, value: value}, path, acc) do
msg = "Expected at least #{inspect(min)} properties, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{maxProperties: max, value: value}, path, acc) do
msg = "Expected at most #{inspect(max)} properties, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{additionalProperties: false}, path, acc) do
msg = "Expected only defined properties, got key #{inspect(path)}."
[msg | acc]
end
defp error_messages(%{additionalItems: false}, path, acc) do
msg = "Unexpected additional item"
[msg <> at_path(path) | acc]
end
defp error_messages(%{format: format, value: value}, path, acc) do
msg = "String #{inspect(value)} does not validate against format #{inspect(format)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{then: error}, path, acc) do
msg = ["Schema for then does not match#{at_path(path)}"]
error =
error
|> JsonXema.ValidationError.travers_errors([], &error_messages/3, path: path)
|> indent()
Enum.concat([error, msg, acc])
end
defp error_messages(%{else: error}, path, acc) do
msg = ["Schema for else does not match#{at_path(path)}"]
error =
error
|> JsonXema.ValidationError.travers_errors([], &error_messages/3, path: path)
|> indent()
Enum.concat([error, msg, acc])
end
defp error_messages(%{not: :ok, value: value}, path, acc) do
msg = "Value is valid against schema from not, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{contains: errors}, path, acc) do
msg = ["No items match contains#{at_path(path)}"]
errors =
errors
|> Enum.map(fn {index, reason} ->
JsonXema.ValidationError.travers_errors(reason, [], &error_messages/3,
path: path ++ [index]
)
end)
|> Enum.reverse()
|> indent()
Enum.concat([errors, msg, acc])
end
defp error_messages(%{anyOf: errors}, path, acc) do
msg = ["No match of any schema" <> at_path(path)]
errors =
errors
|> Enum.flat_map(fn reason ->
reason
|> JsonXema.ValidationError.travers_errors([], &error_messages/3, path: path)
|> Enum.reverse()
end)
|> Enum.reverse()
|> indent()
Enum.concat([errors, msg, acc])
end
defp error_messages(%{allOf: errors}, path, acc) do
msg = ["No match of all schema#{at_path(path)}"]
errors =
errors
|> Enum.map(fn reason ->
JsonXema.ValidationError.travers_errors(reason, [], &error_messages/3, path: path)
end)
|> Enum.reverse()
|> indent()
Enum.concat([errors, msg, acc])
end
defp error_messages(%{oneOf: {:error, errors}}, path, acc) do
msg = ["No match of any schema#{at_path(path)}"]
errors =
errors
|> Enum.map(fn reason ->
JsonXema.ValidationError.travers_errors(reason, [], &error_messages/3, path: path)
end)
|> Enum.reverse()
|> indent()
Enum.concat([errors, msg, acc])
end
defp error_messages(%{oneOf: {:ok, success}}, path, acc) do
msg = "More as one schema matches (indexes: #{inspect(success)})"
[msg <> at_path(path) | acc]
end
defp error_messages(%{required: required}, path, acc) do
msg = "Required properties are missing: #{inspect(required)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{propertyNames: errors, value: _value}, path, acc) do
msg = ["Invalid property names#{at_path(path)}"]
errors =
errors
|> Enum.map(fn {key, reason} ->
"#{inspect(key)} : #{error_messages(reason, [], [])}"
end)
|> Enum.reverse()
|> indent()
Enum.concat([errors, msg, acc])
end
defp error_messages(%{dependencies: deps}, path, acc) do
msg =
deps
|> Enum.reduce([], fn
{key, reason}, acc when is_map(reason) ->
sub_msg =
reason
|> error_messages(path, [])
|> Enum.reverse()
|> indent()
|> Enum.join("\n")
["Dependencies for #{inspect(key)} failed#{at_path(path)}\n#{sub_msg}" | acc]
{key, reason}, acc ->
[
"Dependencies for #{inspect(key)} failed#{at_path(path)}" <>
" Missing required key #{inspect(reason)}."
| acc
]
end)
|> Enum.reverse()
|> Enum.join("\n")
[msg | acc]
end
defp error_messages(%{minItems: min, value: value}, path, acc) do
msg = "Expected at least #{inspect(min)} items, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{maxItems: max, value: value}, path, acc) do
msg = "Expected at most #{inspect(max)} items, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{uniqueItems: true, value: value}, path, acc) do
msg = "Expected unique items, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{const: const, value: value}, path, acc) do
msg = "Expected #{inspect(const)}, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{pattern: pattern, value: value}, path, acc) do
msg = "Pattern #{inspect(pattern)} does not match value #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{type: type, value: value}, path, acc) do
msg = "Expected #{inspect(type)}, got #{inspect(value)}"
[msg <> at_path(path) | acc]
end
defp error_messages(%{type: false}, path, acc) do
msg = "Schema always fails validation"
[msg <> at_path(path) | acc]
end
defp error_messages(%{properties: _}, _path, acc), do: acc
defp error_messages(%{items: _}, _path, acc), do: acc
defp error_messages(_error, path, acc) do
msg = "Unexpected error"
[msg <> at_path(path) | acc]
end
defp at_path([]), do: "."
defp at_path(path), do: ", at #{inspect(path)}."
defp indent(list), do: Enum.map(list, fn str -> " #{str}" end)
end
| 31.230769 | 94 | 0.618637 |
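A small illustration of the two path formats all_errors/2 can emit, assuming a required-property failure under data/attributes (values invented for the example):

# all_errors(error)                  # default :parameter format
# #=> [%{path: "data[attributes]", message: ~s(Required properties are missing: ["name"])}]
#
# all_errors(error, :json_pointer)
# #=> [%{path: "data/attributes", message: ~s(Required properties are missing: ["name"])}]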
731ecb4fcdb1524bde6324c02fb711adb7912f14 | 8,047 | exs | Elixir | apps/graphql/test/web/resolvers/legal_entity_merge_job_resolver_test.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/graphql/test/web/resolvers/legal_entity_merge_job_resolver_test.exs | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/graphql/test/web/resolvers/legal_entity_merge_job_resolver_test.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule GraphQL.LegalEntityMergeJobResolverTest do
@moduledoc false
use GraphQL.ConnCase, async: true
import Core.Factories
import Mox
alias Absinthe.Relay.Node
alias Ecto.UUID
setup :verify_on_exit!
setup %{conn: conn} do
user_id = UUID.generate()
tax_id = random_tax_id()
party = insert(:prm, :party, tax_id: tax_id)
insert(:prm, :party_user, party: party, user_id: user_id)
%{id: client_id} = insert(:prm, :legal_entity, edrpou: tax_id)
conn =
conn
|> put_scope("legal_entity:merge")
|> put_drfo(tax_id)
|> put_consumer_id(user_id)
|> put_client_id(client_id)
{:ok, %{conn: conn, tax_id: tax_id, client_id: client_id}}
end
describe "get list" do
setup %{conn: conn} do
merged_to = insert(:prm, :legal_entity)
merged_from = insert(:prm, :legal_entity)
meta = %{
"merged_to_legal_entity" => %{
"id" => merged_to.id,
"name" => merged_to.name,
"edrpou" => merged_to.edrpou
},
"merged_from_legal_entity" => %{
"id" => merged_from.id,
"name" => merged_from.name,
"edrpou" => merged_from.edrpou
}
}
job = build(:legal_entity_merge_job, meta: meta, status: "PROCESSED")
{:ok, %{conn: put_scope(conn, "legal_entity_merge_job:read"), job: job}}
end
test "filter by status and mergedToLegalEntity", %{conn: conn, job: job} do
type = job.type
status = job.status
edrpou = get_in(job[:meta], ~w(merged_to_legal_entity edrpou))
assert edrpou
expect(RPCWorkerMock, :run, fn _, _, :search_jobs, args ->
assert [filter, order_by, cursor] = args
# filter for status
assert {:status, :equal, ^status} = hd(filter)
# filter for type
assert {:type, :equal, ^type} = List.last(filter)
# filter for jsonb field meta
assert {{:meta, nil, value}, _} = List.pop_at(filter, 1)
assert [
{:merged_to_legal_entity, nil, [{:edrpou, :equal, edrpou}, {:is_active, :equal, true}]}
] == value
# order
assert [desc: :started_at] == order_by
assert {0, 3} == cursor
{:ok, [job, job, job]}
end)
expect(RPCWorkerMock, :run, fn _, _, :search_jobs, _args -> {:ok, [job]} end)
query = """
query ListLegalEntityMergeJobsQuery(
$first: Int!,
$filter: LegalEntityMergeJobFilter!,
$order_by: LegalEntityMergeJobOrderBy!
){
legalEntityMergeJobs(first: $first, filter: $filter, order_by: $order_by) {
pageInfo {
startCursor
endCursor
hasPreviousPage
hasNextPage
}
nodes {
id
status
startedAt
endedAt
mergedToLegalEntity{
id
name
edrpou
}
mergedFromLegalEntity{
id
name
edrpou
}
}
}
}
"""
variables = %{
first: 2,
filter: %{
status: status,
mergedToLegalEntity: %{
edrpou: edrpou,
is_active: true
}
},
order_by: "STARTED_AT_DESC"
}
resp_body =
conn
|> post_query(query, variables)
|> json_response(200)
refute resp_body["errors"]
resp = get_in(resp_body, ~w(data legalEntityMergeJobs))
assert 2 == length(resp["nodes"])
assert resp["pageInfo"]["hasNextPage"]
refute resp["pageInfo"]["hasPreviousPage"]
query = """
query ListLegalEntitiesQuery(
$first: Int!,
$filter: LegalEntityMergeJobFilter!,
$order_by: LegalEntityMergeJobOrderBy!,
$after: String!
){
legalEntityMergeJobs(first: $first, filter: $filter, order_by: $order_by, after: $after) {
pageInfo {
hasPreviousPage
hasNextPage
}
nodes {
id
status
}
}
}
"""
variables = %{
first: 2,
filter: %{
status: "PROCESSED",
mergedToLegalEntity: %{
edrpou: edrpou
}
},
order_by: "STARTED_AT_ASC",
after: resp["pageInfo"]["endCursor"]
}
resp =
conn
|> post_query(query, variables)
|> json_response(200)
|> get_in(~w(data legalEntityMergeJobs))
assert 1 == length(resp["nodes"])
refute resp["pageInfo"]["hasNextPage"]
assert resp["pageInfo"]["hasPreviousPage"]
end
test "argument `first` not set", %{conn: conn} do
query = """
query ListLegalEntitiesQuery($after: String!){
legalEntityMergeJobs(after: $after) {
nodes {
id
}
}
}
"""
variables = %{
after: "some-cursor"
}
resp =
conn
|> post_query(query, variables)
|> json_response(200)
assert Enum.any?(resp["errors"], &match?(%{"message" => "You must either supply `:first` or `:last`"}, &1))
end
end
describe "get by id" do
setup %{conn: conn} do
{:ok, %{conn: put_scope(conn, "legal_entity_merge_job:read")}}
end
test "success", %{conn: conn} do
merged_to = insert(:prm, :legal_entity)
merged_from = insert(:prm, :legal_entity)
meta = %{
"merged_to_legal_entity" => %{
"id" => merged_to.id,
"name" => merged_to.name,
"edrpou" => merged_to.edrpou
},
"merged_from_legal_entity" => %{
"id" => merged_from.id,
"name" => merged_from.name,
"edrpou" => merged_from.edrpou
}
}
job = build(:legal_entity_merge_job, meta: meta, status: "PROCESSED")
task = build(:job_task, job_id: job.id)
expect(RPCWorkerMock, :run, fn _, _, :get_job, args ->
assert job.id == hd(args)
{:ok, job}
end)
expect(RPCWorkerMock, :run, fn _, _, :search_tasks, args ->
assert [filter, _order_by, _cursor] = args
assert [{:job_id, :in, [job.id]}] == filter
{:ok, [task]}
end)
id = Node.to_global_id("LegalEntityMergeJob", job.id)
query = """
query GetLegalEntityMergeJobQuery($id: ID) {
legalEntityMergeJob(id: $id) {
id
status
startedAt
endedAt
mergedToLegalEntity{
id
name
edrpou
}
mergedFromLegalEntity{
id
name
edrpou
}
tasks(first: 10){
nodes {
name
}
}
}
}
"""
variables = %{id: id}
resp_body =
conn
|> post_query(query, variables)
|> json_response(200)
refute resp_body["errors"]
resp = get_in(resp_body, ~w(data legalEntityMergeJob))
assert job.meta["merged_to_legal_entity"] == resp["mergedToLegalEntity"]
assert job.meta["merged_from_legal_entity"] == resp["mergedFromLegalEntity"]
assert "PROCESSED" == resp["status"]
assert [%{"name" => task.name}] == resp["tasks"]["nodes"]
end
test "job not found", %{conn: conn} do
expect(RPCWorkerMock, :run, fn _, _, :get_job, _args -> {:ok, nil} end)
query = """
query GetLegalEntityMergeJobQuery($id: ID) {
legalEntityMergeJob(id: $id) {
id
}
}
"""
variables = %{id: Node.to_global_id("LegalEntityMergeJob", "invalid-id")}
resp =
conn
|> post_query(query, variables)
|> json_response(200)
assert match?(%{"legalEntityMergeJob" => nil}, resp["data"])
end
end
end
| 25.46519 | 113 | 0.516714 |
731f00593ce447bb5d1fdead09b44861bcb2bce4 | 67 | ex | Elixir | lib/webapp_web/views/admin/region_view.ex | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | 12 | 2019-07-02T14:30:06.000Z | 2022-03-12T08:22:18.000Z | lib/webapp_web/views/admin/region_view.ex | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | 9 | 2020-03-16T20:10:50.000Z | 2021-06-17T17:45:44.000Z | lib/webapp_web/views/admin/region_view.ex | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | null | null | null | defmodule WebappWeb.Admin.RegionView do
use WebappWeb, :view
end
| 16.75 | 39 | 0.80597 |
731f0648af877b2765a7035c49fab33c1e716cd8 | 4,155 | ex | Elixir | deps/ecto/lib/ecto/adapters/postgres/datetime.ex | rchervin/phoenixportfolio | a5a6a60168d7261647a10a8dbd395b440db8a4f9 | [
"MIT"
] | 1 | 2017-11-27T06:00:32.000Z | 2017-11-27T06:00:32.000Z | deps/ecto/lib/ecto/adapters/postgres/datetime.ex | rchervin/phoenixportfolio | a5a6a60168d7261647a10a8dbd395b440db8a4f9 | [
"MIT"
] | null | null | null | deps/ecto/lib/ecto/adapters/postgres/datetime.ex | rchervin/phoenixportfolio | a5a6a60168d7261647a10a8dbd395b440db8a4f9 | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(Postgrex) do
defmodule Ecto.Adapters.Postgres.Time do
@moduledoc false
import Postgrex.BinaryUtils, warn: false
use Postgrex.BinaryExtension, [send: "time_send"]
def init(opts), do: opts
def encode(_) do
quote location: :keep do
{hour, min, sec, usec} when hour in 0..23 and min in 0..59 and sec in 0..59 and usec in 0..999_999 ->
time = {hour, min, sec}
<<8 :: int32, :calendar.time_to_seconds(time) * 1_000_000 + usec :: int64>>
end
end
def decode(_) do
quote location: :keep do
<<8 :: int32, microsecs :: int64>> ->
secs = div(microsecs, 1_000_000)
usec = rem(microsecs, 1_000_000)
{hour, min, sec} = :calendar.seconds_to_time(secs)
{hour, min, sec, usec}
end
end
end
defmodule Ecto.Adapters.Postgres.Date do
@moduledoc false
import Postgrex.BinaryUtils, warn: false
use Postgrex.BinaryExtension, send: "date_send"
@gd_epoch :calendar.date_to_gregorian_days({2000, 1, 1})
@max_year 5874897
def init(opts), do: opts
def encode(_) do
quote location: :keep do
{year, month, day} when year <= unquote(@max_year) ->
date = {year, month, day}
<<4 :: int32, :calendar.date_to_gregorian_days(date) - unquote(@gd_epoch) :: int32>>
end
end
def decode(_) do
quote location: :keep do
<<4 :: int32, days :: int32>> ->
:calendar.gregorian_days_to_date(days + unquote(@gd_epoch))
end
end
end
defmodule Ecto.Adapters.Postgres.Timestamp do
@moduledoc false
import Postgrex.BinaryUtils, warn: false
use Postgrex.BinaryExtension, [send: "timestamp_send"]
@gs_epoch :calendar.datetime_to_gregorian_seconds({{2000, 1, 1}, {0, 0, 0}})
@max_year 294276
def init(opts), do: opts
def encode(_) do
quote location: :keep do
timestamp ->
Ecto.Adapters.Postgres.Timestamp.encode!(timestamp)
end
end
def decode(_) do
quote location: :keep do
<<8 :: int32, microsecs :: int64>> ->
Ecto.Adapters.Postgres.Timestamp.decode!(microsecs)
end
end
## Helpers
def encode!({{year, month, day}, {hour, min, sec, usec}})
when year <= @max_year and hour in 0..23 and min in 0..59 and sec in 0..59 and usec in 0..999_999 do
datetime = {{year, month, day}, {hour, min, sec}}
secs = :calendar.datetime_to_gregorian_seconds(datetime) - @gs_epoch
<<8 :: int32, secs * 1_000_000 + usec :: int64>>
end
def encode!(arg) do
raise ArgumentError, """
could not encode date/time: #{inspect arg}
This error happens when you are by-passing Ecto's Query API by
using either Ecto.Adapters.SQL.query/4 or Ecto fragments. This
makes Ecto unable to properly cast the type. You can fix this by
explicitly telling Ecto which type to use via `type/2` or by
implementing the Ecto.DataType protocol for the given value.
"""
end
def decode!(microsecs) when microsecs < 0 and rem(microsecs, 1_000_000) != 0 do
secs = div(microsecs, 1_000_000) - 1
microsecs = 1_000_000 + rem(microsecs, 1_000_000)
split(secs, microsecs)
end
def decode!(microsecs) do
secs = div(microsecs, 1_000_000)
microsecs = rem(microsecs, 1_000_000)
split(secs, microsecs)
end
defp split(secs, microsecs) do
{date, {hour, min, sec}} = :calendar.gregorian_seconds_to_datetime(secs + @gs_epoch)
{date, {hour, min, sec, microsecs}}
end
end
defmodule Ecto.Adapters.Postgres.TimestampTZ do
@moduledoc false
import Postgrex.BinaryUtils, warn: false
use Postgrex.BinaryExtension, [send: "timestamptz_send"]
def init(opts), do: opts
def encode(_) do
quote location: :keep do
timestamp ->
Ecto.Adapters.Postgres.Timestamp.encode!(timestamp)
end
end
def decode(_) do
quote location: :keep do
<<8 :: int32, microsecs :: int64>> ->
Ecto.Adapters.Postgres.Timestamp.decode!(microsecs)
end
end
end
end
| 30.108696 | 109 | 0.627677 |
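A worked example of the epoch arithmetic in Timestamp.encode!/1 above (illustrative only): the wire format is microseconds relative to the Postgres epoch, 2000-01-01 00:00:00.

gs_epoch = :calendar.datetime_to_gregorian_seconds({{2000, 1, 1}, {0, 0, 0}})
secs = :calendar.datetime_to_gregorian_seconds({{2000, 1, 1}, {0, 0, 1}}) - gs_epoch
# secs == 1; with usec = 5, the encoded int64 is
# secs * 1_000_000 + usec == 1_000_005, sent as <<8::int32, 1_000_005::int64>>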
731f0ac1650aad2978ec237ce89289ce5878948e | 4,718 | exs | Elixir | test/changelog_web/plugs/vanity_domains_test.exs | sorentwo/changelog.com | 58e02a9fc0a84a1e49c99e775c231cc266f34309 | [
"MIT"
] | null | null | null | test/changelog_web/plugs/vanity_domains_test.exs | sorentwo/changelog.com | 58e02a9fc0a84a1e49c99e775c231cc266f34309 | [
"MIT"
] | null | null | null | test/changelog_web/plugs/vanity_domains_test.exs | sorentwo/changelog.com | 58e02a9fc0a84a1e49c99e775c231cc266f34309 | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.VanityDomainsTest do
use ChangelogWeb.ConnCase
alias ChangelogWeb.{Plug}
@jsparty %{
vanity_domain: "https://jsparty.fm",
slug: "jsparty",
apple_url: "https://podcasts.apple.com/us/podcast/js-party/id1209616598",
name: "JS Party"
}
@gotime %{
vanity_domain: "https://gotime.fm",
slug: "gotime",
spotify_url: "https://spotify.com",
name: "Go Time"
}
def assert_vanity_redirect(conn, path_or_url) do
location = conn |> get_resp_header("location") |> List.first()
assert conn.status == 302
if String.starts_with?(path_or_url, "http") do
assert location == path_or_url
else
assert location == "https://#{ChangelogWeb.Endpoint.host()}#{path_or_url}"
end
end
def build_conn_with_host_and_path(host, path) do
build_conn(:get, path) |> put_req_header("host", host)
end
def assign_podcasts(conn, podcasts) do
assign(conn, :podcasts, podcasts)
end
test "vanity redirects for root path" do
conn =
build_conn_with_host_and_path("jsparty.fm", "/")
|> assign_podcasts([@jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, "/jsparty")
end
test "vanity redirects for episode sub-paths" do
conn =
build_conn_with_host_and_path("gotime.fm", "/102")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, "/gotime/102")
end
test "vanity redirects for apple URL" do
conn =
build_conn_with_host_and_path("jsparty.fm", "/apple")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, @jsparty.apple_url)
end
test "vanity redirects for spotify URL" do
conn =
build_conn_with_host_and_path("gotime.fm", "/spotify")
|> assign_podcasts([@gotime, @gotime])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, @gotime.spotify_url)
end
test "vanity redirects for overcast URL" do
conn =
build_conn_with_host_and_path("jsparty.fm", "/overcast")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, "https://overcast.fm/itunes1209616598/js-party")
end
test "vanity redirects for RSS URL" do
conn =
build_conn_with_host_and_path("gotime.fm", "/rss")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, "/gotime/feed")
end
test "vanity redirects for email URL" do
conn =
build_conn_with_host_and_path("jsparty.fm", "/email")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, "/subscribe/jsparty")
end
test "vanity redirects for request URL" do
conn =
build_conn_with_host_and_path("gotime.fm", "/request")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, "/request/gotime")
end
test "vanity redirects for community URL" do
conn =
build_conn_with_host_and_path("jsparty.fm", "/community")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, "/community")
end
test "vanity redirects for guest guide" do
conn =
build_conn_with_host_and_path("jsparty.fm", "/guest")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, "/guest/jsparty")
end
test "vanity redirects for jsparty ff form" do
conn =
build_conn_with_host_and_path("jsparty.fm", "/ff")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
assert_vanity_redirect(conn, "https://changelog.typeform.com/to/wTCcQHGQ")
end
test "it does not vanity redirect for default host" do
conn =
build_conn_with_host_and_path(ChangelogWeb.Endpoint.host(), "/")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
vanity_redirect = conn |> get_resp_header("x-changelog-vanity-redirect")
assert vanity_redirect == ["false"]
end
test "it does not vanity redirect for default host subdomain" do
conn =
build_conn_with_host_and_path("21.#{ChangelogWeb.Endpoint.host()}", "/")
|> assign_podcasts([@gotime, @jsparty])
|> Plug.VanityDomains.call([])
vanity_redirect = conn |> get_resp_header("x-changelog-vanity-redirect")
assert vanity_redirect == ["false"]
end
test "it no-ops for other hosts" do
conn =
:get
|> build_conn("")
|> Plug.VanityDomains.call([])
assert conn.status == nil
end
end
| 28.421687 | 81 | 0.666172 |
731f10bda7f872e5d0eaff905cc1ca5f373e1b13 | 129 | exs | Elixir | test/test_helper.exs | wojtekmach/plug_cowboy | 45edb6ea83a854eaee7a9f88d8499599439f937a | [
"Apache-2.0"
] | 183 | 2018-10-18T18:50:17.000Z | 2022-01-11T22:28:20.000Z | test/test_helper.exs | wojtekmach/plug_cowboy | 45edb6ea83a854eaee7a9f88d8499599439f937a | [
"Apache-2.0"
] | 76 | 2018-10-19T07:54:00.000Z | 2022-02-26T13:22:36.000Z | test/test_helper.exs | wojtekmach/plug_cowboy | 45edb6ea83a854eaee7a9f88d8499599439f937a | [
"Apache-2.0"
] | 46 | 2018-10-18T21:38:44.000Z | 2021-12-29T12:39:25.000Z | ExUnit.start(assert_receive_timeout: 1000)
Logger.configure_backend(:console, colors: [enabled: false], metadata: [:request_id])
| 43 | 85 | 0.79845 |
731f10cf47ca2aef197da5ce24181198aae4cb3e | 1,157 | ex | Elixir | core/sup_tree_core/executor_pool/async_job_log_writer.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:11.000Z | 2020-08-27T18:43:11.000Z | core/sup_tree_core/executor_pool/async_job_log_writer.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | null | null | null | core/sup_tree_core/executor_pool/async_job_log_writer.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:21.000Z | 2020-08-27T18:43:21.000Z | # Copyright(c) 2015-2020 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule AntikytheraCore.ExecutorPool.AsyncJobLog.Writer do
@moduledoc """
A `GenServer` for logging, which is used in `AntikytheraCore.ExecutorPool.AsyncJobRunner`.
"""
use GenServer
alias Antikythera.{Time, ContextId}
alias AntikytheraCore.GearLog.LogRotation
@rotate_interval 24 * 60 * 60 * 1000
def start_link([]) do
GenServer.start_link(__MODULE__, [], [name: __MODULE__])
end
@impl true
def init([]) do
log_file_path = AntikytheraCore.Path.core_log_file_path("async_job")
state = LogRotation.init(@rotate_interval, log_file_path, write_to_terminal: false)
{:ok, state}
end
@impl true
def handle_cast(message, state) do
log = {Time.now(), :info, ContextId.system_context(), message}
{:noreply, LogRotation.write_log(state, log)}
end
@impl true
def handle_info(:rotate, state) do
{:noreply, LogRotation.rotate(state)}
end
@impl true
def terminate(_reason, state) do
LogRotation.terminate(state)
end
#
# Public API
#
def info(message) do
GenServer.cast(__MODULE__, message)
end
end
| 23.14 | 92 | 0.709594 |
731f1767bb42f7f9bb5aac0ea707b82fc2baa572 | 2,671 | exs | Elixir | test/cog/chat/hipchat/templates/embedded/relay_group_info_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | test/cog/chat/hipchat/templates/embedded/relay_group_info_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | test/cog/chat/hipchat/templates/embedded/relay_group_info_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | defmodule Cog.Chat.HipChat.Templates.Embedded.RelayGroupInfoTest do
use Cog.TemplateCase
test "relay-group-info template with one input" do
data = %{"results" => [%{"name" => "foo",
"created_at" => "some point in the past",
"relays" => [%{"name" => "my_relay"},
%{"name" => "my_other_relay"}],
"bundles" => [%{"name" => "foo"},
%{"name" => "bar"},
%{"name" => "baz"}]}]}
expected = """
<strong>Name:</strong> foo<br/>
<strong>Relays:</strong> my_relay, my_other_relay<br/>
<strong>Bundles:</strong> foo, bar, baz
""" |> String.replace("\n", "")
assert_rendered_template(:hipchat, :embedded, "relay-group-info", data, expected)
end
test "relay-group-info template with multiple inputs" do
data = %{"results" => [%{"name" => "foo",
"created_at" => "some point in the past",
"relays" => [%{"name" => "my_relay"},
%{"name" => "my_other_relay"}],
"bundles" => [%{"name" => "foo"},
%{"name" => "bar"},
%{"name" => "baz"}]},
%{"name" => "bar",
"created_at" => "long long ago in a galaxy far away",
"relays" => [],
"bundles" => [%{"name" => "foo"},
%{"name" => "bar"},
%{"name" => "baz"}]},
%{"name" => "baz",
"created_at" => "right... NOW",
"relays" => [%{"name" => "my_relay"},
%{"name" => "my_other_relay"}],
"bundles" => []}
]}
expected = """
<strong>Name:</strong> foo<br/>
<strong>Relays:</strong> my_relay, my_other_relay<br/>
<strong>Bundles:</strong> foo, bar, baz<br/>
<br/>
<strong>Name:</strong> bar<br/>
<strong>Relays:</strong> No relays<br/>
<strong>Bundles:</strong> foo, bar, baz<br/>
<br/>
<strong>Name:</strong> baz<br/>
<strong>Relays:</strong> my_relay, my_other_relay<br/>
<strong>Bundles:</strong> No bundles assigned
""" |> String.replace("\n", "")
assert_rendered_template(:hipchat, :embedded, "relay-group-info", data, expected)
end
end
| 44.516667 | 85 | 0.403969 |
731f33e006e91a5dbf51a4e82a6c9560334dc079 | 1,660 | ex | Elixir | lib/password_validator/validators/character_set_validator/config.ex | philippneugebauer/password-validator | 01d8f6532d4391e4d74f10548c2b32b2449eeb66 | [
"Apache-2.0"
] | null | null | null | lib/password_validator/validators/character_set_validator/config.ex | philippneugebauer/password-validator | 01d8f6532d4391e4d74f10548c2b32b2449eeb66 | [
"Apache-2.0"
] | null | null | null | lib/password_validator/validators/character_set_validator/config.ex | philippneugebauer/password-validator | 01d8f6532d4391e4d74f10548c2b32b2449eeb66 | [
"Apache-2.0"
] | null | null | null | defmodule PasswordValidator.Validators.CharacterSetValidator.Config do
defstruct [:upper_case, :lower_case, :numbers, :special, :allowed_special_characters]
@type keys :: :upper_case | :lower_case | :numbers | :special
alias PasswordValidator.Validators.CharacterSetValidator.Config
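  # Illustrative sketch of the options shape this module consumes (the
  # values below are examples, not defaults):
  #
  #     from_options(
  #       character_set: [
  #         lower_case: 1,                  # shorthand for [1, :infinity]
  #         upper_case: [1, :infinity],
  #         numbers: [0, :infinity],
  #         special: [0, 0],                # forbid special characters
  #         allowed_special_characters: "!?@"
  #       ]
  #     )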
@spec from_options(list({atom(), any()})) :: %Config{}
def from_options(opts) do
config = Keyword.get(opts, :character_set, [])
%__MODULE__{
lower_case: character_set_config(config, :lower_case),
upper_case: character_set_config(config, :upper_case),
numbers: character_set_config(config, :numbers),
special: character_set_config(config, :special),
allowed_special_characters: allowed_special_characters_config(config)
}
end
@spec character_set_config(list(), keys()) :: list(integer() | :infinity)
defp character_set_config(opts, key) do
option = Keyword.get(opts, key, [0, :infinity])
case option do
number when is_integer(number) -> [number, :infinity]
[min, max] when is_integer(min) and is_integer(max) -> [min, max]
[min, :infinity] when is_integer(min) -> [min, :infinity]
_ -> raise "Invalid configuration"
end
end
@spec allowed_special_characters_config(list()) :: String.t | :all
defp allowed_special_characters_config(opts) do
case Keyword.get(opts, :allowed_special_characters, :all) do
allowed_characters when is_binary(allowed_characters) -> allowed_characters
:all -> :all
invalid_config ->
raise "Invalid allowed_special_characters config. Got: #{inspect invalid_config} when a binary (string) was expected"
end
end
end
| 40.487805 | 125 | 0.71747 |
731f53d0a696c59b26b7e0abf172933a6c0bf5eb | 4,132 | ex | Elixir | lib/request/pci_proxies/channex_pci.ex | ChannexIO/ex_open_travel | 51a1101f55bc2d12a093237bb9ef64ef8a4d3091 | [
"Apache-2.0"
] | null | null | null | lib/request/pci_proxies/channex_pci.ex | ChannexIO/ex_open_travel | 51a1101f55bc2d12a093237bb9ef64ef8a4d3091 | [
"Apache-2.0"
] | null | null | null | lib/request/pci_proxies/channex_pci.ex | ChannexIO/ex_open_travel | 51a1101f55bc2d12a093237bb9ef64ef8a4d3091 | [
"Apache-2.0"
] | null | null | null | defmodule ExOpenTravel.Request.PCIProxies.ChannexPCI do
alias ExOpenTravel.Request
alias ExOpenTravel.Request.Helpers
@pci_url "https://pci.channex.io/api/v1/capture"
@type options :: keyword() | any()
@spec proxy_send({String.t(), map()}, map(), options) ::
{:ok, map(), map()} | {:error, map(), map()}
def proxy_send(payload, credentials, opts \\ [])
def proxy_send(
{document, %{success: true} = meta},
%{
endpoint: endpoint,
pci_proxy_fetch_header: fetch_header,
pci_proxy_profile_name: profile_name,
pci_proxy_api_key: api_key
},
opts
) do
with url <- get_url(endpoint, profile_name, api_key),
{:ok, response, meta} <- Request.send({document, meta}, %{endpoint: url}, opts) do
enriched_meta =
with {:ok, headers} <- parse_headers(meta, fetch_header),
{:ok, pci} <- convert_token_headers(headers) do
Map.put(meta, :pci, pci)
else
{:error, error} ->
meta
|> Map.put(:success, false)
|> Map.update(:errors, error, &[error | &1])
end
{:ok, response, enriched_meta}
end
end
def proxy_send({document, meta}, credentials, opts) do
updated_meta =
meta
|> Helpers.update_meta_if_unfounded(credentials, :endpoint)
|> Helpers.update_meta_if_unfounded(credentials, :pci_proxy_fetch_header)
|> Helpers.update_meta_if_unfounded(credentials, :pci_proxy_profile_name)
|> Helpers.update_meta_if_unfounded(credentials, :pci_proxy_api_key)
|> Map.put(:success, false)
Request.send({document, updated_meta}, credentials, opts)
end
defp get_url(endpoint, profile_name, api_key) do
"#{@pci_url}?apikey=#{api_key}&method=post&profile=#{profile_name}&url=#{endpoint}&savecvv=true"
end
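  # For illustration (made-up profile, key, and endpoint), get_url/3 produces:
  #
  #   "https://pci.channex.io/api/v1/capture?apikey=KEY&method=post&profile=my_pms&url=https://pms.example.com/api&savecvv=true"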
def parse_headers(%{headers: headers}, fetch_header) do
{
:ok,
# The direction of the headers sequence is critical!
%{
tokens: headers |> get_from(fetch_header) |> split(","),
errors: headers |> get_from("x-pci-channex-errors") |> split(","),
warnings: headers |> get_from("x-pci-channex-warnings") |> split(",")
}
}
end
  def parse_headers(_meta, _header), do: {:error, "Meta does not contain any headers"}
def convert_token_headers(meta) do
tokens = Map.get(meta, :tokens) || []
errors = Map.get(meta, :errors) || List.duplicate("", length(tokens))
warnings = Map.get(meta, :warnings) || List.duplicate("", length(tokens))
combine_headers([], tokens, errors, warnings)
end
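  # combine_headers/4 below zips the three lists positionally; any length
  # mismatch falls through to the error clauses further down.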
defp combine_headers(result, [], [], []), do: {:ok, result}
defp combine_headers(result, [token | tokens], [error | errors], [warning | warnings]) do
result
|> result_headers_update(token, error, warning)
|> combine_headers(tokens, errors, warnings)
end
defp combine_headers(_result, _tokens, _errors, []) do
{:error, "Headers contains non consistent warnings list"}
end
defp combine_headers(_result, [], [], _warnings) do
{:error, "Headers contains non consistent warnings list"}
end
defp combine_headers(_result, [], _errors, _warnings) do
{:error, "Headers contains non consistent errors list"}
end
defp combine_headers(_result, _tokens, [], _warnings) do
{:error, "Headers contains non consistent errors list"}
end
defp result_headers_update(result, token, error, warning) do
insertion =
%{}
|> insert_header(:token, token)
|> insert_header(:error, error)
|> insert_header(:warning, warning)
|> List.wrap()
result ++ insertion
end
defp insert_header(map, _, ""), do: map
defp insert_header(map, name, header), do: Map.put(map, name, header)
defp split(nil, _delimiter), do: nil
defp split(header, delimiter), do: header |> String.split(delimiter) |> Enum.map(&String.trim/1)
defp get_from(headers, key), do: Enum.find_value(headers, &compare_keys(&1, key))
defp compare_keys([header_key, header_value], key) do
if String.downcase(header_key) == String.downcase(key), do: header_value
end
end
| 32.28125 | 100 | 0.646902 |
731f5406d946468a67351bc37e8080fe93cc2f20 | 2,874 | ex | Elixir | lib/aoc2019/day10.ex | pauljxtan/aoc2019 | 77abd8720a82b6d6080419fe3f34acbb781899d4 | [
"WTFPL"
] | 1 | 2020-03-27T14:57:32.000Z | 2020-03-27T14:57:32.000Z | lib/aoc2019/day10.ex | pauljxtan/aoc2019 | 77abd8720a82b6d6080419fe3f34acbb781899d4 | [
"WTFPL"
] | 1 | 2019-12-07T14:32:47.000Z | 2019-12-08T16:18:07.000Z | lib/aoc2019/day10.ex | pauljxtan/aoc2019 | 77abd8720a82b6d6080419fe3f34acbb781899d4 | [
"WTFPL"
] | null | null | null | defmodule Aoc2019.Day10 do
@behaviour DaySolution
def solve_part1(),
do: input_map() |> parse_asteroids() |> best_location() |> (fn {_, count} -> count end).()
def solve_part2() do
{station_coord, _} = input_map() |> parse_asteroids() |> best_location()
input_map()
|> parse_asteroids()
|> vaporization_order(station_coord)
|> Enum.at(199)
|> (fn {x, y} -> x * 100 + y end).()
end
defp input_map(),
do:
File.read!("inputs/input_day10")
|> String.split("\n")
|> List.delete_at(-1)
def best_location(asteroids),
do:
asteroids
|> Enum.map(fn source -> {source, asteroids |> count_detectable(source)} end)
|> Enum.max_by(fn {_, detectable} -> detectable end)
def vaporization_order(asteroids, station_coord),
do:
asteroids
|> group_by_angle(station_coord)
|> Enum.map(fn {_, grp} -> grp end)
|> vaporization_order_helper([])
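  # The helper below models the rotating laser: each pass over the angle
  # groups vaporizes the closest remaining asteroid per angle, so occluded
  # asteroids are only hit on a later full rotation.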
defp vaporization_order_helper([], order), do: order
defp vaporization_order_helper(groups, order) do
{grps, acc} =
groups
|> Enum.reduce({[], []}, fn grp, {grps, acc} ->
if length(grp) > 0 do
[head | tail] = grp
{grps ++ [tail], acc ++ [head]}
else
{grps, acc}
end
end)
vaporization_order_helper(grps, order ++ acc)
end
def parse_asteroids(map),
do:
for(
y <- 0..(length(map) - 1),
x <- 0..((Enum.at(map, 0) |> String.length()) - 1),
do: {{x, y}, if(map |> Enum.at(y) |> String.at(x) == "#", do: true, else: false)}
)
|> Enum.filter(fn {_, has_asteroid} -> has_asteroid end)
|> Enum.map(fn {coord, _} -> coord end)
|> Enum.sort()
# This method counts unique angles b/w the source and other asteroids
def count_detectable(asteroids, source),
do:
asteroids
|> Enum.filter(&(&1 != source))
|> Enum.map(fn asteroid -> angle(source, asteroid) end)
|> Enum.uniq()
|> Enum.count()
def group_by_angle(asteroids, source),
do:
asteroids
|> Enum.filter(&(&1 != source))
|> Enum.reduce(%{}, fn ast, acc ->
ang = angle(source, ast)
acc |> Map.put(ang, Map.get(acc, ang, []) ++ [ast])
end)
# Sort each group by increasing distance from source
|> Enum.map(fn {ang, grp} -> {ang, grp |> Enum.sort_by(fn ast -> dist(source, ast) end)} end)
# Add 2 pi to angles less than -pi/2 so that they start from -pi/2 rad (pointing up)
|> Enum.map(fn {ang, grp} ->
{if(ang < -:math.pi() / 2, do: ang + 2 * :math.pi(), else: ang), grp}
end)
# Sort in clockwise order
|> Enum.sort_by(fn {ang, _} -> ang end)
defp angle({x1, y1}, {x2, y2}), do: :math.atan2(y2 - y1, x2 - x1)
defp dist({x1, y1}, {x2, y2}), do: :math.sqrt(:math.pow(y2 - y1, 2) + :math.pow(x2 - x1, 2))
end
| 30.903226 | 99 | 0.559151 |
731f5f594fcd2b645d908bad1e8908cf4fb453bb | 1,434 | exs | Elixir | config/config.exs | LouisProulx/test-peepchat-backend | 76c6b1a1a31820bcb6c79addaeb85d5f1a0ad47c | [
"MIT"
] | null | null | null | config/config.exs | LouisProulx/test-peepchat-backend | 76c6b1a1a31820bcb6c79addaeb85d5f1a0ad47c | [
"MIT"
] | null | null | null | config/config.exs | LouisProulx/test-peepchat-backend | 76c6b1a1a31820bcb6c79addaeb85d5f1a0ad47c | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :peepchat,
ecto_repos: [Peepchat.Repo]
# Configures the endpoint
config :peepchat, Peepchat.Endpoint,
url: [host: "localhost"],
secret_key_base: "9pOXv4wMsJ7VOdzNqlbwHD+703Tn9+kmzZ36TL02Ts0WQ/QFqsob5eT3VaP1xg+u",
render_errors: [view: Peepchat.ErrorView, accepts: ~w(json)],
pubsub: [name: Peepchat.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
config :phoenix, :format_encoders,
"json-api": Poison
config :mime, :types, %{
"application/vnd.api+json" => ["json-api"]
}
config :guardian, Guardian,
allowed_algos: ["HS512"], # optional
verify_module: Guardian.JWT, # optional
issuer: "Peepchat",
ttl: { 30, :days },
allowed_drift: 2000,
verify_issuer: true, # optional
secret_key: System.get_env("GUARDIAN_SECRET") || "dhlQ5X4K8p+MTyt71Mzv0yOYqBgS2kO7Tjo7KAzwFor2WofeBvWGo59+rryz/GYW",
serializer: Peepchat.GuardianSerializer
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 31.866667 | 118 | 0.747559 |
731fa15a82b4ea219f03d464533f376fffb44dbf | 97 | ex | Elixir | rumbrella/apps/rumbl/lib/rumbl/repo.ex | antonmi/espec_phoenix | 51c44b6b1ece9fb48438b93f9765dfe834ee5544 | [
"MIT"
] | 144 | 2015-04-30T15:12:46.000Z | 2022-01-07T03:40:26.000Z | rumbrella/apps/rumbl/lib/rumbl/repo.ex | antonmi/espec_phoenix | 51c44b6b1ece9fb48438b93f9765dfe834ee5544 | [
"MIT"
] | 57 | 2015-04-29T11:26:54.000Z | 2022-01-05T13:35:10.000Z | rumbrella/apps/rumbl/lib/rumbl/repo.ex | antonmi/espec_phoenix | 51c44b6b1ece9fb48438b93f9765dfe834ee5544 | [
"MIT"
] | 46 | 2015-06-12T11:26:15.000Z | 2021-12-15T00:38:26.000Z | defmodule Rumbl.Repo do
use Ecto.Repo, otp_app: :rumbl,
adapter: Ecto.Adapters.Postgres
end
| 16.166667 | 33 | 0.752577 |
731fb35209d6728e64005ddcf4dc9c7e59683173 | 1,054 | exs | Elixir | test/image_test.exs | ricn/rapport | f00f9945a11ca3f68e79c8f5709fa170b8ac4682 | [
"MIT"
] | 59 | 2017-09-30T23:28:20.000Z | 2021-12-13T21:20:08.000Z | test/image_test.exs | ricn/rapport | f00f9945a11ca3f68e79c8f5709fa170b8ac4682 | [
"MIT"
] | 5 | 2017-10-27T07:28:13.000Z | 2021-12-13T21:56:47.000Z | test/image_test.exs | ricn/rapport | f00f9945a11ca3f68e79c8f5709fa170b8ac4682 | [
"MIT"
] | 2 | 2017-10-26T13:50:46.000Z | 2018-06-18T11:58:50.000Z | defmodule ImageTest do
use ExUnit.Case
doctest Rapport.Image
alias Rapport.Image
describe "as_data" do
test "must convert png image to correct data" do
png = File.read!(Path.join(__DIR__, "images/png.png"))
data = Image.as_data(png)
assert data =~ "image/png;base64"
assert data =~ "iVBORw0KGgoAAAANSUhEUgAAA"
end
test "must convert jpeg image to correct data" do
jpg = File.read!(Path.join(__DIR__, "images/jpg.jpg"))
data = Image.as_data(jpg)
assert data =~ "image/jpeg;base64"
assert data =~ "/9j/"
end
test "must convert gif image to correct data" do
      gif = File.read!(Path.join(__DIR__, "images/gif.gif"))
      data = Image.as_data(gif)
assert data =~ "image/gif;base64"
assert data =~ "R0lGODl"
end
test "must raise error when image is not an image" do
assert_raise ArgumentError, ~r/^Invalid image/, fn ->
no_image = File.read!(Path.join(__DIR__, "images/no.image"))
Image.as_data(no_image)
end
end
end
end
| 29.277778 | 68 | 0.640417 |
731fcddd8bb9e0c37d0628c7aa5b90a7a738c6b0 | 26,619 | ex | Elixir | lib/ash/actions/relationships.ex | alexfreska/ash | b7ffc5495fd6d956199fb74cfc1f72f58bf8505a | [
"MIT"
] | null | null | null | lib/ash/actions/relationships.ex | alexfreska/ash | b7ffc5495fd6d956199fb74cfc1f72f58bf8505a | [
"MIT"
] | null | null | null | lib/ash/actions/relationships.ex | alexfreska/ash | b7ffc5495fd6d956199fb74cfc1f72f58bf8505a | [
"MIT"
] | null | null | null | defmodule Ash.Actions.Relationships do
@moduledoc false
alias Ash.Changeset
alias Ash.Engine.Request
require Ash.Query
def handle_relationship_changes(changeset) do
Enum.reduce(changeset.relationships, changeset, fn {name, data}, changeset ->
relationship = Ash.Resource.relationship(changeset.resource, name)
add_relationship_read_requests(changeset, relationship, data)
end)
end
defp add_relationship_read_requests(%{action_type: :update} = changeset, relationship, input) do
changeset
|> add_replace_requests(relationship, input)
|> add_remove_requests(relationship, input)
|> add_add_requests(relationship, input)
|> add_belongs_to_change(relationship, input)
end
defp add_relationship_read_requests(%{action_type: :create} = changeset, relationship, input) do
{input, key} =
if relationship.cardinality == :many do
case Map.fetch(input, :replace) do
{:ok, replacing} ->
{Map.update(input, :add, replacing, &Kernel.++(&1, replacing)), :add}
_ ->
{input, :add}
end
else
{input, :replace}
end
changeset
|> add_add_requests(relationship, Map.delete(input, :remove), key)
|> add_belongs_to_change(relationship, input)
end
defp add_belongs_to_change(
changeset,
%{type: :belongs_to, source_field: source_field, destination_field: destination_field},
input
) do
case Map.fetch(input, :replace) do
{:ok, nil} ->
changeset
:error ->
changeset
{:ok, replace} ->
add_belongs_to_change(changeset, replace, source_field, destination_field)
end
end
defp add_belongs_to_change(changeset, _, _), do: changeset
defp add_add_requests(changeset, relationship, input, key \\ :add) do
case Map.fetch(input, key) do
{:ok, identifiers} ->
do_add_relationship_read_requests(changeset, relationship, identifiers, key)
:error ->
changeset
end
end
defp add_belongs_to_change(changeset, identifiers, source_field, destination_field) do
    case Map.fetch(identifiers, destination_field) do
{:ok, field_value} ->
Changeset.force_change_attribute(changeset, source_field, field_value)
_ ->
changeset
end
end
defp add_replace_requests(changeset, relationship, input) do
case Map.fetch(input, :replace) do
{:ok, identifiers} ->
changeset
|> do_add_relationship_read_requests(relationship, identifiers, :replace)
|> add_relationship_currently_related_request(relationship)
:error ->
changeset
end
end
defp add_remove_requests(changeset, relationship, input) do
case Map.fetch(input, :remove) do
{:ok, identifiers} ->
do_add_relationship_read_requests(
changeset,
relationship,
identifiers,
:remove
)
:error ->
changeset
end
end
defp do_add_relationship_read_requests(
changeset,
%{destination: destination} = relationship,
identifiers,
type
) do
relationship_name = relationship.name
{possible?, filter} =
case identifiers do
[{single_identifier, _changeset}] ->
{true, single_identifier}
[single_identifier] ->
{true, single_identifier}
nil ->
{false, []}
[] ->
{false, []}
single when is_map(single) ->
{true, Map.to_list(single)}
many ->
case Ash.Resource.primary_key(relationship.destination) do
[field] ->
{true, [{field, in: get_many_field(many, field)}]}
_ ->
{true, [or: get_many_records(many)]}
end
end
query = Ash.Query.filter(destination, ^filter)
dependencies =
if possible? do
[[:relationships, relationship_name, type, :query]]
else
[]
end
request =
Request.new(
api: changeset.api,
resource: relationship.destination,
action: Ash.Resource.primary_action!(relationship.destination, :read),
query: query,
path: [:relationships, relationship_name, type],
async?: not possible?,
authorize?: possible?,
data:
get_in(changeset.context, [
:destination_entities,
relationship.name,
relationship.destination
]) ||
Request.resolve(dependencies, fn data ->
if possible? do
query = get_in(data, [:relationships, relationship_name, type, :query])
primary_key = Ash.Resource.primary_key(query.resource)
query =
if changeset.tenant do
Ash.Query.set_tenant(query, changeset.tenant)
else
query
end
with {:ok, results} <- Ash.Actions.Read.unpaginated_read(query),
:ok <-
ensure_all_found(
changeset,
relationship_name,
type,
relationship.cardinality,
destination,
primary_key,
results
) do
{:ok, add_changes_to_results(changeset.resource, results, identifiers)}
end
else
{:ok, []}
end
end),
name: "read prior to write related #{relationship.name}"
)
changeset
|> Changeset.add_requests(request)
|> Changeset.changes_depend_on([:relationships, relationship_name, type, :data])
end
defp ensure_all_found(
changeset,
relationship_name,
type,
cardinality,
destination,
primary_key,
results
) do
search_keys =
case cardinality do
:one ->
changeset.relationships
|> get_in([relationship_name, type])
|> List.wrap()
:many ->
changeset.relationships
|> get_in([relationship_name, type])
|> Kernel.||([])
end
search_keys
|> Enum.map(fn
{pkey, %Ash.Changeset{}} ->
Map.take(pkey, primary_key)
pkey ->
Map.take(pkey, primary_key)
end)
|> Enum.reject(fn pkey ->
Enum.any?(results, fn result ->
result
|> Map.take(primary_key)
|> Kernel.==(pkey)
end)
end)
|> case do
[] ->
:ok
not_found ->
{:error,
Enum.map(not_found, fn key ->
Ash.Error.Query.NotFound.exception(resource: destination, primary_key: key)
end)}
end
end
defp add_changes_to_results(resource, results, identifiers) do
pkey = Ash.Resource.primary_key(resource)
Enum.map(results, fn result ->
case find_changes(identifiers, result, pkey) do
nil ->
result
changes ->
{result, changes}
end
end)
end
defp find_changes(identifiers, result, pkey) do
Enum.find_value(identifiers, fn
{identifier, changes} ->
cond do
is_map(identifier) && Map.take(identifier, pkey) == Map.take(result, pkey) ->
changes
match?([_], pkey) && identifier == Map.get(result, List.first(pkey)) ->
changes
true ->
nil
end
_ ->
nil
end)
end
defp get_many_field(records, field) do
Enum.map(records, fn
{record, _changes} ->
Map.get(record, field)
record ->
Map.get(record, field)
end)
end
defp get_many_records(records) do
Enum.map(records, fn
{record, _changes} ->
record
record ->
record
end)
end
def changeset(changeset) do
if changeset.relationships == %{} do
changeset
else
Request.resolve(changeset.change_dependencies, fn data ->
new_changeset =
data
|> Map.get(:relationships, %{})
|> Enum.reduce(changeset, fn {relationship, relationship_data}, changeset ->
relationship = Ash.Resource.relationship(changeset.resource, relationship)
relationship_data =
relationship_data
|> Enum.into(%{}, fn {key, value} ->
{key, value.data}
end)
|> Map.put_new(:current, [])
add_relationship_to_changeset(changeset, relationship, relationship_data)
end)
{:ok, new_changeset}
end)
end
end
defp add_relationship_to_changeset(
changeset,
%{type: :has_one} = relationship,
relationship_data
) do
case relationship_data do
%{current: [current], replace: []} ->
changeset
|> unrelate_has_one(relationship, current)
|> relate_has_one(relationship, nil)
|> add_relationship_change_metadata(relationship.name, %{current: current, replace: nil})
%{current: [current], replace: [new]} ->
changeset
|> unrelate_has_one(relationship, current)
|> relate_has_one(relationship, new)
|> add_relationship_change_metadata(relationship.name, %{current: current, replace: new})
%{current: [], replace: [new]} ->
changeset
|> relate_has_one(relationship, new)
|> add_relationship_change_metadata(relationship.name, %{current: nil, replace: new})
end
end
defp add_relationship_to_changeset(
changeset,
%{type: :belongs_to} = relationship,
relationship_data
) do
relationship_data = Map.put_new(relationship_data, :current, [])
case relationship_data do
%{current: [current], replace: []} ->
changeset
|> relate_belongs_to(relationship, nil)
|> add_relationship_change_metadata(relationship.name, %{current: current, replace: nil})
%{current: [], replace: [new]} ->
changeset
|> relate_belongs_to(relationship, new)
|> add_relationship_change_metadata(relationship.name, %{current: nil, replace: new})
%{current: [current], replace: [new]} ->
changeset
|> relate_belongs_to(relationship, new)
|> add_relationship_change_metadata(relationship.name, %{
current: current,
replace: new
})
end
end
defp add_relationship_to_changeset(
changeset,
%{type: :has_many, destination: destination} = relationship,
relationship_data
) do
pkey = Ash.Resource.primary_key(destination)
relationship_data =
case relationship_data do
%{replace: values, current: current} ->
split_relationship_data(current, values, pkey)
other ->
other
end
changeset
|> set_relationship(relationship.name, Map.get(relationship_data, :current, []))
|> remove_has_many(relationship, relationship_data, pkey)
|> relate_has_many(relationship, relationship_data, pkey)
|> add_relationship_change_metadata(relationship.name, relationship_data)
end
defp add_relationship_to_changeset(
changeset,
%{type: :many_to_many, destination: destination} = relationship,
relationship_data
) do
pkey = Ash.Resource.primary_key(destination)
join_pkey = Ash.Resource.primary_key(relationship.through)
relationship_data =
case relationship_data do
%{replace: values, current: current} ->
split_relationship_data(current, values, pkey)
other ->
other
end
changeset
|> set_relationship(relationship.name, Map.get(relationship_data, :current, []))
|> remove_many_to_many(relationship, relationship_data, join_pkey, pkey)
|> relate_many_to_many(relationship, relationship_data, pkey)
|> add_relationship_change_metadata(relationship.name, relationship_data)
end
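  # Diffs the currently related records against a requested `replace` list,
  # matching on the destination primary key: entries only in `replace` are
  # additions and entries only in `current` are removals.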
defp split_relationship_data(current, replace, pkey) do
adding =
Enum.reject(replace, fn
{_, _} ->
false
replacing ->
any_pkey_matches?(current, replacing, pkey)
end)
removing = Enum.reject(current, &any_pkey_matches?(replace, &1, pkey))
%{add: adding, remove: removing, current: current}
end
defp relate_many_to_many(
changeset,
relationship,
%{add: add, current: current},
pkey
)
when is_list(add) do
Enum.reduce(add, changeset, fn
{to_relate_record, join_changeset}, changeset ->
do_relate_many_to_many(changeset, relationship, to_relate_record, join_changeset)
to_relate_record, changeset ->
case find_pkey_match(current, to_relate_record, pkey) do
nil ->
do_relate_many_to_many(changeset, relationship, to_relate_record)
_record ->
changeset
end
end)
end
defp relate_many_to_many(changeset, _, _, _) do
changeset
end
defp do_relate_many_to_many(changeset, relationship, to_relate_record, join_changeset \\ nil) do
Changeset.after_action(changeset, fn changeset, record ->
join_attrs = %{
relationship.source_field_on_join_table => Map.get(record, relationship.source_field),
relationship.destination_field_on_join_table =>
Map.get(to_relate_record, relationship.destination_field)
}
join_changeset
|> Kernel.||(Ash.Changeset.new(relationship.through))
|> Ash.Changeset.force_change_attributes(join_attrs)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> changeset.api.create(upsert?: true, return_notifications?: true)
|> case do
{:ok, join_row, notifications} ->
{:ok,
record
|> remove_from_set_relationship(
relationship.name,
to_relate_record,
Ash.Resource.primary_key(relationship.destination)
)
|> add_to_set_relationship(relationship.name, to_relate_record)
|> remove_from_set_relationship(
relationship.join_relationship,
join_row,
Ash.Resource.primary_key(relationship.through)
)
|> add_to_set_relationship(relationship.join_relationship, join_row), notifications}
{:error, error} ->
{:error, error}
end
end)
end
defp remove_many_to_many(
changeset,
relationship,
%{current: current, remove: remove},
join_pkey,
pkey
) do
Enum.reduce(remove, changeset, fn to_remove_record, changeset ->
case find_pkey_match(current, to_remove_record, pkey) do
nil ->
changeset
to_remove_record ->
do_remove_many_to_many(changeset, relationship, to_remove_record, join_pkey, pkey)
end
end)
end
defp remove_many_to_many(changeset, _, _, _, _) do
changeset
end
defp do_remove_many_to_many(changeset, relationship, to_remove_record, join_pkey, pkey) do
Changeset.after_action(changeset, fn changeset, record ->
filter = [
{relationship.source_field_on_join_table, Map.get(record, relationship.source_field)},
{
relationship.destination_field_on_join_table,
Map.get(to_remove_record, relationship.destination_field)
}
]
relationship.through
|> Ash.Query.set_tenant(changeset.tenant)
|> Ash.Query.filter(^filter)
|> changeset.api.read_one()
|> case do
{:ok, nil} ->
changeset
{:error, error} ->
{:error, error}
{:ok, join_row} ->
destroy_and_remove(
changeset.api,
join_row,
to_remove_record,
record,
relationship,
join_pkey,
pkey,
changeset.tenant
)
end
end)
end
defp destroy_and_remove(
api,
join_row,
to_remove_record,
record,
relationship,
join_pkey,
pkey,
tenant
) do
join_row
|> Ash.Changeset.new()
|> Ash.Changeset.set_tenant(tenant)
|> api.destroy(return_notifications?: true)
|> case do
{:ok, notifications} ->
{:ok,
record
|> remove_from_set_relationship(relationship.join_relationship, join_row, join_pkey)
|> remove_from_set_relationship(relationship.name, to_remove_record, pkey),
notifications}
{:error, error} ->
{:error, error}
end
end
defp relate_has_many(changeset, relationship, %{add: add, current: _current}, _pkey)
when is_list(add) do
Enum.reduce(add, changeset, fn to_relate_record, changeset ->
Changeset.after_action(changeset, fn changeset, record ->
to_relate_record
|> Ash.Changeset.new()
|> Ash.Changeset.force_change_attribute(
relationship.destination_field,
Map.get(record, relationship.source_field)
)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> changeset.api.update(return_notifications?: true)
|> case do
{:ok, related, notifications} ->
{:ok, add_to_set_relationship(record, relationship.name, related), notifications}
{:error, error} ->
{:error, error}
end
end)
end)
end
defp relate_has_many(changeset, _, _, _) do
changeset
end
defp remove_has_many(changeset, relationship, %{current: _current, remove: remove}, pkey) do
Enum.reduce(remove, changeset, fn to_relate_record, changeset ->
Changeset.after_action(changeset, fn changeset, record ->
to_relate_record
|> Ash.Changeset.new()
|> Ash.Changeset.force_change_attribute(relationship.destination_field, nil)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> changeset.api.update(return_notifications?: true)
|> case do
{:ok, related, notifications} ->
{:ok, remove_from_set_relationship(record, relationship.name, related, pkey),
notifications}
{:error, error} ->
{:error, error}
end
end)
end)
end
defp remove_has_many(changeset, _, _, _) do
changeset
end
defp find_pkey_match(records, to_relate_record, pkey) do
search_pkey = Map.take(to_relate_record, pkey)
Enum.find(records, fn
{record, _changes} ->
Map.take(record, pkey) == search_pkey
record ->
Map.take(record, pkey) == search_pkey
end)
end
defp any_pkey_matches?(records, to_relate_record, pkey) do
not is_nil(find_pkey_match(records, to_relate_record, pkey))
end
defp set_relationship(changeset, relationship_name, value) do
Map.update!(changeset, :data, fn data ->
case value do
values when is_list(values) ->
Map.put(data, relationship_name, Enum.map(values, &clear_relationships/1))
value ->
Map.put(data, relationship_name, clear_relationships(value))
end
end)
end
defp add_to_set_relationship(record, relationship_name, to_relate) do
Map.update!(record, relationship_name, fn
%Ash.NotLoaded{type: :relationship} -> [clear_relationships(to_relate)]
set_relationship -> [clear_relationships(to_relate) | set_relationship]
end)
end
defp remove_from_set_relationship(record, relationship_name, to_remove, pkey) do
Map.update!(record, relationship_name, fn
%Ash.NotLoaded{} ->
[]
set_relationship ->
search_pkey = Map.take(to_remove, pkey)
Enum.reject(set_relationship, fn set -> Map.take(set, pkey) == search_pkey end)
end)
end
defp relate_belongs_to(changeset, relationship, new) do
changeset =
if new do
Changeset.force_change_attribute(
changeset,
relationship.source_field,
Map.get(new, relationship.destination_field)
)
else
Changeset.force_change_attribute(changeset, relationship.source_field, nil)
end
Changeset.after_action(changeset, fn _changeset, result ->
if new do
{:ok, Map.put(result, relationship.name, clear_relationships(new))}
else
{:ok, Map.put(result, relationship.name, nil)}
end
end)
end
defp relate_has_one(changeset, relationship, to_relate_record) do
Changeset.after_action(changeset, fn _changeset, record ->
if to_relate_record do
to_relate_record
|> Ash.Changeset.new()
|> Ash.Changeset.force_change_attribute(
relationship.destination_field,
Map.get(record, relationship.source_field)
)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> changeset.api.update(return_notifications?: true)
|> case do
{:ok, related, notifications} ->
{:ok, Map.put(record, relationship.name, clear_relationships(related)), notifications}
{:error, error} ->
{:error, error}
end
else
{:ok, Map.put(record, relationship.name, nil)}
end
end)
end
defp unrelate_has_one(changeset, relationship, to_relate_record) do
Changeset.after_action(changeset, fn changeset, record ->
to_relate_record
|> Changeset.new()
|> Changeset.force_change_attribute(relationship.destination_field, nil)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> changeset.api.update(return_notifications?: true)
|> case do
{:ok, _related, notifications} ->
{:ok, record, notifications}
{:error, error} ->
{:error, error}
end
end)
end
defp add_relationship_change_metadata(changeset, relationship_name, data) do
Map.update(
changeset,
:relationships,
%{relationship_name => data},
&Map.put(&1, relationship_name, data)
)
end
defp add_relationship_currently_related_request(
changeset,
%{type: :many_to_many} = relationship
) do
join_through_request = many_to_many_join_resource_request(changeset, relationship)
destination_request = many_to_many_destination_request(changeset, relationship)
requests = [join_through_request, destination_request]
changeset
|> Changeset.add_requests(requests)
|> Changeset.changes_depend_on([:relationships, relationship.name, :current, :data])
end
defp add_relationship_currently_related_request(
changeset,
%{destination: destination} = relationship
) do
value = Changeset.get_attribute(changeset, relationship.source_field)
filter_statement = [{relationship.destination_field, value}]
request =
Request.new(
api: changeset.api,
resource: destination,
action: Ash.Resource.primary_action!(relationship.destination, :read),
path: [:relationships, relationship.name, :current],
query: Ash.Query.filter(destination, ^filter_statement),
data:
Request.resolve([[:relationships, relationship.name, :current, :query]], fn data ->
query = get_in(data, [:relationships, relationship.name, :current, :query])
query =
if changeset.tenant do
Ash.Query.set_tenant(query, changeset.tenant)
else
query
end
Ash.Actions.Read.unpaginated_read(query)
end),
name: "Read related #{relationship.name} before replace"
)
changeset
|> Changeset.add_requests(request)
|> Changeset.changes_depend_on([:relationships, relationship.name, :current, :data])
end
defp many_to_many_join_resource_request(
changeset,
%{through: through} = relationship
) do
value = Changeset.get_attribute(changeset, relationship.source_field)
filter_statement = [{relationship.source_field_on_join_table, value}]
Request.new(
api: changeset.api,
resource: through,
action: Ash.Resource.primary_action!(relationship.destination, :read),
path: [:relationships, relationship.name, :current_join],
query: Ash.Query.filter(through, ^filter_statement),
data:
Request.resolve([[:relationships, relationship.name, :current_join, :query]], fn data ->
query = get_in(data, [:relationships, relationship.name, :current_join, :query])
query =
if changeset.tenant do
Ash.Query.set_tenant(query, changeset.tenant)
else
query
end
Ash.Actions.Read.unpaginated_read(query)
end),
name: "Read related join for #{relationship.name} before replace"
)
end
defp many_to_many_destination_request(
changeset,
%{destination: destination, name: name} = relationship
) do
Request.new(
api: changeset.api,
resource: destination,
action: Ash.Resource.primary_action!(relationship.destination, :read),
path: [:relationships, name, :current],
query:
Request.resolve(
[[:relationships, name, :current_join, :data]],
fn %{relationships: %{^name => %{current_join: %{data: current_join}}}} ->
field_values =
Enum.map(current_join, &Map.get(&1, relationship.destination_field_on_join_table))
filter_statement = [{relationship.destination_field, in: field_values}]
{:ok,
relationship.destination
|> Ash.Query.new(changeset.api)
|> Ash.Query.filter(^filter_statement)}
end
),
data:
Request.resolve(
[[:relationships, name, :current, :query]],
fn %{
relationships: %{
^name => %{current: %{query: query}}
}
} ->
query =
if changeset.tenant do
Ash.Query.set_tenant(query, changeset.tenant)
else
query
end
Ash.Actions.Read.unpaginated_read(query)
end
),
name: "Read related join for #{name} before replace"
)
end
defp clear_relationships(%resource{} = record) do
resource
|> Ash.Resource.relationships()
|> Enum.reduce(record, fn relationship, record ->
not_loaded = %Ash.NotLoaded{
type: :relationship,
field: relationship.name
}
Map.put(record, relationship.name, not_loaded)
end)
end
end
| 29.54384 | 98 | 0.611518 |
731fdbb06f560cf1f66fe73fbce59dd26178a315 | 2,601 | exs | Elixir | mix.exs | donatoaz/custom_rpi | ecd3b05221e2984ab00e73b5c4b6780db893950d | [
"Apache-2.0"
] | null | null | null | mix.exs | donatoaz/custom_rpi | ecd3b05221e2984ab00e73b5c4b6780db893950d | [
"Apache-2.0"
] | null | null | null | mix.exs | donatoaz/custom_rpi | ecd3b05221e2984ab00e73b5c4b6780db893950d | [
"Apache-2.0"
] | null | null | null | defmodule NervesSystemRpi.MixProject do
use Mix.Project
@app :custom_rpi
@version Path.join(__DIR__, "VERSION")
|> File.read!()
|> String.trim()
def project do
[
app: @app,
version: @version,
elixir: "~> 1.6",
compilers: Mix.compilers() ++ [:nerves_package],
nerves_package: nerves_package(),
description: description(),
package: package(),
deps: deps(),
      aliases: [loadconfig: [&bootstrap/1], docs: ["docs", &copy_images/1]],
docs: [extras: ["README.md"], main: "readme"]
]
end
def application do
[]
end
defp bootstrap(args) do
set_target()
Application.start(:nerves_bootstrap)
Mix.Task.run("loadconfig", args)
end
defp nerves_package do
[
type: :system,
artifact_sites: [
{:github_releases, "donatoaz/#{@app}"}
],
build_runner_opts: build_runner_opts(),
platform: Nerves.System.BR,
platform_config: [
defconfig: "nerves_defconfig"
],
checksum: package_files()
]
end
defp deps do
[
{:nerves, "~> 1.3", runtime: false},
{:nerves_system_br, "1.7.2", runtime: false},
{:nerves_toolchain_armv6_rpi_linux_gnueabi, "1.1.0", runtime: false},
{:nerves_system_linter, "~> 0.3.0", runtime: false},
{:ex_doc, "~> 0.18", only: [:dev, :test], runtime: false}
]
end
defp description do
"""
Nerves System - Raspberry Pi A+ / B+ / B
"""
end
defp package do
[
maintainers: ["Frank Hunleth", "Justin Schneck"],
files: package_files(),
licenses: ["Apache 2.0"],
links: %{"Github" => "https://github.com/donatoaz/#{@app}"}
]
end
defp package_files do
[
"fwup_include",
"rootfs_overlay",
"CHANGELOG.md",
"cmdline.txt",
"config.txt",
"fwup-revert.conf",
"fwup.conf",
"LICENSE",
"linux-4.19.defconfig",
"mix.exs",
"nerves_defconfig",
"post-build.sh",
"post-createfs.sh",
"ramoops.dts",
"README.md",
"VERSION"
]
end
# Copy the images referenced by docs, since ex_doc doesn't do this.
defp copy_images(_) do
File.cp_r("assets", "doc/assets")
end
defp build_runner_opts() do
if primary_site = System.get_env("BR2_PRIMARY_SITE") do
[make_args: ["BR2_PRIMARY_SITE=#{primary_site}"]]
else
[]
end
end
defp set_target() do
if function_exported?(Mix, :target, 1) do
apply(Mix, :target, [:target])
else
System.put_env("MIX_TARGET", "target")
end
end
end
| 22.422414 | 76 | 0.577086 |
731fe2db810ca833018ccf15af80819dddcc8749 | 182 | ex | Elixir | lib/space_mongers/unauthenticated_api_client.ex | ericgroom/space_mongers | e9f979318dca2e8ee4f685014bae585db15cd117 | [
"MIT"
] | 2 | 2021-03-18T02:00:29.000Z | 2021-04-18T06:11:07.000Z | lib/space_mongers/unauthenticated_api_client.ex | ericgroom/space_mongers | e9f979318dca2e8ee4f685014bae585db15cd117 | [
"MIT"
] | null | null | null | lib/space_mongers/unauthenticated_api_client.ex | ericgroom/space_mongers | e9f979318dca2e8ee4f685014bae585db15cd117 | [
"MIT"
] | null | null | null | defmodule SpaceMongers.UnauthenticatedApiClient do
@moduledoc false
use Tesla
plug(Tesla.Middleware.BaseUrl, "https://api.spacetraders.io")
plug(Tesla.Middleware.JSON)
end
| 20.222222 | 63 | 0.785714 |
732005c7955c7d6b15df18fba762229ae826293b | 77 | ex | Elixir | lib/built_with_elixir_web/views/page_view.ex | ospaarmann/built_with_elixir | 5919107c79f200b2035352c7ef9714f8a8f6ff4c | [
"MIT"
] | 8 | 2018-04-15T19:01:14.000Z | 2018-11-19T16:13:56.000Z | lib/built_with_elixir_web/views/page_view.ex | ospaarmann/built_with_elixir | 5919107c79f200b2035352c7ef9714f8a8f6ff4c | [
"MIT"
] | 8 | 2018-04-14T03:32:12.000Z | 2018-05-15T04:28:27.000Z | lib/built_with_elixir_web/views/page_view.ex | ospaarmann/built_with_elixir | 5919107c79f200b2035352c7ef9714f8a8f6ff4c | [
"MIT"
] | 2 | 2021-09-22T13:44:09.000Z | 2021-12-21T14:26:03.000Z | defmodule BuiltWithElixirWeb.PageView do
use BuiltWithElixirWeb, :view
end
| 19.25 | 40 | 0.844156 |
7320315e76caf461db2bd2cc1a137ffdc7b692e4 | 3,467 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_annotated_conversation_dataset.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_annotated_conversation_dataset.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_annotated_conversation_dataset.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2021-03-04T13:43:47.000Z | 2021-03-04T13:43:47.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1AnnotatedConversationDataset do
@moduledoc """
Represents an annotated conversation dataset. ConversationDataset can have multiple AnnotatedConversationDataset, each of them represents one result from one annotation task. AnnotatedConversationDataset can only be generated from annotation task, which will be triggered by LabelConversation.
## Attributes
* `completedExampleCount` (*type:* `String.t`, *default:* `nil`) - Output only. Number of examples that have annotations in the annotated conversation dataset.
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Creation time of this annotated conversation dataset.
* `description` (*type:* `String.t`, *default:* `nil`) - Optional. The description of the annotated conversation dataset. Maximum of 10000 bytes.
* `displayName` (*type:* `String.t`, *default:* `nil`) - Required. The display name of the annotated conversation dataset. It's specified when user starts an annotation task. Maximum of 64 bytes.
* `exampleCount` (*type:* `String.t`, *default:* `nil`) - Output only. Number of examples in the annotated conversation dataset.
* `name` (*type:* `String.t`, *default:* `nil`) - Output only. AnnotatedConversationDataset resource name. Format: `projects//conversationDatasets//annotatedConversationDatasets/`
* `questionTypeName` (*type:* `String.t`, *default:* `nil`) - Output only. Question type name that identifies a labeling task. A question is a single task that a worker answers. A question type is set of related questions. Each question belongs to a particular question type. It can be used in CrowdCompute UI to filter and manage labeling tasks.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:completedExampleCount => String.t(),
:createTime => DateTime.t(),
:description => String.t(),
:displayName => String.t(),
:exampleCount => String.t(),
:name => String.t(),
:questionTypeName => String.t()
}
field(:completedExampleCount)
field(:createTime, as: DateTime)
field(:description)
field(:displayName)
field(:exampleCount)
field(:name)
field(:questionTypeName)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1AnnotatedConversationDataset do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1AnnotatedConversationDataset.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1AnnotatedConversationDataset do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 49.528571 | 350 | 0.739544 |
7320483ad8776d9fc7ea0bb805a1f4d5fbaba1e3 | 1,812 | exs | Elixir | test/oban/queue/executor_test.exs | jamilabreu/oban | 5ee1781af993c93208b324024f72c0862cf076c8 | [
"Apache-2.0"
] | null | null | null | test/oban/queue/executor_test.exs | jamilabreu/oban | 5ee1781af993c93208b324024f72c0862cf076c8 | [
"Apache-2.0"
] | null | null | null | test/oban/queue/executor_test.exs | jamilabreu/oban | 5ee1781af993c93208b324024f72c0862cf076c8 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Queue.ExecutorTest do
use Oban.Case, async: true
import ExUnit.CaptureLog
alias Oban.Queue.Executor
defmodule Worker do
use Oban.Worker
@impl Worker
def perform(%{args: %{"mode" => "ok"}}), do: :ok
def perform(%{args: %{"mode" => "warn"}}), do: {:bad, :this_will_warn}
def perform(%{args: %{"mode" => "raise"}}), do: raise(ArgumentError)
def perform(%{args: %{"mode" => "catch"}}), do: throw(:no_reason)
def perform(%{args: %{"mode" => "error"}}), do: {:error, "no reason"}
end
describe "perform/1" do
defp call_with_mode(mode) do
job = %Job{args: %{"mode" => mode}, worker: to_string(Worker)}
Config.new(repo: Repo)
|> Executor.new(job)
|> Executor.resolve_worker()
|> Executor.perform()
end
test "accepting :ok as a success" do
assert %{state: :success} = call_with_mode("ok")
end
test "raising, catching and error tuples are failures" do
assert %{state: :failure} = call_with_mode("raise")
assert %{state: :failure, error: :no_reason} = call_with_mode("catch")
assert %{state: :failure, error: "no reason"} = call_with_mode("error")
end
test "warning on unexpected return values" do
message = capture_log(fn -> %{state: :success} = call_with_mode("warn") end)
assert message =~ "Expected #{__MODULE__}.Worker.perform/2"
assert message =~ "{:bad, :this_will_warn}"
end
end
describe "new/2" do
test "include prefix in metadata for job events" do
job = %Job{args: %{"mode" => "ok"}, worker: to_string(Worker)}
assert "public" ==
[repo: Repo]
|> Config.new()
|> Executor.new(job)
|> Map.from_struct()
|> get_in([:meta, :prefix])
end
end
end
| 30.2 | 82 | 0.591611 |
73206f23d079298b8f3352516adbc5870076775a | 1,127 | ex | Elixir | lib/lib_ten_web/channels/products/orders_channel.ex | 10clouds/10Books | 622360ea190421e07d4b207700867be105894218 | [
"MIT"
] | 11 | 2018-08-29T15:59:09.000Z | 2021-08-25T16:35:13.000Z | lib/lib_ten_web/channels/products/orders_channel.ex | fram74/10Books | 9e4e280032c7f7b9625c831efa9850d999327e53 | [
"MIT"
] | 16 | 2018-08-29T15:43:52.000Z | 2021-05-09T00:53:56.000Z | lib/lib_ten_web/channels/products/orders_channel.ex | fram74/10Books | 9e4e280032c7f7b9625c831efa9850d999327e53 | [
"MIT"
] | 3 | 2019-05-29T14:22:59.000Z | 2020-06-06T12:30:54.000Z | defmodule LibTenWeb.Products.OrdersChannel do
use Phoenix.Channel
import LibTenWeb.Products.ChannelHelpers
alias LibTen.Products.Orders
def join("products:orders", _message, socket) do
products = LibTenWeb.ProductsView.render("index.json", products: Orders.list())
{:ok, %{payload: products}, socket}
end
def handle_in("update", %{"id" => product_id, "attrs" => attrs}, socket) do
role = if socket.assigns.user.is_admin, do: "admin", else: "user"
Orders.update(product_id, attrs, role, socket.assigns.user.id)
|> make_reply(socket)
end
def handle_in("create", %{"attrs" => attrs}, socket) do
Orders.create(attrs, socket.assigns.user.id)
|> make_reply(socket)
end
def handle_in("upvote", %{"id" => product_id}, socket) do
Orders.upvote(product_id, socket.assigns.user.id)
|> make_reply(socket)
end
def handle_in("downvote", %{"id" => product_id}, socket) do
Orders.downvote(product_id, socket.assigns.user.id)
|> make_reply(socket)
end
def broadcast_update(product_id) do
broadcast_update("orders", product_id, Orders.get(product_id))
end
end
| 30.459459 | 83 | 0.698314 |
732075502c3b9eda770c32cfcb82952a2a1832ba | 4,094 | ex | Elixir | lib/smppex/pdu_storage.ex | MarkMagnus/smppex | 01d2ab76164dab0ba2a37c8344b697842574054d | [
"Apache-2.0"
] | null | null | null | lib/smppex/pdu_storage.ex | MarkMagnus/smppex | 01d2ab76164dab0ba2a37c8344b697842574054d | [
"Apache-2.0"
] | null | null | null | lib/smppex/pdu_storage.ex | MarkMagnus/smppex | 01d2ab76164dab0ba2a37c8344b697842574054d | [
"Apache-2.0"
] | null | null | null | defmodule SMPPEX.PduStorage do
@moduledoc false
use GenServer
require Integer
alias :ets, as: ETS
alias SMPPEX.PduStorage
alias SMPPEX.Pdu
defstruct [
:by_sequence_number,
:next_sequence_number,
:seq_table,
:seq_key,
:seq_store
]
@type t :: %PduStorage{}
@spec start_link(list, list) :: GenServer.on_start
def start_link(params \\ [], opts \\ []) do
params = case params do
[seq_table: seq_table, seq_key: seq_key, seq_store: seq_store] ->
Enum.into([seq_table: seq_table, seq_key: seq_key, seq_store: seq_store], %{})
_ ->
SMPPEX.MemSequenceStorage.start_link()
{seq_table, seq_key} = SMPPEX.MemSequenceStorage.init_seq(params)
Enum.into(params, %{seq_table: seq_table, seq_key: seq_key, seq_store: SMPPEX.MemSequenceStorage})
end
GenServer.start_link(__MODULE__, params, opts)
end
@spec store(pid, Pdu.t, non_neg_integer) :: boolean
def store(pid, %Pdu{} = pdu, expire_time) do
GenServer.call(pid, {:store, pdu, expire_time})
end
@spec fetch(pid, non_neg_integer) :: [Pdu.t]
def fetch(pid, sequence_number) do
GenServer.call(pid, {:fetch, sequence_number})
end
@spec fetch_expired(pid, non_neg_integer) :: [Pdu.t]
def fetch_expired(pid, expire_time) do
GenServer.call(pid, {:fetch_expired, expire_time})
end
  @spec reserve_sequence_number(pid) :: pos_integer
  @doc """
  Reserve a sequence number by getting the current next sequence number and then incrementing it.
Useful if you need to track sequence numbers externally.
"""
def reserve_sequence_number(pid) do
GenServer.call(pid, :reserve_sequence_number)
end
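  # Hypothetical usage, assuming a storage started with the default
  # in-memory sequence store:
  #
  #     {:ok, storage} = SMPPEX.PduStorage.start_link()
  #     seq = SMPPEX.PduStorage.reserve_sequence_number(storage)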
  @spec stop(pid) :: :ok
def stop(pid), do: GenServer.cast(pid, :stop)
def state(pid), do: GenServer.call(pid, :state)
def init(params) do
next_sequence_number = case Map.has_key?(params, :next_sequence_number) do
true ->
params.next_sequence_number
false ->
params.seq_store.get_next_seq(params.seq_table, params.seq_key)
end
Process.flag(:trap_exit, true)
{:ok, %PduStorage{
by_sequence_number: ETS.new(:pdu_storage_by_sequence_number, [:set]),
next_sequence_number: next_sequence_number,
seq_table: params.seq_table,
seq_key: params.seq_key,
seq_store: params.seq_store
}}
end
  def handle_cast(:stop, _st) do
    # Raising exits the process on purpose; terminate/2 then persists the
    # next sequence number through the sequence store.
    raise StopException
  end
def handle_call(:state, _from, st), do: {:reply, st, st}
def handle_call({:store, pdu, expire_time}, _from, st) do
sequence_number = Pdu.sequence_number(pdu)
result = ETS.insert_new(st.by_sequence_number, {sequence_number, {expire_time, pdu}})
{:reply, result, st}
end
def handle_call({:fetch, sequence_number}, _from, st) do
case ETS.lookup(st.by_sequence_number, sequence_number) do
[{^sequence_number, {_expire_time, pdu}}] ->
true = ETS.delete(st.by_sequence_number, sequence_number)
{:reply, [pdu], st}
[] ->
{:reply, [], st}
end
end
def handle_call({:fetch_expired, expire_time}, _from, st) do
expired = ETS.select(st.by_sequence_number, [{ {:'_', {:'$1', :'$2'}}, [{:'<', :'$1', expire_time}], [:'$2']}])
expired_count = length(expired)
^expired_count = ETS.select_delete(st.by_sequence_number, [{ {:'_', {:'$1', :'$2'}}, [{:'<', :'$1', expire_time}], [true]}])
{:reply, expired, st}
end
def handle_call(:reserve_sequence_number, _from, st) do
new_next_sequence_number = st.seq_store.incr_seq(st.seq_table, st.seq_key, st.next_sequence_number)
new_st = %PduStorage{st | next_sequence_number: new_next_sequence_number}
{:reply, st.next_sequence_number, new_st}
end
def terminate(_reason, st) do
st.seq_store.save_next_seq(st.seq_table, st.seq_key, st.next_sequence_number)
end
  # Returns the current sequence number together with the state advanced to
  # the next one (a best-guess completion of this unused helper).
  defp increment_sequence_number(st) do
    {st.next_sequence_number, %PduStorage{st | next_sequence_number: st.next_sequence_number + 1}}
  end
end
defmodule StopException do
defexception message: "stopping process on request"
end
defimpl String.Chars, for: StopException do
def to_string(exception), do: exception.message
end
| 28.830986 | 128 | 0.68466 |
73208e66659bb358bea462c03ff345447036a5a3 | 898 | ex | Elixir | clients/data_catalog/lib/google_api/data_catalog/v1beta1/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/data_catalog/lib/google_api/data_catalog/v1beta1/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/data_catalog/lib/google_api/data_catalog/v1beta1/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataCatalog.V1beta1 do
@moduledoc """
API client metadata for GoogleApi.DataCatalog.V1beta1.
"""
@discovery_revision "20200918"
def discovery_revision(), do: @discovery_revision
end
| 33.259259 | 74 | 0.762806 |
732097f5e4b9354865ddef5778946cc9a8f370e4 | 1,231 | ex | Elixir | subjects_source/FTP/bftpd/debian/emacsen-install.ex | AminoACID123/profuzzbench | c24fcb1bb801acb68d80ec58cb6046c9a1e79853 | [
"Apache-2.0"
] | 6 | 2021-05-19T16:58:15.000Z | 2022-03-10T03:51:20.000Z | subjects_source/FTP/bftpd/debian/emacsen-install.ex | AminoACID123/profuzzbench | c24fcb1bb801acb68d80ec58cb6046c9a1e79853 | [
"Apache-2.0"
] | null | null | null | subjects_source/FTP/bftpd/debian/emacsen-install.ex | AminoACID123/profuzzbench | c24fcb1bb801acb68d80ec58cb6046c9a1e79853 | [
"Apache-2.0"
] | 4 | 2021-05-24T11:19:18.000Z | 2022-03-08T17:58:24.000Z | #! /bin/sh -e
# /usr/lib/emacsen-common/packages/install/bftpd
# Written by Jim Van Zandt <[email protected]>, borrowing heavily
# from the install scripts for gettext by Santiago Vila
# <[email protected]> and octave by Dirk Eddelbuettel <[email protected]>.
FLAVOR=$1
PACKAGE=bftpd
if [ ${FLAVOR} = emacs ]; then exit 0; fi
echo install/${PACKAGE}: Handling install for emacsen flavor ${FLAVOR}
#FLAVORTEST=`echo $FLAVOR | cut -c-6`
#if [ ${FLAVORTEST} = xemacs ] ; then
# SITEFLAG="-no-site-file"
#else
# SITEFLAG="--no-site-file"
#fi
FLAGS="${SITEFLAG} -q -batch -l path.el -f batch-byte-compile"
ELDIR=/usr/share/emacs/site-lisp/${PACKAGE}
ELCDIR=/usr/share/${FLAVOR}/site-lisp/${PACKAGE}
# Install-info-altdir does not actually exist.
# Maybe somebody will write it.
if test -x /usr/sbin/install-info-altdir; then
echo install/${PACKAGE}: install Info links for ${FLAVOR}
install-info-altdir --quiet --section "" "" --dirname=${FLAVOR} /usr/info/${PACKAGE}.info.gz
fi
install -m 755 -d ${ELCDIR}
cd ${ELDIR}
FILES=`echo *.el`
cp ${FILES} ${ELCDIR}
cd ${ELCDIR}
cat << EOF > path.el
(setq load-path (cons "." load-path) byte-compile-warnings nil)
EOF
${FLAVOR} ${FLAGS} ${FILES}
rm -f *.el path.el
exit 0
| 26.76087 | 96 | 0.685621 |
7320a45c240e34cbb2fda6a24e0586dbef1ae51f | 1,229 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/target_https_proxy.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_https_proxy.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_https_proxy.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.TargetHttpsProxy do
@moduledoc """
A TargetHttpsProxy resource. This resource defines an HTTPS proxy.
"""
@derive [Poison.Encoder]
defstruct [
:"creationTimestamp",
:"description",
:"id",
:"kind",
:"name",
:"selfLink",
:"sslCertificates",
:"urlMap"
]
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.TargetHttpsProxy do
def decode(value, _options) do
value
end
end
| 27.931818 | 77 | 0.725793 |
7320c74175cd187ca09e032385d402b76d5da5e5 | 1,989 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/suggested_bullet.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/docs/lib/google_api/docs/v1/model/suggested_bullet.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/docs/lib/google_api/docs/v1/model/suggested_bullet.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.SuggestedBullet do
@moduledoc """
A suggested change to a Bullet.
## Attributes
* `bullet` (*type:* `GoogleApi.Docs.V1.Model.Bullet.t`, *default:* `nil`) - A Bullet that only includes the changes made in this suggestion. This can be used along with the bullet_suggestion_state to see which fields have changed and their new values.
* `bulletSuggestionState` (*type:* `GoogleApi.Docs.V1.Model.BulletSuggestionState.t`, *default:* `nil`) - A mask that indicates which of the fields on the base Bullet have been changed in this suggestion.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:bullet => GoogleApi.Docs.V1.Model.Bullet.t() | nil,
:bulletSuggestionState => GoogleApi.Docs.V1.Model.BulletSuggestionState.t() | nil
}
field(:bullet, as: GoogleApi.Docs.V1.Model.Bullet)
field(:bulletSuggestionState, as: GoogleApi.Docs.V1.Model.BulletSuggestionState)
end
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.SuggestedBullet do
def decode(value, options) do
GoogleApi.Docs.V1.Model.SuggestedBullet.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.SuggestedBullet do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.78 | 255 | 0.745098 |
7320d6c9e799a029a0a82dd6b1b3db6782100027 | 1,638 | ex | Elixir | lib/petal_components/link.ex | cohawk/petal_components | 2741c4de66fa8dfa2896fe1e6e02f1d1653422cd | [
"MIT"
] | null | null | null | lib/petal_components/link.ex | cohawk/petal_components | 2741c4de66fa8dfa2896fe1e6e02f1d1653422cd | [
"MIT"
] | null | null | null | lib/petal_components/link.ex | cohawk/petal_components | 2741c4de66fa8dfa2896fe1e6e02f1d1653422cd | [
"MIT"
] | null | null | null | defmodule PetalComponents.Link do
use Phoenix.Component
# prop class, :string
# prop label, :string
# prop link_type, :string, options: ["a", "live_patch", "live_redirect"]
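  # A usage sketch in a HEEx template (the `to` and `class` values are illustrative):
  #
  #     <.link link_type="live_patch" to="/home" label="Home" class="nav-link" />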
def link(assigns) do
    assigns =
      assigns
      |> assign_new(:class, fn -> "" end)
      |> assign_new(:link_type, fn -> "a" end)
      |> assign_new(:label, fn -> nil end)
      |> assign_new(:inner_block, fn -> nil end)
      |> assign_new(:extra_attributes, fn ->
        Map.drop(assigns, [
          :class,
          :link_type,
          :type,
          :inner_block,
          :label,
          :__slot__,
          :__changed__
        ])
      end)
~H"""
<%= case @link_type do %>
<% "a" -> %>
<%= Phoenix.HTML.Link.link [to: @to, class: @class] ++ Map.to_list(@extra_attributes) do %>
<%= if @inner_block do %>
<%= render_slot(@inner_block) %>
<% else %>
<%= @label %>
<% end %>
<% end %>
<% "live_patch" -> %>
<%= live_patch [
to: @to,
class: @class,
] ++ Enum.to_list(@extra_attributes) do %>
<%= if @inner_block do %>
<%= render_slot(@inner_block) %>
<% else %>
<%= @label %>
<% end %>
<% end %>
<% "live_redirect" -> %>
<%= live_redirect [
to: @to,
class: @class,
] ++ Enum.to_list(@extra_attributes) do %>
<%= if @inner_block do %>
<%= render_slot(@inner_block) %>
<% else %>
<%= @label %>
<% end %>
<% end %>
<% end %>
"""
end
end
| 26.852459 | 99 | 0.445055 |
7320f2139497aa8cc64cb597347f3c9d73aa4291 | 821 | exs | Elixir | test/unit/hologram/compiler/module_def_aggregators/anonymous_function_call_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 40 | 2022-01-19T20:27:36.000Z | 2022-03-31T18:17:41.000Z | test/unit/hologram/compiler/module_def_aggregators/anonymous_function_call_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 42 | 2022-02-03T22:52:43.000Z | 2022-03-26T20:57:32.000Z | test/unit/hologram/compiler/module_def_aggregators/anonymous_function_call_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 3 | 2022-02-10T04:00:37.000Z | 2022-03-08T22:07:45.000Z | defmodule Hologram.Compiler.ModuleDefAggregator.AnonymousFunctionCallTest do
use Hologram.Test.UnitCase, async: false
alias Hologram.Compiler.{ModuleDefAggregator, ModuleDefStore}
alias Hologram.Compiler.IR.{AnonymousFunctionCall, ModuleDefinition, ModuleType}
alias Hologram.Test.Fixtures.PlaceholderModule1
alias Hologram.Test.Fixtures.PlaceholderModule2
setup do
ModuleDefStore.run()
:ok
end
test "aggregates args" do
ir = %AnonymousFunctionCall{
name: :test,
args: [
%ModuleType{module: PlaceholderModule1},
%ModuleType{module: PlaceholderModule2}
]
}
ModuleDefAggregator.aggregate(ir)
assert %ModuleDefinition{} = ModuleDefStore.get!(PlaceholderModule1)
assert %ModuleDefinition{} = ModuleDefStore.get!(PlaceholderModule2)
end
end
| 28.310345 | 82 | 0.750305 |
7320f341fd908774c627759033a84d59e2fc4d69 | 1,162 | ex | Elixir | dash/lib/dash_web/channels/user_socket.ex | bhaveshpoddar94/BitcoinRealtime | e9a60ae80596271048c81cbb2c3be7e3428d2a8f | [
"MIT"
] | null | null | null | dash/lib/dash_web/channels/user_socket.ex | bhaveshpoddar94/BitcoinRealtime | e9a60ae80596271048c81cbb2c3be7e3428d2a8f | [
"MIT"
] | null | null | null | dash/lib/dash_web/channels/user_socket.ex | bhaveshpoddar94/BitcoinRealtime | e9a60ae80596271048c81cbb2c3be7e3428d2a8f | [
"MIT"
] | null | null | null | defmodule DashWeb.UserSocket do
use Phoenix.Socket
## Channels
channel "room:*", DashWeb.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# DashWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 30.578947 | 83 | 0.702238 |
7320fbaf8f3dd1b30d100882746bcbd556c10814 | 262 | exs | Elixir | mix.exs | ansd/observer_cli | d0c27528bd7230eea827f753c863d9291b2b351d | [
"MIT"
] | 1,205 | 2015-11-03T15:56:53.000Z | 2022-03-29T08:48:45.000Z | mix.exs | ansd/observer_cli | d0c27528bd7230eea827f753c863d9291b2b351d | [
"MIT"
] | 57 | 2016-04-27T08:17:31.000Z | 2022-03-20T09:50:33.000Z | mix.exs | ansd/observer_cli | d0c27528bd7230eea827f753c863d9291b2b351d | [
"MIT"
] | 89 | 2015-11-04T10:15:48.000Z | 2022-02-21T22:18:30.000Z | defmodule ObserverCli.MixProject do
use Mix.Project
def project do
[
app: :observer_cli,
version: "1.7.1",
language: :erlang,
description: "observer in shell",
deps: [
        {:recon, "~> 2.5.1"}
]
]
end
end
| 15.411765 | 39 | 0.534351 |
732100b5093e59015cdb6848028c59e0de0dcf41 | 1,936 | ex | Elixir | lib/cowboy.ex | litecord/gateway | f55fad00923ff905d428a01a27b0da5c256fa3f3 | [
"MIT"
] | 3 | 2017-10-21T00:46:42.000Z | 2018-12-17T13:41:48.000Z | lib/cowboy.ex | litecord/gateway | f55fad00923ff905d428a01a27b0da5c256fa3f3 | [
"MIT"
] | 3 | 2017-10-21T19:32:01.000Z | 2017-12-08T04:56:59.000Z | lib/cowboy.ex | litecord/gateway | f55fad00923ff905d428a01a27b0da5c256fa3f3 | [
"MIT"
] | null | null | null | defmodule Gateway.DefaultHandler do
@moduledoc """
Just a default handler for /
"""
require Logger
def init(req0, state) do
Logger.info "giving a hello"
req = :cowboy_req.reply(200,
%{"content-type" => "text/plain"},
"helo",
req0
)
{:ok, req, state}
end
end
defmodule Gateway.Cowboy do
@moduledoc """
Entry point for the webserver and websocket servers.
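  The functions below read their settings from the `:gateway` application
  environment. A sketch of the expected config (port values are illustrative):
      config :gateway,
        mode: :http,
        bridge_port: 8080,
        http_port: 8081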
"""
require Logger
def start_link() do
mode = Application.fetch_env!(:gateway, :mode)
    start_link_bridge()
    case mode do
      :http -> start_link_http()
      :https -> start_link_https()
end
end
def start_link_bridge do
dispatch_config = bridge_dispatch_config()
port = Application.fetch_env!(:gateway, :bridge_port)
Logger.info "Starting bridge at :#{port}"
{:ok, _} = :cowboy.start_clear(
:litecord_bridge,
[port: port],
%{env: %{dispatch: dispatch_config}}
)
end
def start_link_http do
dispatch_config = build_dispatch_config()
port = Application.fetch_env!(:gateway, :http_port)
Logger.info "Starting http at :#{port}"
{:ok, _} = :cowboy.start_clear(
:litecord_http,
[{:port, port}],
%{env: %{dispatch: dispatch_config}}
)
end
def start_link_https() do
dispatch_config = build_dispatch_config()
    port = Application.fetch_env!(:gateway, :https_port)
Logger.info "Starting https at :#{port}"
{:ok, _} = :cowboy.start_tls(
:litecord_https,
[
{:port, port},
{:certfile, ""},
{:keyfile, ""}
], %{env: %{dispatch: dispatch_config}})
end
def bridge_dispatch_config do
:cowboy_router.compile([
{:_, [
{"/", Gateway.Bridge, %{}}
]}
])
end
def build_dispatch_config do
:cowboy_router.compile([
{:_, [
{"/", Gateway.DefaultHandler, []},
{"/gw", Gateway.Websocket, %{}},
]}
])
end
end
| 21.511111 | 57 | 0.59814 |
73214dc63469607e72ef273dbb00a26868d82f96 | 1,088 | ex | Elixir | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/connectivity_information.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/connectivity_information.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/connectivity_information.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Network.Model.ConnectivityInformation do
@moduledoc """
Information on the connectivity status.
"""
@derive [Poison.Encoder]
defstruct [
:"hops",
:"connectionStatus",
:"avgLatencyInMs",
:"minLatencyInMs",
:"maxLatencyInMs",
:"probesSent",
:"probesFailed"
]
@type t :: %__MODULE__{
:"hops" => [ConnectivityHop],
:"connectionStatus" => String.t,
:"avgLatencyInMs" => integer(),
:"minLatencyInMs" => integer(),
:"maxLatencyInMs" => integer(),
:"probesSent" => integer(),
:"probesFailed" => integer()
}
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Network.Model.ConnectivityInformation do
import Microsoft.Azure.Management.Network.Deserializer
def decode(value, options) do
value
|> deserialize(:"hops", :list, Microsoft.Azure.Management.Network.Model.ConnectivityHop, options)
end
end
| 27.2 | 101 | 0.693015 |
73214fbbabf44c536fa11d7491c5cefdbdc0faba | 8,738 | ex | Elixir | lib/elasticsearch/indexing/index.ex | marcelolebre/elasticsearch-elixir | 1297d73fcc5b7b0bd43c081d563f716234849ae8 | [
"MIT"
] | null | null | null | lib/elasticsearch/indexing/index.ex | marcelolebre/elasticsearch-elixir | 1297d73fcc5b7b0bd43c081d563f716234849ae8 | [
"MIT"
] | null | null | null | lib/elasticsearch/indexing/index.ex | marcelolebre/elasticsearch-elixir | 1297d73fcc5b7b0bd43c081d563f716234849ae8 | [
"MIT"
] | null | null | null | defmodule Elasticsearch.Index do
@moduledoc """
Functions for manipulating Elasticsearch indexes.
"""
alias Elasticsearch.{
Cluster.Config,
Index.Bulk
}
@doc """
Creates an index using a zero-downtime hot-swap technique.
1. Build an index for the given `alias`, with a timestamp: `alias-12323123`
2. Bulk upload data to that index using `store` and `sources`.
3. Alias the `alias` to `alias-12323123`.
4. Remove old indexes beginning with `alias`.
5. Refresh `alias-12323123`.
This allows an old index to be served while a new index for `alias` is built.
## Example
iex> Index.hot_swap(Cluster, "posts")
:ok
"""
@spec hot_swap(Cluster.t(), alias :: String.t() | atom) ::
:ok | {:error, Elasticsearch.Exception.t()}
def hot_swap(cluster, alias) do
alias = alias_to_atom(alias)
name = build_name(alias)
config = Config.get(cluster)
%{settings: settings_file} = index_config = config[:indexes][alias]
with :ok <- create_from_file(config, name, settings_file),
:ok <- Bulk.upload(config, name, index_config),
:ok <- __MODULE__.alias(config, name, alias),
:ok <- clean_starting_with(config, alias, 2),
:ok <- refresh(config, name) do
:ok
end
end
defp alias_to_atom(atom) when is_atom(atom), do: atom
defp alias_to_atom(str) when is_binary(str), do: String.to_existing_atom(str)
@doc """
Returns all indexes which start with a given string.
## Example
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> Index.starting_with(Cluster, "posts")
{:ok, ["posts-1"]}
"""
@spec starting_with(Cluster.t(), String.t() | atom) ::
{:ok, [String.t()]}
| {:error, Elasticsearch.Exception.t()}
def starting_with(cluster, prefix) do
with {:ok, indexes} <- Elasticsearch.get(cluster, "/_cat/indices?format=json") do
prefix = prefix |> to_string() |> Regex.escape()
{:ok, regex} = Regex.compile("^#{prefix}-[0-9]+$")
indexes =
indexes
|> Enum.map(& &1["index"])
|> Enum.filter(&Regex.match?(regex, &1))
|> Enum.sort()
{:ok, indexes}
end
end
@doc """
Assigns an alias to a given index, simultaneously removing it from prior
indexes, with zero downtime.
## Example
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> Index.alias(Cluster, "posts-1", "posts")
:ok
"""
@spec alias(Cluster.t(), String.t(), String.t()) ::
:ok
| {:error, Elasticsearch.Exception.t()}
def alias(cluster, name, alias) do
with {:ok, indexes} <- starting_with(cluster, alias),
indexes = Enum.reject(indexes, &(&1 == name)) do
remove_actions =
Enum.map(indexes, fn index ->
%{"remove" => %{"index" => index, "alias" => alias}}
end)
actions = %{
"actions" => remove_actions ++ [%{"add" => %{"index" => name, "alias" => alias}}]
}
with {:ok, _response} <- Elasticsearch.post(cluster, "/_aliases", actions), do: :ok
end
end
@doc """
Gets the most recent index name with the given prefix.
## Examples
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> Index.create_from_file(Cluster, "posts-2", "test/support/settings/posts.json")
...> Index.latest_starting_with(Cluster, "posts")
{:ok, "posts-2"}
If there are no indexes matching that prefix:
iex> Index.latest_starting_with(Cluster, "nonexistent")
{:error, :not_found}
"""
@spec latest_starting_with(Cluster.t(), String.t() | atom) ::
{:ok, String.t()}
| {:error, :not_found}
| {:error, Elasticsearch.Exception.t()}
def latest_starting_with(cluster, prefix) do
with {:ok, indexes} <- starting_with(cluster, prefix) do
index =
indexes
|> Enum.sort()
|> List.last()
case index do
nil -> {:error, :not_found}
index -> {:ok, index}
end
end
end
@doc """
Refreshes a given index with recently added data.
## Example
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> Index.refresh(Cluster, "posts-1")
:ok
"""
@spec refresh(Cluster.t(), String.t()) :: :ok | {:error, Elasticsearch.Exception.t()}
def refresh(cluster, name) do
with {:ok, _} <- Elasticsearch.post(cluster, "/#{name}/_forcemerge?max_num_segments=5", %{}),
{:ok, _} <- Elasticsearch.post(cluster, "/#{name}/_refresh", %{}),
do: :ok
end
@doc """
Same as `refresh/1`, but raises an error on failure.
## Examples
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> Index.refresh!(Cluster, "posts-1")
:ok
iex> Index.refresh!(Cluster, "nonexistent")
** (Elasticsearch.Exception) (index_not_found_exception) no such index
"""
@spec refresh!(Cluster.t(), String.t()) :: :ok
def refresh!(cluster, name) do
case refresh(cluster, name) do
:ok ->
:ok
{:error, error} ->
raise error
end
end
@doc """
Removes indexes starting with the given prefix, keeping a certain number.
Can be used to garbage collect old indexes that are no longer used.
## Examples
If there is only one index, and `num_to_keep` is >= 1, the index is not deleted.
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> Index.clean_starting_with(Cluster, "posts", 1)
...> Index.starting_with(Cluster, "posts")
{:ok, ["posts-1"]}
If `num_to_keep` is less than the number of indexes, the older indexes are
deleted.
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> Index.clean_starting_with(Cluster, "posts", 0)
...> Index.starting_with(Cluster, "posts")
{:ok, []}
"""
@spec clean_starting_with(Cluster.t(), String.t(), integer) ::
:ok
| {:error, [Elasticsearch.Exception.t()]}
def clean_starting_with(cluster, prefix, num_to_keep) when is_integer(num_to_keep) do
with {:ok, indexes} <- starting_with(cluster, prefix) do
total = length(indexes)
num_to_delete = total - num_to_keep
num_to_delete = if num_to_delete >= 0, do: num_to_delete, else: 0
errors =
indexes
|> Enum.sort()
|> Enum.take(num_to_delete)
|> Enum.map(&Elasticsearch.delete(cluster, "/#{&1}"))
|> Enum.filter(&(elem(&1, 0) == :error))
|> Enum.map(&elem(&1, 1))
if length(errors) > 0 do
{:error, errors}
else
:ok
end
end
end
@doc """
Creates an index with the given name from either a JSON string or Elixir map.
## Examples
iex> Index.create(Cluster, "posts-1", "{}")
:ok
"""
@spec create(Cluster.t(), String.t(), map | String.t()) ::
:ok
| {:error, Elasticsearch.Exception.t()}
def create(cluster, name, settings) do
with {:ok, _response} <- Elasticsearch.put(cluster, "/#{name}", settings), do: :ok
end
@doc """
Creates an index with the given name, with settings loaded from a JSON file.
## Example
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
:ok
iex> Index.create_from_file(Cluster, "posts-1", "nonexistent.json")
{:error, :enoent}
The `posts.json` file contains regular index settings as described in the
Elasticsearch [documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html#_example_mapping):
{
"mappings": {
"post": {
"properties": {
"title": {
"type": "string"
},
"author": {
"type": "string"
}
}
}
}
}
"""
@spec create_from_file(Cluster.t(), String.t(), Path.t()) ::
:ok
| {:error, File.posix()}
| {:error, Elasticsearch.Exception.t()}
def create_from_file(cluster, name, file) do
with {:ok, settings} <- File.read(file) do
create(cluster, name, settings)
end
end
@doc """
Generates a name for an index that will be aliased to a given `alias`.
Similar to migrations, the name will contain a timestamp.
## Example
Index.build_name("main")
# => "main-1509581256"
"""
@spec build_name(String.t() | atom) :: String.t()
def build_name(alias) do
"#{alias}-#{system_timestamp()}"
end
defp system_timestamp do
DateTime.to_unix(DateTime.utc_now(), :microsecond)
end
end
| 29.721088 | 127 | 0.602312 |
73215348f896aa0911f75ec1778a14c01d6f31cf | 1,380 | ex | Elixir | clients/genomics/lib/google_api/genomics/v1/model/import_variants_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/model/import_variants_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/model/import_variants_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Genomics.V1.Model.ImportVariantsResponse do
@moduledoc """
The variant data import response.
## Attributes
- callSetIds (List[String]): IDs of the call sets created during the import. Defaults to: `null`.
"""
defstruct [
:"callSetIds"
]
end
defimpl Poison.Decoder, for: GoogleApi.Genomics.V1.Model.ImportVariantsResponse do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.Genomics.V1.Model.ImportVariantsResponse do
def encode(value, options) do
GoogleApi.Genomics.V1.Deserializer.serialize_non_nil(value, options)
end
end
| 30 | 99 | 0.756522 |
7321804bdcdc578ae1cb7420feff5e7fddd210af | 3,151 | ex | Elixir | lib/ex_config/option_normalizer.ex | djthread/ex_config | 6900ffe3c0cdc089a224621749bfd5d32cced562 | [
"MIT"
] | null | null | null | lib/ex_config/option_normalizer.ex | djthread/ex_config | 6900ffe3c0cdc089a224621749bfd5d32cced562 | [
"MIT"
] | null | null | null | lib/ex_config/option_normalizer.ex | djthread/ex_config | 6900ffe3c0cdc089a224621749bfd5d32cced562 | [
"MIT"
] | null | null | null | defmodule ExConfig.OptionNormalizer do
@moduledoc """
Helper tools for validating and creating fallback values for options
"""
@env_prefix_regex ~r/^[A-Z][A-Z0-9_]*$/
@default_valid_environments ~w(dev test beta prod)a
# The default data sources to poll, in order
@data_sources [
ExConfig.EnvironmentDataSource,
ExConfig.EnvConfigDataSource,
ExConfig.ApplicationEnvironmentDataSource
]
@type opts :: Keyword.t()
@doc """
Given opts, raise on bad values, fill defaults for missing values, and
return the normalized opts.
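  A minimal sketch of a call (`MyApp.Config` is a made-up module name; `:module`
  is required because several fallbacks below derive defaults from it):
      ExConfig.OptionNormalizer.normalize_opts!(module: MyApp.Config, app: :my_app)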
"""
@spec normalize_opts!(opts) :: opts
def normalize_opts!(opts) do
opts
|> Keyword.put(:app, normalize_app!(opts))
|> Keyword.put(:env_prefix, normalize_env_prefix!(opts))
|> Keyword.put(:valid_environments, normalize_valid_environments!(opts))
|> Keyword.put(:sections, normalize_sections!(opts))
|> Keyword.put(:data_sources, normalize_data_sources!(opts))
end
def normalize_app!(opts) do
case Keyword.fetch(opts, :app) do
{:ok, app} when is_atom(app) ->
app
{:ok, not_atom} ->
raise ArgumentError, "Invalid `:app`: #{not_atom}"
:error ->
opts
|> Keyword.get(:module)
|> Module.split()
|> Enum.take(1)
|> hd()
|> Macro.underscore()
|> String.to_atom()
end
end
def normalize_env_prefix!(opts) do
with {:ok, val} when byte_size(val) > 0 <- Keyword.fetch(opts, :env_prefix),
{_, true} <- {val, Regex.match?(@env_prefix_regex, val)} do
val
else
{:ok, val} ->
raise ArgumentError, "Invalid `:env_prefix`: #{inspect(val)}"
{val, false} ->
raise ArgumentError, "Invalid `:env_prefix`: #{inspect(val)}"
:error ->
opts
|> Keyword.get(:module)
|> Module.split()
|> Enum.take(1)
|> hd()
|> String.upcase()
end
end
def normalize_valid_environments!(opts) do
    case Keyword.fetch(opts, :valid_environments) do
      {:ok, envs} when is_list(envs) and envs != [] ->
        validate_all_atoms!(envs, "env")
        envs
      {:ok, invalid} ->
        raise ArgumentError, "Invalid `:valid_environments`: #{inspect(invalid)}"
      :error ->
        @default_valid_environments
    end
end
def normalize_sections!(opts) do
case Keyword.fetch(opts, :sections) do
{:ok, sections} ->
validate_all_atoms!(sections, "section")
sections
:error ->
[]
end
end
def normalize_data_sources!(opts) do
data_sources = Keyword.get(opts, :data_sources, @data_sources)
Enum.each(data_sources, fn ds ->
      behaviours = ds.module_info(:attributes) |> Keyword.get_values(:behaviour) |> List.flatten()
if ExConfig.DataSource not in behaviours do
raise ArgumentError, """
Data source does not implement `ExConfig.DataSource` behaviour: \
#{ds}\
"""
end
end)
data_sources
end
defp validate_all_atoms!(atoms, name) when is_list(atoms) do
Enum.each(atoms, fn a ->
is_atom(a) || raise ArgumentError, "Invalid #{name} atom: #{inspect(a)}"
end)
end
defp validate_all_atoms!(atoms, name) do
raise ArgumentError, "Invalid #{name} atom list: #{inspect(atoms)}"
end
end
| 26.041322 | 80 | 0.619169 |
7321827744c14a4fd43855c2f4d7fc00f0430495 | 2,040 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/publisher_provided_forecast.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/publisher_provided_forecast.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/publisher_provided_forecast.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Model.PublisherProvidedForecast do
@moduledoc """
This message carries publisher provided forecasting information.
## Attributes
* `dimensions` (*type:* `list(GoogleApi.AdExchangeBuyer.V14.Model.Dimension.t)`, *default:* `nil`) - Publisher provided dimensions. E.g. geo, sizes etc...
* `weeklyImpressions` (*type:* `String.t`, *default:* `nil`) - Publisher provided weekly impressions.
* `weeklyUniques` (*type:* `String.t`, *default:* `nil`) - Publisher provided weekly uniques.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dimensions => list(GoogleApi.AdExchangeBuyer.V14.Model.Dimension.t()),
:weeklyImpressions => String.t(),
:weeklyUniques => String.t()
}
field(:dimensions, as: GoogleApi.AdExchangeBuyer.V14.Model.Dimension, type: :list)
field(:weeklyImpressions)
field(:weeklyUniques)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V14.Model.PublisherProvidedForecast do
def decode(value, options) do
GoogleApi.AdExchangeBuyer.V14.Model.PublisherProvidedForecast.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V14.Model.PublisherProvidedForecast do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.490566 | 158 | 0.741176 |
732193f378a2aa5e64315b8cf3456988854a6e69 | 1,072 | ex | Elixir | {{cookiecutter.app_name}}/lib/{{cookiecutter.app_name}}/guardian.ex | StephaneRob/cookiecutter-phoenix | c71a01a582fe8d57999d646cea7cbd820a4c73ca | [
"BSD-2-Clause"
] | 4 | 2018-01-16T15:40:04.000Z | 2020-01-11T19:34:42.000Z | {{cookiecutter.app_name}}/lib/{{cookiecutter.app_name}}/guardian.ex | StephaneRob/cookiecutter-phoenix | c71a01a582fe8d57999d646cea7cbd820a4c73ca | [
"BSD-2-Clause"
] | 4 | 2018-03-10T14:18:37.000Z | 2018-03-13T20:43:06.000Z | {{cookiecutter.app_name}}/lib/{{cookiecutter.app_name}}/guardian.ex | StephaneRob/cookiecutter-phoenix | c71a01a582fe8d57999d646cea7cbd820a4c73ca | [
"BSD-2-Clause"
] | 1 | 2019-10-11T20:52:31.000Z | 2019-10-11T20:52:31.000Z | defmodule {{cookiecutter.app_name.split('_')|map('title')|join}}.Guardian do
use Guardian, otp_app: :{{cookiecutter.app_name}}
alias {{cookiecutter.app_name.split('_')|map('title')|join}}.Accounts
  def subject_for_token(resource, _claims) do
# You can use any value for the subject of your token but
# it should be useful in retrieving the resource later, see
# how it being used on `resource_from_claims/1` function.
# A unique `id` is a good subject, a non-unique email address
# is a poor subject.
sub = to_string(resource.id)
{:ok, sub}
end
def subject_for_token(_, _) do
{:error, :reason_for_error}
end
def resource_from_claims(claims) do
# Here we'll look up our resource from the claims, the subject can be
# found in the `"sub"` key. In `above subject_for_token/2` we returned
# the resource id so here we'll rely on that to look it up.
id = claims["sub"]
resource = Accounts.get_user!(id)
{:ok, resource}
end
def resource_from_claims(_claims) do
{:error, :reason_for_error}
end
end
| 35.733333 | 76 | 0.692164 |
7321976575ba4e9e071fa91896577b3e31ab32d3 | 623 | ex | Elixir | bryan_hunter+elixir+hashring/lib/globo/actor_supervisor.ex | NashFP/globo | 630cdb401dd37f72f0b747a5870f48a23f6961b9 | [
"MIT"
] | 2 | 2020-10-28T03:09:51.000Z | 2020-10-28T14:57:52.000Z | bryan_hunter+elixir+hashring/lib/globo/actor_supervisor.ex | NashFP/globo | 630cdb401dd37f72f0b747a5870f48a23f6961b9 | [
"MIT"
] | null | null | null | bryan_hunter+elixir+hashring/lib/globo/actor_supervisor.ex | NashFP/globo | 630cdb401dd37f72f0b747a5870f48a23f6961b9 | [
"MIT"
] | null | null | null | defmodule Globo.ActorSupervisor do
use DynamicSupervisor
def start_link() do
DynamicSupervisor.start_link(__MODULE__, [], name: __MODULE__)
end
def init(_) do
DynamicSupervisor.init(strategy: :one_for_one)
end
def start_child(key) do
spec = %{id: Globo.Actor, start: {Globo.Actor, :start_link, [key]}}
DynamicSupervisor.start_child(__MODULE__, spec)
end
def terminate_child(key) do
Registry.lookup(:actor_registry, key)
|> case do
[{pid, _}] ->
DynamicSupervisor.terminate_child(__MODULE__, pid)
:terminated
_ ->
:not_found
end
end
end
| 21.482759 | 71 | 0.672552 |
7321bfd272beab18aea97bc91dd599b33164d7b9 | 1,629 | exs | Elixir | mix.exs | viniciusd/bank-account-opening | a36e5c3c8f32e48bf42af958119aef85ff1eeeaa | [
"MIT"
] | null | null | null | mix.exs | viniciusd/bank-account-opening | a36e5c3c8f32e48bf42af958119aef85ff1eeeaa | [
"MIT"
] | null | null | null | mix.exs | viniciusd/bank-account-opening | a36e5c3c8f32e48bf42af958119aef85ff1eeeaa | [
"MIT"
] | null | null | null | defmodule BankAccount.MixProject do
use Mix.Project
def project do
[
app: :bank_account_opening,
version: "0.1.0",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {BankAccount.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.5.0"},
{:phoenix_live_dashboard, "~> 0.2.0"},
{:phoenix_pubsub, "~> 2.0"},
{:phoenix_ecto, "~> 4.0"},
{:ecto_sql, "~> 3.1"},
{:postgrex, ">= 0.0.0"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.1"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 26.274194 | 79 | 0.585635 |
7321c34d0b32f6260bb95d9b15403bc965e7abad | 1,038 | ex | Elixir | test/support/conn_case.ex | ravernkoh/awesome-lists | ed8f93d6bc1e5968303a484ebe8f0036ddd0db7b | [
"MIT"
] | null | null | null | test/support/conn_case.ex | ravernkoh/awesome-lists | ed8f93d6bc1e5968303a484ebe8f0036ddd0db7b | [
"MIT"
] | null | null | null | test/support/conn_case.ex | ravernkoh/awesome-lists | ed8f93d6bc1e5968303a484ebe8f0036ddd0db7b | [
"MIT"
] | null | null | null | defmodule AwesomeWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
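  A minimal sketch of a test module built on this case (the route and module
  name are illustrative):
      defmodule AwesomeWeb.PageControllerTest do
        use AwesomeWeb.ConnCase
        test "GET /", %{conn: conn} do
          conn = get(conn, "/")
          assert html_response(conn, 200)
        end
      end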
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
import AwesomeWeb.Router.Helpers
# The default endpoint for testing
@endpoint AwesomeWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Awesome.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Awesome.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 26.615385 | 69 | 0.719653 |
7321d8dae9c1b3841bcd3be849b02d1b0f83669c | 943 | ex | Elixir | lib/telepath/application.ex | jdangerx/telepath | 75314db05dd97ea86b5a828ab673fed33ecad5e1 | [
"MIT"
] | null | null | null | lib/telepath/application.ex | jdangerx/telepath | 75314db05dd97ea86b5a828ab673fed33ecad5e1 | [
"MIT"
] | null | null | null | lib/telepath/application.ex | jdangerx/telepath | 75314db05dd97ea86b5a828ab673fed33ecad5e1 | [
"MIT"
] | null | null | null | defmodule Telepath.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
# List all child processes to be supervised
children = [
# Start our little state wrapper
Telepath.State,
# Start the endpoint when the application starts
TelepathWeb.Endpoint
# Starts a worker by calling: Telepath.Worker.start_link(arg)
# {Telepath.Worker, arg},
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Telepath.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
TelepathWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 29.46875 | 67 | 0.716861 |
732212156f2d1f52362c6a34a3e2e285b8a9c18d | 2,225 | exs | Elixir | config/dev.exs | baseballlover723/test_empty_app | fd7046ea8ee88e1c0eefee82fe95aecc7ede3f82 | [
"MIT"
] | null | null | null | config/dev.exs | baseballlover723/test_empty_app | fd7046ea8ee88e1c0eefee82fe95aecc7ede3f82 | [
"MIT"
] | null | null | null | config/dev.exs | baseballlover723/test_empty_app | fd7046ea8ee88e1c0eefee82fe95aecc7ede3f82 | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
config :test_empty_app, TestEmptyApp.Repo,
username: "postgres",
password: "postgres",
database: "test_empty_app_dev",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
pool_size: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :test_empty_app_web, TestEmptyAppWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../apps/test_empty_app_web/assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :test_empty_app_web, TestEmptyAppWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/test_empty_app_web/(live|views)/.*(ex)$",
~r"lib/test_empty_app_web/templates/.*(eex)$"
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 28.896104 | 68 | 0.702022 |
73223983aaa099e7f7542253a2b604b68e4af5de | 1,299 | ex | Elixir | lib/grizzly/zwave/commands/node_location_set.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/zwave/commands/node_location_set.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/zwave/commands/node_location_set.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | null | null | null | defmodule Grizzly.ZWave.Commands.NodeLocationSet do
@moduledoc """
This command is used to set the location of the receiving node.
Params:
* `:encoding` - one of :ascii, :extended_ascii, :utf_16
* `:location` - a string location for the node
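  A sketch of building this command (the location string is illustrative):
      {:ok, command} =
        Grizzly.ZWave.Commands.NodeLocationSet.new(encoding: :ascii, location: "kitchen")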
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, DecodeError}
alias Grizzly.ZWave.CommandClasses.NodeNaming
@type param :: {:location, String.t()} | {:encoding, :ascii | :extended_ascii | :utf_16}
@impl true
def new(params) do
command = %Command{
name: :node_location_set,
command_byte: 0x04,
command_class: NodeNaming,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
def encode_params(command) do
encoding = Command.param!(command, :encoding)
location = Command.param!(command, :location)
encoding_byte = NodeNaming.encoding_to_byte(encoding)
<<0x00::size(5), encoding_byte::size(3)>> <> location
end
@impl true
def decode_params(<<_reserved::size(5), encoding_byte::size(3), location::binary>>) do
with {:ok, encoding} <- NodeNaming.encoding_from_byte(encoding_byte) do
{:ok, [encoding: encoding, location: location]}
else
{:error, %DecodeError{}} = error ->
error
end
end
end
| 25.470588 | 90 | 0.668206 |
732242d744b418eaaf713bfe2c10b2682033ffbd | 132 | ex | Elixir | wabanex/lib/wabanex_web/resolvers/training.ex | vscGabriel/Wabanex | 3c21b97995d6858058767ca92feab4ed1a7ca269 | [
"MIT"
] | null | null | null | wabanex/lib/wabanex_web/resolvers/training.ex | vscGabriel/Wabanex | 3c21b97995d6858058767ca92feab4ed1a7ca269 | [
"MIT"
] | null | null | null | wabanex/lib/wabanex_web/resolvers/training.ex | vscGabriel/Wabanex | 3c21b97995d6858058767ca92feab4ed1a7ca269 | [
"MIT"
] | null | null | null | defmodule WabanexWeb.Resolvers.Training do
def create(%{input: params}, _context), do: Wabanex.Trainings.Create.call(params)
end
| 26.4 | 83 | 0.780303 |
732255be522fa34650e60dd029135607414fcc67 | 543 | ex | Elixir | lib/gh_web/views/changeset_view.ex | snamiki1212/example-elixir-phoenix-nuron-graphql | 57b7f255db683a880d1b31f65a4328606d4d0009 | [
"MIT"
] | null | null | null | lib/gh_web/views/changeset_view.ex | snamiki1212/example-elixir-phoenix-nuron-graphql | 57b7f255db683a880d1b31f65a4328606d4d0009 | [
"MIT"
] | null | null | null | lib/gh_web/views/changeset_view.ex | snamiki1212/example-elixir-phoenix-nuron-graphql | 57b7f255db683a880d1b31f65a4328606d4d0009 | [
"MIT"
] | null | null | null | defmodule GhWeb.ChangesetView do
use GhWeb, :view
@doc """
Traverses and translates changeset errors.
See `Ecto.Changeset.traverse_errors/2` and
`GhWeb.ErrorHelpers.translate_error/1` for more details.
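  A sketch of the resulting shape (the field and message are illustrative):
      translate_errors(changeset)
      #=> %{name: ["can't be blank"]}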
"""
def translate_errors(changeset) do
Ecto.Changeset.traverse_errors(changeset, &translate_error/1)
end
def render("error.json", %{changeset: changeset}) do
# When encoded, the changeset returns its errors
# as a JSON object. So we just pass it forward.
%{errors: translate_errors(changeset)}
end
end
| 27.15 | 65 | 0.731123 |
732261230211c72e075c753039cf52ff2b9e2c26 | 146 | ex | Elixir | lib/rankings_web/controllers/virtual_controller.ex | spkane31/cc-rankings | 0acda9f3ca35abd4874ab06478ad22aa473811bf | [
"MIT"
] | 1 | 2020-06-28T19:31:07.000Z | 2020-06-28T19:31:07.000Z | lib/rankings_web/controllers/virtual_controller.ex | spkane31/rankings | 0acda9f3ca35abd4874ab06478ad22aa473811bf | [
"MIT"
] | 5 | 2019-07-25T17:00:39.000Z | 2019-07-25T17:12:07.000Z | lib/rankings_web/controllers/virtual_controller.ex | spkane31/rankings | 0acda9f3ca35abd4874ab06478ad22aa473811bf | [
"MIT"
] | null | null | null | defmodule RankingsWeb.VirtualController do
use RankingsWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
end
| 18.25 | 42 | 0.746575 |
7322741ba0229cf1131ab8113d603c85d3c7b719 | 1,081 | ex | Elixir | apps/omg_watcher_rpc/lib/web/controllers/challenge.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher_rpc/lib/web/controllers/challenge.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher_rpc/lib/web/controllers/challenge.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.WatcherRPC.Web.Controller.Challenge do
@moduledoc """
Handles exit challenges
"""
use OMG.WatcherRPC.Web, :controller
alias OMG.Utxo
alias OMG.Watcher.API
@doc """
Challenges exits
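  Expects a positive integer `"utxo_pos"` request parameter; a sketch of the
  params map (value illustrative):
      %{"utxo_pos" => 1_000_000_000}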
"""
def get_utxo_challenge(conn, params) do
with {:ok, utxo_pos} <- expect(params, "utxo_pos", :pos_integer),
{:ok, utxo} <- Utxo.Position.decode(utxo_pos) do
utxo
|> API.Utxo.create_challenge()
|> api_response(conn, :challenge)
end
end
end
| 29.216216 | 74 | 0.714154 |
7322a14b38a6acc375a7298eabd08fe00dcec783 | 8,613 | ex | Elixir | lib/y_2021/d3/day3.ex | jkantarek/advent_of_code | 0741ac1e1549f81f64665d5dc6de21768bbeb405 | [
"Unlicense"
] | null | null | null | lib/y_2021/d3/day3.ex | jkantarek/advent_of_code | 0741ac1e1549f81f64665d5dc6de21768bbeb405 | [
"Unlicense"
] | null | null | null | lib/y_2021/d3/day3.ex | jkantarek/advent_of_code | 0741ac1e1549f81f64665d5dc6de21768bbeb405 | [
"Unlicense"
] | null | null | null | defmodule AdventOfCode.Y2021.Day3 do
@moduledoc """
--- Day 3: Binary Diagnostic ---
The submarine has been making some odd creaking noises, so you ask it to produce a diagnostic report just in case.
The diagnostic report (your puzzle input) consists of a list of binary numbers which, when decoded properly, can tell you many useful things about the conditions of the submarine. The first parameter to check is the power consumption.
You need to use the binary numbers in the diagnostic report to generate two new binary numbers (called the gamma rate and the epsilon rate). The power consumption can then be found by multiplying the gamma rate by the epsilon rate.
Each bit in the gamma rate can be determined by finding the most common bit in the corresponding position of all numbers in the diagnostic report. For example, given the following diagnostic report:
00100
11110
10110
10111
10101
01111
00111
11100
10000
11001
00010
01010
Considering only the first bit of each number, there are five 0 bits and seven 1 bits. Since the most common bit is 1, the first bit of the gamma rate is 1.
The most common second bit of the numbers in the diagnostic report is 0, so the second bit of the gamma rate is 0.
The most common value of the third, fourth, and fifth bits are 1, 1, and 0, respectively, and so the final three bits of the gamma rate are 110.
So, the gamma rate is the binary number 10110, or 22 in decimal.
The epsilon rate is calculated in a similar way; rather than use the most common bit, the least common bit from each position is used. So, the epsilon rate is 01001, or 9 in decimal. Multiplying the gamma rate (22) by the epsilon rate (9) produces the power consumption, 198.
Use the binary numbers in your diagnostic report to calculate the gamma rate and epsilon rate, then multiply them together. What is the power consumption of the submarine? (Be sure to represent your answer in decimal, not binary.)
## Examples
iex> AdventOfCode.Y2021.Day3.part1()
1092896
"""
def part1() do
parse_file()
|> transpose_rows()
|> compute_gamma_eps()
end
def compute_gamma_eps(columns) do
columns
|> Enum.map(fn col ->
Enum.frequencies(col)
|> get_max_val()
end)
|> build_gamma_eps()
|> solve()
end
def build_gamma_eps(gamma) do
eps =
gamma
|> Enum.map(fn dig ->
case dig do
0 -> 1
1 -> 0
end
end)
[gamma, eps]
end
def solve([gamma, eps]) do
[gamma, eps]
|> Enum.reduce(1, fn arr, acc ->
bin_int = bin_arr_to_int(arr)
acc * bin_int
end)
end
def bin_arr_to_int(arr) do
{bin_int, ""} = Enum.join(arr) |> Integer.parse(2)
bin_int
end
def get_max_val(%{0 => zeros, 1 => ones}) when zeros > ones, do: 0
def get_max_val(%{0 => zeros, 1 => ones}) when zeros <= ones, do: 1
def solve_gamma(weights, rows) do
rows
|> Enum.map(fn row -> row ++ weights end)
end
def transpose_rows(rows) do
rows
|> List.zip()
|> Enum.map(&Tuple.to_list/1)
end
def parse_file() do
AdventOfCode.etl_file("lib/y_2021/d3/input.txt", &parse_row/1)
end
def parse_row(s) do
s
|> String.split("")
|> Enum.reduce([], fn ss, acc ->
if ss != "" do
acc ++ [get_int(Integer.parse(ss), s)]
else
acc
end
end)
end
defp get_int({n, ""}, _), do: n
@doc """
--- Part Two ---
Next, you should verify the life support rating, which can be determined by multiplying the oxygen generator rating by the CO2 scrubber rating.
Both the oxygen generator rating and the CO2 scrubber rating are values that can be found in your diagnostic report - finding them is the tricky part. Both values are located using a similar process that involves filtering out values until only one remains. Before searching for either rating value, start with the full list of binary numbers from your diagnostic report and consider just the first bit of those numbers. Then:
Keep only numbers selected by the bit criteria for the type of rating value for which you are searching. Discard numbers which do not match the bit criteria.
If you only have one number left, stop; this is the rating value for which you are searching.
Otherwise, repeat the process, considering the next bit to the right.
The bit criteria depends on which type of rating value you want to find:
To find oxygen generator rating, determine the most common value (0 or 1) in the current bit position, and keep only numbers with that bit in that position. If 0 and 1 are equally common, keep values with a 1 in the position being considered.
To find CO2 scrubber rating, determine the least common value (0 or 1) in the current bit position, and keep only numbers with that bit in that position. If 0 and 1 are equally common, keep values with a 0 in the position being considered.
For example, to determine the oxygen generator rating value using the same example diagnostic report from above:
Start with all 12 numbers and consider only the first bit of each number. There are more 1 bits (7) than 0 bits (5), so keep only the 7 numbers with a 1 in the first position: 11110, 10110, 10111, 10101, 11100, 10000, and 11001.
Then, consider the second bit of the 7 remaining numbers: there are more 0 bits (4) than 1 bits (3), so keep only the 4 numbers with a 0 in the second position: 10110, 10111, 10101, and 10000.
In the third position, three of the four numbers have a 1, so keep those three: 10110, 10111, and 10101.
In the fourth position, two of the three numbers have a 1, so keep those two: 10110 and 10111.
In the fifth position, there are an equal number of 0 bits and 1 bits (one each). So, to find the oxygen generator rating, keep the number with a 1 in that position: 10111.
As there is only one number left, stop; the oxygen generator rating is 10111, or 23 in decimal.
Then, to determine the CO2 scrubber rating value from the same example above:
Start again with all 12 numbers and consider only the first bit of each number. There are fewer 0 bits (5) than 1 bits (7), so keep only the 5 numbers with a 0 in the first position: 00100, 01111, 00111, 00010, and 01010.
Then, consider the second bit of the 5 remaining numbers: there are fewer 1 bits (2) than 0 bits (3), so keep only the 2 numbers with a 1 in the second position: 01111 and 01010.
In the third position, there are an equal number of 0 bits and 1 bits (one each). So, to find the CO2 scrubber rating, keep the number with a 0 in that position: 01010.
As there is only one number left, stop; the CO2 scrubber rating is 01010, or 10 in decimal.
Finally, to find the life support rating, multiply the oxygen generator rating (23) by the CO2 scrubber rating (10) to get 230.
Use the binary numbers in your diagnostic report to calculate the oxygen generator rating and CO2 scrubber rating, then multiply them together. What is the life support rating of the submarine? (Be sure to represent your answer in decimal, not binary.)
  ## Examples

      iex> AdventOfCode.Y2021.Day3.part2()
      %{co2_val: 1357, life_support_rating: 4672151, o2_val: 3443}
"""
def part2() do
rows = parse_file()
cols_with_index = transpose_rows(rows) |> Enum.with_index()
o2_val = iterate_and_reduce(rows, cols_with_index, &get_max_val/1)
co2_val = iterate_and_reduce(rows, cols_with_index, &get_min_val/1)
%{
o2_val: o2_val,
co2_val: co2_val,
life_support_rating: o2_val * co2_val
}
end
  def get_min_val(%{0 => zeros, 1 => ones}) when ones < zeros, do: 1
  def get_min_val(%{0 => zeros, 1 => ones}) when zeros <= ones, do: 0
  # Single-key maps occur when a column has become uniform.
  def get_min_val(%{0 => _zeros}), do: 0
  def get_min_val(%{1 => _ones}), do: 1
def iterate_and_reduce([elem], _cols, _func) do
bin_arr_to_int(elem)
end
def iterate_and_reduce(rows, [{head_cols, idx} | rest_cols], func)
when is_list(rest_cols) and length(rows) > 1 do
    # The bit criteria value for this column: the most common bit for the
    # O2 rating, the least common bit for the CO2 rating.
    criteria_bit =
      head_cols
      |> Enum.frequencies()
      |> func.()

    # Keep only the rows whose bit in this column matches the criteria.
    sub_rows =
      rows
      |> Enum.filter(fn row -> criteria_bit == Enum.at(row, idx) end)

    sub_rows = pick_row_set(sub_rows, rows)
list =
sub_rows
|> transpose_rows()
|> Enum.with_index()
|> Enum.reject(fn {_col, col_idx} ->
col_idx <= idx
end)
iterate_and_reduce(sub_rows, list, func)
end
def pick_row_set([], rows), do: rows
def pick_row_set(sub_rows, _rows), do: sub_rows
end
| 40.060465 | 428 | 0.698247 |
7322beec1469f7200611a23e068cd56c1e85f513 | 5,808 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_webhook_response.ex | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_webhook_response.ex | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_webhook_response.ex | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1WebhookResponse do
@moduledoc """
The response message for a webhook call.
This response is validated by the Dialogflow server. If validation fails,
an error will be returned in the QueryResult.diagnostic_info field.
Setting JSON fields to an empty value with the wrong type is a common error.
To avoid this error:
- Use `""` for empty strings
- Use `{}` or `null` for empty objects
- Use `[]` or `null` for empty arrays
For more information, see the
[Protocol Buffers Language
Guide](https://developers.google.com/protocol-buffers/docs/proto3#json).
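  For example, a response that intentionally clears these optional fields
  could be serialized as the following JSON (illustrative values only):

      {
        "fulfillmentText": "",
        "fulfillmentMessages": [],
        "payload": {}
      }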
## Attributes
* `endInteraction` (*type:* `boolean()`, *default:* `nil`) - Optional. Indicates that this intent ends an interaction. Some integrations
(e.g., Actions on Google or Dialogflow phone gateway) use this information
to close interaction with an end user. Default is false.
* `followupEventInput` (*type:* `GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1EventInput.t`, *default:* `nil`) - Optional. Makes the platform immediately invoke another `DetectIntent` call
internally with the specified event as input.
When this field is set, Dialogflow ignores the `fulfillment_text`,
`fulfillment_messages`, and `payload` fields.
* `fulfillmentMessages` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessage.t)`, *default:* `nil`) - Optional. The collection of rich messages to present to the user. This
value is passed directly to `QueryResult.fulfillment_messages`.
* `fulfillmentText` (*type:* `String.t`, *default:* `nil`) - Optional. The text to be shown on the screen. This value is passed directly
to `QueryResult.fulfillment_text`.
* `outputContexts` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1Context.t)`, *default:* `nil`) - Optional. The collection of output contexts. This value is passed directly
to `QueryResult.output_contexts`.
* `payload` (*type:* `map()`, *default:* `nil`) - Optional. This field can be used to pass custom data from your webhook to the API
caller. Arbitrary JSON objects are supported.
When provided, Dialogflow uses this field to populate
`QueryResult.webhook_payload` sent to the API caller.
This field is also used by the
[Google Assistant
integration](https://cloud.google.com/dialogflow/docs/integrations/aog)
for rich response messages.
See the format definition at [Google Assistant Dialogflow webhook
format](https://developers.google.com/assistant/actions/build/json/dialogflow-webhook-json)
* `sessionEntityTypes` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1SessionEntityType.t)`, *default:* `nil`) - Optional. Additional session entity types to replace or extend developer
entity types with. The entity synonyms apply to all languages and persist
for the session of this query. Setting the session entity types inside
webhook overwrites the session entity types that have been set through
`DetectIntentRequest.query_params.session_entity_types`.
* `source` (*type:* `String.t`, *default:* `nil`) - Optional. This value is passed directly to `QueryResult.webhook_source`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:endInteraction => boolean(),
:followupEventInput =>
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1EventInput.t(),
:fulfillmentMessages =>
list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessage.t()),
:fulfillmentText => String.t(),
:outputContexts =>
list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1Context.t()),
:payload => map(),
:sessionEntityTypes =>
list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1SessionEntityType.t()),
:source => String.t()
}
field(:endInteraction)
field(:followupEventInput,
as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1EventInput
)
field(:fulfillmentMessages,
as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessage,
type: :list
)
field(:fulfillmentText)
field(:outputContexts,
as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1Context,
type: :list
)
field(:payload, type: :map)
field(:sessionEntityTypes,
as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1SessionEntityType,
type: :list
)
field(:source)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1WebhookResponse do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1WebhookResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1WebhookResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
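# Minimal usage sketch (field values invented for illustration); encoding
# relies on the Poison.Encoder implementation above:
#
#     response = %GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1WebhookResponse{
#       fulfillmentText: "Hello from the webhook"
#     }
#     Poison.encode!(response)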
| 45.023256 | 215 | 0.735537 |
7322e7447aaebf016257ced421b97fb571480efe | 2,438 | ex | Elixir | lib/types/time.ex | kenuyx/timex_ecto | 879f80b69f0301b5465ac73c12977a825a8f2060 | [
"MIT"
] | null | null | null | lib/types/time.ex | kenuyx/timex_ecto | 879f80b69f0301b5465ac73c12977a825a8f2060 | [
"MIT"
] | null | null | null | lib/types/time.ex | kenuyx/timex_ecto | 879f80b69f0301b5465ac73c12977a825a8f2060 | [
"MIT"
] | null | null | null | defmodule Timex.Ecto.Time do
@moduledoc """
Support for using Timex with :time fields
"""
use Timex
@behaviour Ecto.Type
def type, do: :time
@doc """
Handle casting to Timex.Ecto.Time
"""
def cast(input) when is_binary(input) do
case Timex.parse(input, "{ISOtime}") do
{:ok, %NaiveDateTime{hour: hour,
minute: minute,
second: second,
microsecond: {us,_}}} ->
load({hour, minute, second, us})
{:error, _} -> :error
end
end
  # The 3-tuple is interpreted as an Erlang-style {megaseconds, seconds,
  # microseconds} timestamp, mirroring dump/1 below.
  def cast({mega, sec, micro} = timestamp)
      when is_number(mega) and is_number(sec) and is_number(micro) do
    {:ok, Duration.from_erl(timestamp)}
  end
def cast(%Duration{} = d) do
{:ok, d}
end
# Support embeds_one/embeds_many
def cast(%{"megaseconds" => m, "seconds" => s, "microseconds" => us}) do
clock = Duration.to_clock({m,s,us})
load(clock)
end
def cast(%{"hour" => h, "minute" => mm, "second" => s, "ms" => ms}) do
load({h, mm, s, ms * 1_000})
end
def cast(%{"hour" => h, "minute" => mm, "second" => s, "millisecond" => ms}) do
load({h, mm, s, ms * 1_000})
end
def cast(%{"hour" => h, "minute" => mm, "second" => s, "microsecond" => {us, _}}) do
load({h, mm, s, us})
end
def cast(input) do
case Ecto.Time.cast(input) do
{:ok, time} -> load({time.hour, time.min, time.sec, time.usec})
:error -> :error
end
end
@doc """
Load from the native Ecto representation
"""
def load({_hour, _minute, _second, _usecs} = clock) do
d = Duration.from_clock(clock)
{:ok, d}
end
def load(%{:__struct__ => Postgrex.Interval, :days => days, :months => months, :secs => seconds}) do
d = Duration.from_clock({ ((months * 30) + days) * 24, 0, seconds, 0 })
{:ok, d}
end
def load(_), do: :error
@doc """
Convert to the native Ecto representation
"""
def dump(%Duration{} = d) do
{:ok, Duration.to_clock(d)}
end
def dump({_mega, _sec, _micro} = timestamp) do
{:ok, Duration.to_clock(Duration.from_erl(timestamp))}
end
def dump(_), do: :error
def autogenerate(precision \\ :sec)
def autogenerate(:sec) do
{_date, {h, m, s}} = :erlang.universaltime
load({h, m, s, 0}) |> elem(1)
end
def autogenerate(:usec) do
timestamp = {_,_, usec} = :os.timestamp
{_date, {h, m, s}} = :calendar.now_to_datetime(timestamp)
load({h, m, s, usec}) |> elem(1)
end
end
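# Usage sketch (module and field names invented for illustration):
#
#     defmodule MyApp.Meeting do
#       use Ecto.Schema
#
#       schema "meetings" do
#         field :starts_at, Timex.Ecto.Time
#       end
#     end
#
# Casting then accepts ISO time strings, Erlang-style timestamp triples,
# `Timex.Duration` structs, and the map shapes handled above.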
| 26.791209 | 102 | 0.568909 |
7322f51b55c75bf803588bbdd2fee965a288dc6f | 25,264 | ex | Elixir | lib/mix/lib/mix/compilers/elixir.ex | jwarwick/elixir | de103c0f4e3240aa38967298ccb5f483a9e40c16 | [
"Apache-2.0"
] | 1 | 2021-05-05T02:11:24.000Z | 2021-05-05T02:11:24.000Z | lib/mix/lib/mix/compilers/elixir.ex | jwarwick/elixir | de103c0f4e3240aa38967298ccb5f483a9e40c16 | [
"Apache-2.0"
] | 6 | 2021-03-19T12:33:21.000Z | 2021-04-02T17:52:45.000Z | lib/mix/lib/mix/compilers/elixir.ex | jwarwick/elixir | de103c0f4e3240aa38967298ccb5f483a9e40c16 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Compilers.Elixir do
@moduledoc false
@manifest_vsn 8
import Record
defrecord :module, [:module, :kind, :sources, :export, :recompile?]
defrecord :source,
source: nil,
size: 0,
compile_references: [],
export_references: [],
runtime_references: [],
compile_env: [],
external: [],
warnings: [],
modules: []
@doc """
Compiles stale Elixir files.
It expects a `manifest` file, the source directories, the destination
directory, an option to know if compilation is being forced or not, and a
list of any additional compiler options.
The `manifest` is written down with information including dependencies
between modules, which helps it recompile only the modules that
have changed at runtime.
"""
def compile(manifest, srcs, dest, exts, force, opts) do
# We fetch the time from before we read files so any future
# change to files are still picked up by the compiler. This
# timestamp is used when writing BEAM files and the manifest.
timestamp = System.os_time(:second)
all_paths = Mix.Utils.extract_files(srcs, exts)
{all_modules, all_sources, all_local_exports} = parse_manifest(manifest, dest)
modified = Mix.Utils.last_modified(manifest)
{stale_local_deps, stale_local_mods, stale_local_exports, all_local_exports} =
stale_local_deps(manifest, modified, all_local_exports)
prev_paths = for source(source: source) <- all_sources, do: source
removed = prev_paths -- all_paths
{sources, removed_modules} = remove_removed_sources(all_sources, removed)
{modules, exports, changed, sources_stats} =
if force do
compiler_info_from_force(manifest, all_paths, all_modules, dest)
else
compiler_info_from_updated(
modified,
all_paths,
all_modules,
all_sources,
prev_paths,
removed,
stale_local_mods,
Map.merge(stale_local_exports, removed_modules),
dest
)
end
stale = changed -- removed
{sources, removed_modules} =
update_stale_sources(sources, stale, removed_modules, sources_stats)
if opts[:all_warnings], do: show_warnings(sources)
cond do
stale != [] ->
Mix.Utils.compiling_n(length(stale), hd(exts))
Mix.Project.ensure_structure()
true = Code.prepend_path(dest)
previous_opts =
{stale_local_deps, opts}
|> Mix.Compilers.ApplicationTracer.init()
|> set_compiler_opts()
# Stores state for keeping track which files were compiled
# and the dependencies between them.
put_compiler_info({modules, exports, sources, modules, removed_modules})
try do
compile_path(stale, dest, timestamp, opts)
else
{:ok, _, warnings} ->
{modules, _exports, sources, _pending_modules, _pending_exports} = get_compiler_info()
sources = apply_warnings(sources, warnings)
write_manifest(manifest, modules, sources, all_local_exports, timestamp)
put_compile_env(sources)
{:ok, Enum.map(warnings, &diagnostic(&1, :warning))}
{:error, errors, warnings} ->
# In case of errors, we show all previous warnings and all new ones
{_, _, sources, _, _} = get_compiler_info()
errors = Enum.map(errors, &diagnostic(&1, :error))
warnings = Enum.map(warnings, &diagnostic(&1, :warning))
{:error, warning_diagnostics(sources) ++ warnings ++ errors}
after
Code.compiler_options(previous_opts)
Mix.Compilers.ApplicationTracer.stop()
Code.purge_compiler_modules()
delete_compiler_info()
end
# We need to return ok if stale_local_mods changed
# because we want that to propagate to compile.protocols
removed != [] or stale_local_mods != %{} ->
write_manifest(manifest, modules, sources, all_local_exports, timestamp)
{:ok, warning_diagnostics(sources)}
true ->
{:noop, warning_diagnostics(sources)}
end
end
@doc """
Removes compiled files for the given `manifest`.
"""
def clean(manifest, compile_path) do
{modules, _} = read_manifest(manifest)
Enum.each(modules, fn module(module: module) ->
File.rm(beam_path(compile_path, module))
end)
end
@doc """
Returns protocols and implementations for the given `manifest`.
"""
def protocols_and_impls(manifest, compile_path) do
{modules, _} = read_manifest(manifest)
for module(module: module, kind: kind) <- modules,
match?(:protocol, kind) or match?({:impl, _}, kind),
do: {module, kind, beam_path(compile_path, module)}
end
@doc """
Reads the manifest for external consumption.
"""
def read_manifest(manifest) do
try do
manifest |> File.read!() |> :erlang.binary_to_term()
rescue
_ -> {[], []}
else
{@manifest_vsn, modules, sources, _local_exports} -> {modules, sources}
_ -> {[], []}
end
end
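  # For example (manifest path invented for illustration):
  #
  #     read_manifest("_build/dev/lib/my_app/.mix/compile.elixir")
  #     #=> {modules, sources}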
defp compiler_info_from_force(manifest, all_paths, all_modules, dest) do
# A config, path dependency or manifest has changed, let's just compile everything
for module(module: module) <- all_modules,
do: remove_and_purge(beam_path(dest, module), module)
sources_stats =
for path <- all_paths,
into: %{},
do: {path, Mix.Utils.last_modified_and_size(path)}
# Now that we have deleted all beams, remember to remove the manifest.
# This is important in case mix compile --force fails, otherwise we
# would have an outdated manifest.
File.rm(manifest)
{[], %{}, all_paths, sources_stats}
end
defp compiler_info_from_updated(
modified,
all_paths,
all_modules,
all_sources,
prev_paths,
removed,
stale_local_mods,
stale_local_exports,
dest
) do
# Otherwise let's start with the new sources
new_paths = all_paths -- prev_paths
sources_stats =
for path <- new_paths,
into: mtimes_and_sizes(all_sources),
do: {path, Mix.Utils.last_modified_and_size(path)}
modules_to_recompile =
for module(module: module, recompile?: true) <- all_modules,
recompile_module?(module),
into: %{},
do: {module, true}
# Sources that have changed on disk or
# any modules associated with them need to be recompiled
changed =
for source(source: source, external: external, size: size, modules: modules) <-
all_sources,
{last_mtime, last_size} = Map.fetch!(sources_stats, source),
times = Enum.map(external, &(sources_stats |> Map.fetch!(&1) |> elem(0))),
size != last_size or Mix.Utils.stale?([last_mtime | times], [modified]) or
Enum.any?(modules, &Map.has_key?(modules_to_recompile, &1)),
do: source
changed = new_paths ++ changed
{modules, exports, changed} =
update_stale_entries(
all_modules,
all_sources,
removed ++ changed,
stale_local_mods,
stale_local_exports,
dest
)
{modules, exports, changed, sources_stats}
end
defp mtimes_and_sizes(sources) do
Enum.reduce(sources, %{}, fn source(source: source, external: external), map ->
Enum.reduce([source | external], map, fn file, map ->
Map.put_new_lazy(map, file, fn -> Mix.Utils.last_modified_and_size(file) end)
end)
end)
end
defp compile_path(stale, dest, timestamp, opts) do
cwd = File.cwd!()
long_compilation_threshold = opts[:long_compilation_threshold] || 10
verbose = opts[:verbose] || false
compile_opts = [
each_cycle: fn -> each_cycle(dest, timestamp) end,
each_file: &each_file(&1, &2, cwd, verbose),
each_module: &each_module(&1, &2, &3, cwd),
each_long_compilation: &each_long_compilation(&1, cwd, long_compilation_threshold),
long_compilation_threshold: long_compilation_threshold,
profile: opts[:profile],
beam_timestamp: timestamp
]
Kernel.ParallelCompiler.compile_to_path(stale, dest, compile_opts)
end
defp get_compiler_info(), do: Process.get(__MODULE__)
defp put_compiler_info(value), do: Process.put(__MODULE__, value)
defp delete_compiler_info(), do: Process.delete(__MODULE__)
defp set_compiler_opts(opts) do
opts
|> Keyword.take(Code.available_compiler_options())
|> Code.compiler_options()
end
defp put_compile_env(sources) do
all_compile_env =
Enum.reduce(sources, :ordsets.new(), fn source(compile_env: compile_env), acc ->
:ordsets.union(compile_env, acc)
end)
Mix.ProjectStack.compile_env(all_compile_env)
end
defp each_cycle(compile_path, timestamp) do
{modules, _exports, sources, pending_modules, pending_exports} = get_compiler_info()
{pending_modules, exports, changed} =
update_stale_entries(pending_modules, sources, [], %{}, pending_exports, compile_path)
# For each changed file, mark it as changed.
# If compilation fails mid-cycle, they will
# be picked next time around.
for file <- changed do
File.touch!(file, timestamp)
end
if changed == [] do
runtime_modules = dependent_runtime_modules(sources, modules, pending_modules)
warnings = Mix.Compilers.ApplicationTracer.warnings(modules)
{:runtime, runtime_modules, warnings}
else
modules =
for module(sources: source_files) = module <- modules do
module(module, sources: source_files -- changed)
end
      # If we have a compile-time dependency on a module, then as soon as its file
      # changes, we will detect the compile-time dependency and recompile. However,
# the whole goal of pending exports is to delay this decision, so we need to
# track which modules were removed and start them as our pending exports and
# remove the pending exports as we notice they have not gone stale.
{sources, removed_modules} = update_stale_sources(sources, changed)
put_compiler_info({modules, exports, sources, pending_modules, removed_modules})
{:compile, changed, []}
end
end
defp dependent_runtime_modules(sources, all_modules, pending_modules) do
changed_modules =
for module(module: module) = entry <- all_modules,
entry not in pending_modules,
into: %{},
do: {module, true}
fixpoint_runtime_modules(sources, changed_modules, %{}, pending_modules)
end
defp fixpoint_runtime_modules(sources, changed, dependent, not_dependent) do
{new_dependent, not_dependent} =
Enum.reduce(not_dependent, {dependent, []}, fn module, {new_dependent, not_dependent} ->
depending? =
Enum.any?(module(module, :sources), fn file ->
source(runtime_references: runtime_refs) =
List.keyfind(sources, file, source(:source))
has_any_key?(changed, runtime_refs)
end)
if depending? do
{Map.put(new_dependent, module(module, :module), true), not_dependent}
else
{new_dependent, [module | not_dependent]}
end
end)
if map_size(dependent) != map_size(new_dependent) do
fixpoint_runtime_modules(sources, new_dependent, new_dependent, not_dependent)
else
Map.keys(new_dependent)
end
end
defp each_module(file, module, _binary, cwd) do
{modules, exports, sources, pending_modules, pending_exports} = get_compiler_info()
kind = detect_kind(module)
file = Path.relative_to(file, cwd)
external = get_external_resources(module, cwd)
old_export = Map.get(exports, module)
new_export = exports_md5(module, true)
pending_exports =
if old_export && old_export != new_export do
pending_exports
else
Map.delete(pending_exports, module)
end
{module_sources, existing_module?} =
case List.keyfind(modules, module, module(:module)) do
module(sources: old_sources) -> {[file | List.delete(old_sources, file)], true}
nil -> {[file], false}
end
{source, sources} =
List.keytake(sources, file, source(:source)) ||
Mix.raise(
"Could not find source for #{inspect(file)}. Make sure the :elixirc_paths configuration " <>
"is a list of relative paths to the current project or absolute paths to external directories"
)
source =
source(
source,
external: external ++ source(source, :external),
modules: [module | source(source, :modules)]
)
module =
module(
module: module,
kind: kind,
sources: module_sources,
export: new_export,
recompile?: function_exported?(module, :__mix_recompile__?, 0)
)
modules = prepend_or_merge(modules, module, module(:module), module, existing_module?)
put_compiler_info({modules, exports, [source | sources], pending_modules, pending_exports})
:ok
end
defp recompile_module?(module) do
Code.ensure_loaded?(module) and
function_exported?(module, :__mix_recompile__?, 0) and
module.__mix_recompile__?()
end
defp prepend_or_merge(collection, key, pos, value, true) do
List.keystore(collection, key, pos, value)
end
defp prepend_or_merge(collection, _key, _pos, value, false) do
[value | collection]
end
defp detect_kind(module) do
protocol_metadata = Module.get_attribute(module, :__impl__)
cond do
is_list(protocol_metadata) and protocol_metadata[:protocol] ->
{:impl, protocol_metadata[:protocol]}
is_list(Module.get_attribute(module, :__protocol__)) ->
:protocol
true ->
:module
end
end
defp get_external_resources(module, cwd) do
for file <- Module.get_attribute(module, :external_resource), do: Path.relative_to(file, cwd)
end
defp each_file(file, lexical, cwd, verbose) do
file = Path.relative_to(file, cwd)
if verbose do
Mix.shell().info("Compiled #{file}")
end
{modules, exports, sources, pending_modules, pending_exports} = get_compiler_info()
{source, sources} = List.keytake(sources, file, source(:source))
{compile_references, export_references, runtime_references, compile_env} =
Kernel.LexicalTracker.references(lexical)
compile_references =
Enum.reject(compile_references, &match?("elixir_" <> _, Atom.to_string(&1)))
source(modules: source_modules) = source
compile_references = compile_references -- source_modules
export_references = export_references -- source_modules
runtime_references = runtime_references -- source_modules
source =
source(
source,
compile_references: compile_references,
export_references: export_references,
runtime_references: runtime_references,
compile_env: compile_env
)
put_compiler_info({modules, exports, [source | sources], pending_modules, pending_exports})
:ok
end
defp each_long_compilation(file, cwd, threshold) do
Mix.shell().info(
"Compiling #{Path.relative_to(file, cwd)} (it's taking more than #{threshold}s)"
)
end
## Resolution
defp remove_removed_sources(sources, removed) do
Enum.reduce(removed, {sources, %{}}, fn file, {acc_sources, acc_modules} ->
{source(modules: modules), acc_sources} = List.keytake(acc_sources, file, source(:source))
acc_modules = Enum.reduce(modules, acc_modules, &Map.put(&2, &1, true))
{acc_sources, acc_modules}
end)
end
# Initial definition of empty records for changed sources
# as the compiler appends data. This may include new files,
# so we rely on sources_stats to avoid multiple FS lookups.
defp update_stale_sources(sources, stale, removed_modules, sources_stats) do
Enum.reduce(stale, {sources, removed_modules}, fn file, {acc_sources, acc_modules} ->
%{^file => {_, size}} = sources_stats
{modules, acc_sources} =
case List.keytake(acc_sources, file, source(:source)) do
{source(modules: modules), acc_sources} -> {modules, acc_sources}
nil -> {[], acc_sources}
end
acc_modules = Enum.reduce(modules, acc_modules, &Map.put(&2, &1, true))
{[source(source: file, size: size) | acc_sources], acc_modules}
end)
end
  # Define empty records for the sources that need
# to be recompiled (but were not changed on disk)
defp update_stale_sources(sources, changed) do
Enum.reduce(changed, {sources, %{}}, fn file, {acc_sources, acc_modules} ->
{source(size: size, modules: modules), acc_sources} =
List.keytake(acc_sources, file, source(:source))
acc_modules = Enum.reduce(modules, acc_modules, &Map.put(&2, &1, true))
{[source(source: file, size: size) | acc_sources], acc_modules}
end)
end
# This function receives the manifest entries and some source
# files that have changed. Then it recursively figures out
# all the files that changed (via the module dependencies) and
  # returns the non-changed entries and the removed sources.
defp update_stale_entries(modules, _sources, [], stale_mods, stale_exports, _compile_path)
when stale_mods == %{} and stale_exports == %{} do
{modules, %{}, []}
end
defp update_stale_entries(modules, sources, changed, stale_mods, stale_exports, compile_path) do
changed = Enum.into(changed, %{}, &{&1, true})
reducer = &remove_stale_entry(&1, &2, sources, stale_exports, compile_path)
remove_stale_entries(modules, %{}, changed, stale_mods, reducer)
end
defp remove_stale_entries(modules, exports, old_changed, old_stale, reducer) do
{pending_modules, exports, new_changed, new_stale} =
Enum.reduce(modules, {[], exports, old_changed, old_stale}, reducer)
if map_size(new_stale) > map_size(old_stale) or map_size(new_changed) > map_size(old_changed) do
remove_stale_entries(pending_modules, exports, new_changed, new_stale, reducer)
else
{pending_modules, exports, Map.keys(new_changed)}
end
end
defp remove_stale_entry(entry, acc, sources, stale_exports, compile_path) do
module(module: module, sources: source_files, export: export) = entry
{rest, exports, changed, stale} = acc
{compile_references, export_references, runtime_references} =
Enum.reduce(source_files, {[], [], []}, fn file, {compile_acc, export_acc, runtime_acc} ->
source(
compile_references: compile_refs,
export_references: export_refs,
runtime_references: runtime_refs
) = List.keyfind(sources, file, source(:source))
{compile_acc ++ compile_refs, export_acc ++ export_refs, runtime_acc ++ runtime_refs}
end)
cond do
# If I changed in disk or have a compile time reference to
# something stale or have a reference to an old export,
# I need to be recompiled.
has_any_key?(changed, source_files) or has_any_key?(stale, compile_references) or
has_any_key?(stale_exports, export_references) ->
remove_and_purge(beam_path(compile_path, module), module)
changed = Enum.reduce(source_files, changed, &Map.put(&2, &1, true))
{rest, Map.put(exports, module, export), changed, Map.put(stale, module, true)}
# If I have a runtime references to something stale,
# I am stale too.
has_any_key?(stale, runtime_references) ->
{[entry | rest], exports, changed, Map.put(stale, module, true)}
# Otherwise, we don't store it anywhere
true ->
{[entry | rest], exports, changed, stale}
end
end
defp has_any_key?(map, enumerable) do
Enum.any?(enumerable, &Map.has_key?(map, &1))
end
defp stale_local_deps(manifest, modified, old_exports) do
base = Path.basename(manifest)
for %{scm: scm, opts: opts} = dep <- Mix.Dep.cached(),
not scm.fetchable?,
Mix.Utils.last_modified(Path.join([opts[:build], ".mix", base])) > modified,
reduce: {%{}, %{}, %{}, old_exports} do
{deps, modules, exports, new_exports} ->
{modules, exports, new_exports} =
for path <- Mix.Dep.load_paths(dep),
beam <- Path.wildcard(Path.join(path, "*.beam")),
Mix.Utils.last_modified(beam) > modified,
reduce: {modules, exports, new_exports} do
{modules, exports, new_exports} ->
module = beam |> Path.basename() |> Path.rootname() |> String.to_atom()
export = exports_md5(module, false)
modules = Map.put(modules, module, true)
# If the exports are the same, then the API did not change,
# so we do not mark the export as stale. Note this has to
# be very conservative. If the module is not loaded or if
# the exports were not there, we need to consider it a stale
# export.
exports =
if export && old_exports[module] == export,
do: exports,
else: Map.put(exports, module, true)
              # In any case, we always store it as the most up-to-date export
              # that we have; otherwise we delete it.
new_exports =
if export,
do: Map.put(new_exports, module, export),
else: Map.delete(new_exports, module)
{modules, exports, new_exports}
end
{Map.put(deps, dep.app, true), modules, exports, new_exports}
end
end
defp exports_md5(module, use_attributes?) do
cond do
function_exported?(module, :__info__, 1) ->
module.__info__(:exports_md5)
use_attributes? ->
defs = :lists.sort(Module.definitions_in(module, :def))
defmacros = :lists.sort(Module.definitions_in(module, :defmacro))
struct =
case Module.get_attribute(module, :__struct__) do
%{} = entry -> {entry, List.wrap(Module.get_attribute(module, :enforce_keys))}
_ -> nil
end
{defs, defmacros, struct} |> :erlang.term_to_binary() |> :erlang.md5()
true ->
nil
end
end
defp remove_and_purge(beam, module) do
_ = File.rm(beam)
_ = :code.purge(module)
_ = :code.delete(module)
end
defp show_warnings(sources) do
for source(source: source, warnings: warnings) <- sources do
file = Path.absname(source)
for {line, message} <- warnings do
:elixir_errors.erl_warn(line, file, message)
end
end
end
defp apply_warnings(sources, warnings) do
warnings = Enum.group_by(warnings, &elem(&1, 0), &{elem(&1, 1), elem(&1, 2)})
for source(source: source_path, warnings: source_warnings) = s <- sources do
source(s, warnings: Map.get(warnings, Path.absname(source_path), source_warnings))
end
end
defp warning_diagnostics(sources) do
for source(source: source, warnings: warnings) <- sources,
{line, message} <- warnings,
do: diagnostic({Path.absname(source), line, message}, :warning)
end
defp diagnostic({file, line, message}, severity) do
%Mix.Task.Compiler.Diagnostic{
file: file,
position: line,
message: message,
severity: severity,
compiler_name: "Elixir"
}
end
## Manifest handling
# Similar to read_manifest, but for internal consumption and with data migration support.
defp parse_manifest(manifest, compile_path) do
try do
manifest |> File.read!() |> :erlang.binary_to_term()
rescue
_ ->
{[], [], %{}}
else
{@manifest_vsn, modules, sources, local_exports} ->
{modules, sources, local_exports}
# From v5 and later
{vsn, modules, _sources} when is_integer(vsn) ->
purge_old_manifest(compile_path, modules)
# From v4 and before
[vsn | data] when is_integer(vsn) ->
purge_old_manifest(compile_path, data)
_ ->
{[], [], %{}}
end
end
defp purge_old_manifest(compile_path, data) do
try do
for module <- data, elem(module, 0) == :module do
module = elem(module, 1)
File.rm(beam_path(compile_path, module))
:code.purge(module)
:code.delete(module)
end
rescue
_ ->
Mix.raise(
"Cannot clean-up stale manifest, please run \"mix clean --deps\" manually before proceeding"
)
end
{[], [], %{}}
end
defp write_manifest(manifest, [], [], _exports, _timestamp) do
File.rm(manifest)
:ok
end
defp write_manifest(manifest, modules, sources, exports, timestamp) do
File.mkdir_p!(Path.dirname(manifest))
term = {@manifest_vsn, modules, sources, exports}
manifest_data = :erlang.term_to_binary(term, [:compressed])
File.write!(manifest, manifest_data)
File.touch!(manifest, timestamp)
# Since Elixir is a dependency itself, we need to touch the lock
# so the current Elixir version, used to compile the files above,
# is properly stored.
Mix.Dep.ElixirSCM.update()
end
defp beam_path(compile_path, module) do
Path.join(compile_path, Atom.to_string(module) <> ".beam")
end
end
| 34.002692 | 106 | 0.653776 |
7323139d838cb02053e9e8b2ef59e166e121cbdc | 8,574 | ex | Elixir | lib/hexpm/web/controllers/controller_helpers.ex | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/web/controllers/controller_helpers.ex | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/web/controllers/controller_helpers.ex | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | [
"Apache-2.0"
] | null | null | null | defmodule Hexpm.Web.ControllerHelpers do
import Plug.Conn
import Phoenix.Controller
alias Hexpm.Accounts.Auth
alias Hexpm.Repository.{Packages, Releases, Repositories}
@max_cache_age 60
def cache(conn, control, vary) do
conn
|> maybe_put_resp_header("cache-control", parse_control(control))
|> maybe_put_resp_header("vary", parse_vary(vary))
end
def api_cache(conn, privacy) do
control = [privacy] ++ ["max-age": @max_cache_age]
vary = ["accept", "accept-encoding"]
cache(conn, control, vary)
end
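  # For instance, api_cache(conn, :public) adds
  # "cache-control: public, max-age=60" and "vary: accept, accept-encoding".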
defp parse_vary(nil), do: nil
defp parse_vary(vary), do: Enum.map_join(vary, ", ", &"#{&1}")
defp parse_control(nil), do: nil
defp parse_control(control) do
Enum.map_join(control, ", ", fn
atom when is_atom(atom) -> "#{atom}"
{key, value} -> "#{key}=#{value}"
end)
end
defp maybe_put_resp_header(conn, _header, nil),
do: conn
defp maybe_put_resp_header(conn, header, value),
do: put_resp_header(conn, header, value)
def render_error(conn, status, assigns \\ []) do
conn
|> put_status(status)
|> put_layout(false)
|> render(Hexpm.Web.ErrorView, :"#{status}", assigns)
|> halt
end
def validation_failed(conn, %Ecto.Changeset{} = changeset) do
errors = translate_errors(changeset)
render_error(conn, 422, errors: errors)
end
def validation_failed(conn, errors) do
render_error(conn, 422, errors: errors_to_map(errors))
end
defp pretty_type({:array, type}),
do: "list(#{pretty_type(type)})"
defp pretty_type({:map, type}),
do: "map(#{pretty_type(type)})"
defp pretty_type(type),
do: to_string(type)
def translate_errors(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn
{"is invalid", [type: type, validation: _]} ->
"expected type #{pretty_type(type)}"
{msg, opts} ->
Enum.reduce(opts, msg, fn {key, value}, msg ->
if String.Chars.impl_for(key) && String.Chars.impl_for(value) do
String.replace(msg, "%{#{key}}", to_string(value))
else
raise "Unable to translate error: #{inspect({msg, opts})}"
end
end)
end)
|> normalize_errors
end
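  # For example (hypothetical changeset error), a failed length validation on
  # :name comes back as %{name: "should be at most 255 character(s)"} after
  # normalize_errors/1 unwraps the single-element list.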
# TODO: remove when requirements are handled with cast_assoc
defp errors_to_map(errors) when is_list(errors) do
Enum.into(errors, %{}, fn {key, value} -> {key, errors_to_map(value)} end)
end
defp errors_to_map(other), do: other
# TODO: Fix clients instead
# Since Changeset.traverse_errors returns `{field: [err], ...}`
# but Hex client expects `{field: err1, ...}` we normalize to the latter.
defp normalize_errors(errors) do
Enum.flat_map(errors, fn
{_key, val} when val == %{} -> []
{_key, [val|_]} when val == %{} -> []
{_key, []} -> []
{key, val} when is_map(val) -> [{key, normalize_errors(val)}]
{key, [val|_]} when is_map(val) -> [{key, normalize_errors(val)}]
{key, [val|_]} -> [{key, val}]
end)
|> Enum.into(%{})
end
def not_found(conn) do
render_error(conn, 404)
end
def when_stale(conn, entities, opts \\ [], fun) do
etag = etag(entities)
modified = if Keyword.get(opts, :modified, true), do: last_modified(entities)
conn =
conn
|> put_etag(etag)
|> put_last_modified(modified)
if fresh?(conn, etag: etag, modified: modified) do
send_resp(conn, 304, "")
else
fun.(conn)
end
end
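  # Typical use (handler body invented for illustration):
  #
  #     when_stale(conn, packages, fn conn ->
  #       render(conn, "index.html", packages: packages)
  #     end)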
defp put_etag(conn, nil),
do: conn
defp put_etag(conn, etag),
do: put_resp_header(conn, "etag", etag)
defp put_last_modified(conn, nil),
do: conn
defp put_last_modified(conn, modified),
do: put_resp_header(conn, "last-modified", :cowboy_clock.rfc1123(modified))
defp fresh?(conn, opts) do
not expired?(conn, opts)
end
defp expired?(conn, opts) do
modified_since = List.first get_req_header(conn, "if-modified-since")
none_match = List.first get_req_header(conn, "if-none-match")
if modified_since || none_match do
modified_since?(modified_since, opts[:modified]) or
none_match?(none_match, opts[:etag])
else
true
end
end
defp modified_since?(header, last_modified) do
if header && last_modified do
modified_since = :cowboy_http.rfc1123_date(header)
modified_since = :calendar.datetime_to_gregorian_seconds(modified_since)
last_modified = :calendar.datetime_to_gregorian_seconds(last_modified)
last_modified > modified_since
else
false
end
end
defp none_match?(none_match, etag) do
if none_match && etag do
none_match = Plug.Conn.Utils.list(none_match)
not(etag in none_match) and not("*" in none_match)
else
false
end
end
defp etag(nil), do: nil
defp etag([]), do: nil
defp etag(models) do
list = Enum.map(List.wrap(models), fn model ->
[model.__struct__, model.id, model.updated_at]
end)
binary = :erlang.term_to_binary(list)
:crypto.hash(:md5, binary)
|> Base.encode16(case: :lower)
end
def last_modified(nil), do: nil
def last_modified([]), do: nil
def last_modified(models) do
Enum.map(List.wrap(models), fn model ->
NaiveDateTime.to_erl(model.updated_at)
end)
|> Enum.max
end
def maybe_fetch_package(conn, _opts) do
if repository = Repositories.get(conn.params["repository"]) do
conn = assign(conn, :repository, repository)
if package = Packages.get(repository, conn.params["name"]) do
assign(conn, :package, package)
else
assign(conn, :package, nil)
end
else
conn |> not_found |> halt
end
end
def fetch_package(conn, _opts) do
if repository = Repositories.get(conn.params["repository"]) do
package = Packages.get(repository, conn.params["name"])
if package do
conn
|> assign(:repository, repository)
|> assign(:package, package)
else
conn |> not_found |> halt
end
else
conn |> not_found |> halt
end
end
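  # These helpers are plug functions; a controller could mount them as
  # (hypothetical):
  #
  #     plug :fetch_package when action in [:show]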
def fetch_release(conn, _opts) do
if repository = Repositories.get(conn.params["repository"]) do
package = Hexpm.Repository.Packages.get(repository, conn.params["name"])
release = package && Releases.get(package, conn.params["version"])
if release do
conn
|> assign(:repository, repository)
|> assign(:package, package)
|> assign(:release, release)
else
conn |> not_found |> halt
end
else
conn |> not_found |> halt
end
end
def authorize(conn, opts) do
fun = Keyword.get(opts, :fun, fn _, _ -> true end)
Hexpm.Web.AuthHelpers.authorized(conn, opts, &fun.(conn, &1))
end
def audit_data(conn) do
# TODO: We should generalize logged_in and user between
# the web and api pipelines
user = conn.assigns[:logged_in] || conn.assigns[:user]
{user, conn.assigns.user_agent}
end
def success_to_status(true), do: 200
def success_to_status(false), do: 400
def password_auth(username, password) do
case Auth.password_auth(username, password) do
{:ok, {user, nil, email}} ->
if email.verified,
do: {:ok, user},
else: {:error, :unconfirmed}
:error ->
{:error, :wrong}
end
end
def auth_error_message(:wrong), do: "Invalid username, email or password."
def auth_error_message(:unconfirmed), do: "Email has not been verified yet."
def requires_login(conn, _opts) do
if logged_in?(conn) do
conn
else
redirect(conn, to: Hexpm.Web.Router.Helpers.login_path(conn, :show, return: conn.request_path))
|> halt
end
end
def logged_in?(conn) do
!!conn.assigns[:logged_in]
end
def nillify_params(conn, keys) do
params =
Enum.reduce(keys, conn.params, fn key, params ->
case Map.fetch(conn.params, key) do
{:ok, value} -> Map.put(params, key, scrub_param(value))
:error -> params
end
end)
%{conn | params: params}
end
defp scrub_param(%{__struct__: mod} = struct) when is_atom(mod) do
struct
end
defp scrub_param(%{} = param) do
Enum.reduce(param, %{}, fn({k, v}, acc) ->
Map.put(acc, k, scrub_param(v))
end)
end
defp scrub_param(param) when is_list(param) do
Enum.map(param, &scrub_param/1)
end
defp scrub_param(param) do
if scrub?(param), do: nil, else: param
end
defp scrub?(" " <> rest), do: scrub?(rest)
defp scrub?(""), do: true
defp scrub?(_), do: false
end
| 28.111475 | 101 | 0.630627 |
732316aeb54bcd3e6716eb3275c61f08b59a541a | 543 | ex | Elixir | lib/docsbr.ex | oborba/docsbr | f1a8b2e7a6e8939962e3286dbd2569e4dfb1906a | [
"MIT"
] | 2 | 2016-05-30T14:39:46.000Z | 2016-08-09T17:25:09.000Z | lib/docsbr.ex | oborba/docsbr | f1a8b2e7a6e8939962e3286dbd2569e4dfb1906a | [
"MIT"
] | null | null | null | lib/docsbr.ex | oborba/docsbr | f1a8b2e7a6e8939962e3286dbd2569e4dfb1906a | [
"MIT"
] | null | null | null | defmodule Docsbr do
@moduledoc"""
Main module, parse the options and generate a required document
"""
alias Docsbr.GenerateCpf, as: Cpf
alias Docsbr.GenerateCnpj, as: Cnpj
def main(args), do: args |> parse_args |> process
defp parse_args(args) do
{options, _, _} = OptionParser.parse(args,
switches: [doc: :string]
)
options[:doc]
end
defp process("cpf"), do: IO.puts Cpf.generate
defp process("cnpj"), do: IO.puts Cnpj.generate
defp process(_), do: IO.puts "Usage: $ docsbr --doc=<cpf | cnpj>"
end
| 24.681818 | 67 | 0.664825 |
732332e3ca70129f8dcda9c466838febe2b000b6 | 391 | ex | Elixir | lib/code_corps/cloudex/cloudinary_url.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | null | null | null | lib/code_corps/cloudex/cloudinary_url.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | null | null | null | lib/code_corps/cloudex/cloudinary_url.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | null | null | null | defmodule CodeCorps.Cloudex.CloudinaryUrl do
@cloudex Application.get_env(:code_corps, :cloudex)
def for(nil, _options, version, default_color, type) do
"#{Application.get_env(:code_corps, :asset_host)}/icons/#{type}_default_#{version}_#{default_color}.png"
end
def for(public_id, options, _version, _default_color, _type) do
@cloudex.Url.for(public_id, options)
end
end
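# Example of the default-icon fallback (arguments invented for illustration):
#
#     CodeCorps.Cloudex.CloudinaryUrl.for(nil, %{}, "large", "blue", "user")
#     #=> "<asset_host>/icons/user_default_large_blue.png"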
| 32.583333 | 108 | 0.751918 |
73234d561d6c9ab249c96da719be849fa14c8b18 | 80 | exs | Elixir | backend/test/bucoliq_web/views/page_view_test.exs | antogon/bucoliq | 0fe4727c4312322862d30014bdfae2530cc49de1 | [
"MIT"
] | null | null | null | backend/test/bucoliq_web/views/page_view_test.exs | antogon/bucoliq | 0fe4727c4312322862d30014bdfae2530cc49de1 | [
"MIT"
] | 9 | 2019-12-01T18:31:31.000Z | 2021-03-10T00:38:48.000Z | backend/test/bucoliq_web/views/page_view_test.exs | antogon/bucoliq | 0fe4727c4312322862d30014bdfae2530cc49de1 | [
"MIT"
] | null | null | null | defmodule BucoliqWeb.PageViewTest do
use BucoliqWeb.ConnCase, async: true
end
| 20 | 38 | 0.825 |
732358fff1184f82302c40cde0592bf8843685cc | 1,004 | exs | Elixir | parkapp_server/config/test.exs | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | 2 | 2018-11-06T12:21:16.000Z | 2018-11-21T10:20:17.000Z | parkapp_server/config/test.exs | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | null | null | null | parkapp_server/config/test.exs | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :parkapp, ParkappWeb.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :parkapp, Parkapp.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "parkapp_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
config :parkapp, :embers_api, module: ParkappWeb.ApiIntegration.Embers.Mock
config :parkapp, :mb_way_api,
module: ParkappWeb.ApiIntegration.MBWay.Mock,
decrypt_secret: "33B62F65204D2F60A363C372DF19960828DFA4733016D3EBA4BBF38CBE3C29D5"
config :parkapp, :reservations_gen_server,
  # in milliseconds
schedule_interval: 100,
# in seconds
time_to_enter_park: 1,
# in seconds
cancel_reservation_ban_time: 600,
sync_state_module: ParkappWeb.ApiIntegration.GenServers.StateSync.ReservationStateSync
| 28.685714 | 88 | 0.778884 |
73235bdde28a5d0f2de95e12c1248a29aab72d08 | 560 | ex | Elixir | dummy/lib/dummy_web/router.ex | zgohr/turbo_ecto | 2467be3f0923193349c08d18061efbc952523b21 | [
"MIT"
] | 4 | 2019-01-23T14:10:31.000Z | 2019-05-14T15:41:11.000Z | dummy/lib/dummy_web/router.ex | zgohr/turbo_ecto | 2467be3f0923193349c08d18061efbc952523b21 | [
"MIT"
] | 35 | 2019-02-19T02:11:33.000Z | 2021-09-01T07:09:42.000Z | dummy/lib/dummy_web/router.ex | zgohr/turbo_ecto | 2467be3f0923193349c08d18061efbc952523b21 | [
"MIT"
] | 4 | 2019-11-03T16:11:39.000Z | 2022-03-05T14:34:23.000Z | defmodule DummyWeb.Router do
use DummyWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", DummyWeb do
pipe_through :browser
get "/", PostController, :index
resources "/users", UserController
resources "/posts", PostController
end
# Other scopes may use custom stacks.
# scope "/api", DummyWeb do
# pipe_through :api
# end
end
| 18.666667 | 39 | 0.673214 |
732399e002cc276a4ebd6ba0398522e9d1432d49 | 106 | ex | Elixir | lib/pobcoin/repo.ex | Bentheburrito/pobcoin | 36f879ad1bae2660ace43dce66cada0c0b559dc9 | [
"MIT"
] | null | null | null | lib/pobcoin/repo.ex | Bentheburrito/pobcoin | 36f879ad1bae2660ace43dce66cada0c0b559dc9 | [
"MIT"
] | null | null | null | lib/pobcoin/repo.ex | Bentheburrito/pobcoin | 36f879ad1bae2660ace43dce66cada0c0b559dc9 | [
"MIT"
] | null | null | null | defmodule Pobcoin.Repo do
use Ecto.Repo,
otp_app: :pobcoin,
adapter: Ecto.Adapters.Postgres
end
| 17.666667 | 35 | 0.726415 |
7323a04f04461cd8a8ba35dbe0c8791841132457 | 102 | exs | Elixir | .iex.exs | nerves-project/system_registry | b0aa7d8826e5c37a374961338596fee10556ec79 | [
"Apache-2.0"
] | 9 | 2017-09-08T06:34:48.000Z | 2019-08-03T15:35:10.000Z | .iex.exs | nerves-project/system_registry | b0aa7d8826e5c37a374961338596fee10556ec79 | [
"Apache-2.0"
] | 13 | 2017-08-29T22:55:09.000Z | 2019-10-16T20:02:06.000Z | .iex.exs | nerves-project/system_registry | b0aa7d8826e5c37a374961338596fee10556ec79 | [
"Apache-2.0"
] | 4 | 2017-09-07T16:36:40.000Z | 2019-03-14T00:23:45.000Z | alias SystemRegistry, as: SR
alias SystemRegistry.Transaction, as: T
alias SystemRegistry.Node, as: N
| 25.5 | 39 | 0.803922 |
7323be095c71fb2ddc955a88acfaf615bbbe85e8 | 2,855 | exs | Elixir | apps/astarte_trigger_engine/test/amqp_events_consumer_test.exs | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | apps/astarte_trigger_engine/test/amqp_events_consumer_test.exs | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | apps/astarte_trigger_engine/test/amqp_events_consumer_test.exs | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2018 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.TriggerEngine.AMQPEventsConsumerTest do
use ExUnit.Case
import Mox
alias Astarte.TriggerEngine.AMQPEventsConsumer
alias Astarte.TriggerEngine.Config
alias AMQP.{Basic, Channel, Connection}
@payload "some_payload"
@payload2 "some_other_payload"
@headers [one: "header", another: "different header", number: 42]
@headers2 [different: "headers", anothernumber: 100]
setup_all do
:ok = wait_for_connection()
amqp_consumer_options = Config.amqp_consumer_options!()
{:ok, conn} = Connection.open(amqp_consumer_options)
{:ok, chan} = Channel.open(conn)
{:ok, chan: chan}
end
describe "AMQP message consuming" do
setup :set_mox_global
test "calls EventsConsumer when receiving an event", %{chan: chan} do
map_headers =
Enum.reduce(@headers, %{}, fn {k, v}, acc ->
Map.put(acc, to_string(k), v)
end)
map_headers2 =
Enum.reduce(@headers2, %{}, fn {k, v}, acc ->
Map.put(acc, to_string(k), v)
end)
MockEventsConsumer
|> expect(:consume, fn payload, headers ->
assert payload == @payload
assert is_map(headers)
assert headers == map_headers
end)
|> expect(:consume, fn payload, headers ->
assert payload == @payload2
assert is_map(headers)
assert headers == map_headers2
end)
assert :ok == produce_event(chan, @payload, @headers)
assert :ok == produce_event(chan, @payload2, @headers2)
# Leave time for the consumer to ack
:timer.sleep(1000)
end
end
defp wait_for_connection(retry_count \\ 0)
# Avoid endless waiting (retry_count > 50 ~= 5 seconds)
defp wait_for_connection(retry_count) when retry_count > 50 do
{:error, :not_connected}
end
defp wait_for_connection(retry_count) do
%{channel: chan} = :sys.get_state(AMQPEventsConsumer)
if chan do
:ok
else
:timer.sleep(100)
wait_for_connection(retry_count + 1)
end
end
defp produce_event(chan, payload, headers) do
exchange = Config.events_exchange_name!()
routing_key = Config.events_routing_key!()
Basic.publish(chan, exchange, routing_key, payload, headers: headers)
end
end
| 28.267327 | 74 | 0.681261 |
7323d1b7d5a08db194edc2ad0774c014a5894b3d | 209 | exs | Elixir | aoc-2019/day1/test/part2_test.exs | danurna/elixir-playground | 6acb40e513d8ab324368b3ec5151b0a4fd88f849 | [
"MIT"
] | null | null | null | aoc-2019/day1/test/part2_test.exs | danurna/elixir-playground | 6acb40e513d8ab324368b3ec5151b0a4fd88f849 | [
"MIT"
] | null | null | null | aoc-2019/day1/test/part2_test.exs | danurna/elixir-playground | 6acb40e513d8ab324368b3ec5151b0a4fd88f849 | [
"MIT"
] | null | null | null | defmodule Part2Test do
use ExUnit.Case
test "calculates fuel" do
assert Part2.total_fuel([14]) == 2
assert Part2.total_fuel([1969]) == 966
assert Part2.total_fuel([100756]) == 50346
end
end
| 20.9 | 46 | 0.684211 |
7323d8c06e45291b494b2efdfba099e9e667035b | 5,469 | exs | Elixir | test/meeseeks/select_test.exs | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 291 | 2017-03-27T15:53:36.000Z | 2022-03-14T23:01:42.000Z | test/meeseeks/select_test.exs | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 70 | 2017-03-30T23:32:34.000Z | 2021-06-27T06:26:28.000Z | test/meeseeks/select_test.exs | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 23 | 2017-06-18T10:29:04.000Z | 2021-11-04T13:08:12.000Z | defmodule Meeseeks.SelectTest do
use ExUnit.Case
import Meeseeks.CSS
alias Meeseeks.{Accumulator, Context, Error, Result, Select}
@document Meeseeks.Parser.parse("""
<html>
<head></head>
<body>
<div class="main">
<p id="first-p">1</p>
<p data-id="second-p">2</p>
<special:p>3</special:p>
<div class="secondary">
<p>4</p>
<p>5</p>
</div>
</div>
</body>
</html>
""")
test "select all paragraphs in divs" do
selector = css("div p")
expected = [
%Result{id: 8, document: @document},
%Result{id: 11, document: @document},
%Result{id: 14, document: @document},
%Result{id: 19, document: @document},
%Result{id: 22, document: @document}
]
assert Select.all(@document, selector, %{}) == expected
end
test "select all links" do
selector = css("link")
expected = []
assert Select.all(@document, selector, %{}) == expected
end
test "fetch_all paragraphs in divs" do
selector = css("div p")
expected =
{:ok,
[
%Result{id: 8, document: @document},
%Result{id: 11, document: @document},
%Result{id: 14, document: @document},
%Result{id: 19, document: @document},
%Result{id: 22, document: @document}
]}
assert Select.fetch_all(@document, selector, %{}) == expected
end
test "fetch_all links" do
selector = css("link")
expected = {:error, %Error{type: :select, reason: :no_match}}
assert Select.fetch_all(@document, selector, %{}) == expected
end
test "select first paragraph" do
selector = css("div.main > p")
expected = %Result{id: 8, document: @document}
assert Select.one(@document, selector, %{}) == expected
end
test "select first link" do
selector = css("link")
expected = nil
assert Select.one(@document, selector, %{}) == expected
end
test "fetch_one paragraph" do
selector = css("div.main > p")
expected = {:ok, %Result{id: 8, document: @document}}
assert Select.fetch_one(@document, selector, %{}) == expected
end
test "fetch_one link" do
selector = css("link")
expected = {:error, %Error{type: :select, reason: :no_match}}
assert Select.fetch_one(@document, selector, %{}) == expected
end
test "select all with class 'main'" do
selector = css(".main")
context = Context.add_accumulator(%{}, %Accumulator.All{})
expected = [%Result{id: 6, document: @document}]
assert Select.select(@document, selector, context) == expected
end
test "select first with id 'first-p'" do
selector = css("#first-p")
expected = %Result{id: 8, document: @document}
assert Select.one(@document, selector, %{}) == expected
end
test "select all with data attributes" do
selector = css("*[^data-]")
expected = [%Result{id: 11, document: @document}]
assert Select.all(@document, selector, %{}) == expected
end
test "select third paragraph" do
selector = css("p:nth-child(3)")
context = Context.add_accumulator(%{}, %Accumulator.One{})
expected = %Result{id: 14, document: @document}
assert Select.select(@document, selector, context) == expected
end
test "select second-of-type that does not have [data-id=second-p]" do
selector = css("p:nth-of-type(2):not([data-id=second-p])")
expected = %Result{id: 22, document: @document}
assert Select.one(@document, selector, %{}) == expected
end
test "select all with class 'nonexistent' (no match)" do
selector = css(".nonexistent")
expected = []
assert Select.all(@document, selector, %{}) == expected
end
test "select one with class 'nonexistent' (no match)" do
selector = css("*|*.nonexistent")
expected = nil
assert Select.one(@document, selector, %{}) == expected
end
test "select all with namespace 'special'" do
selector = css("special|*")
expected = [%Result{id: 14, document: @document}]
assert Select.all(@document, selector, %{}) == expected
end
@result %Result{id: 17, document: @document}
test "select all paragraphs from result" do
selector = css("p")
expected = [
%Result{id: 19, document: @document},
%Result{id: 22, document: @document}
]
assert Select.all(@result, selector, %{}) == expected
end
test "select next sibling p of first p from result" do
selector = css("#first-p + p")
expected = [%Result{id: 11, document: @document}]
assert Select.all(@document, selector, %{}) == expected
end
test "select next siblings of first p from result" do
selector = css("#first-p ~ *")
expected = [
%Result{id: 11, document: @document},
%Result{id: 14, document: @document},
%Result{id: 17, document: @document}
]
assert Select.all(@document, selector, %{}) == expected
end
test "select with string instead of selector" do
selector = "#first-p ~ *"
assert_raise Error, ~r/Type: :select\n\n Reason: :invalid_selectors/, fn ->
Select.all(@document, selector, %{})
end
end
test "select without an accumulator" do
selector = css("#first-p ~ *")
context = %{}
assert_raise Error, ~r/Type: :context\n\n Reason: :accumulator_required/, fn ->
Select.select(@document, selector, context)
end
end
end
| 28.936508 | 84 | 0.601207 |
7323f63338443c3da3bae0b3484a8277510e8e94 | 13886 | ex | Elixir | lib/livebook_web/live/home_live.ex | FanJfly/livebook | 71319246c4b1ac9b557d864392113316e4187e41 | ["Apache-2.0"] | 1 | 2022-02-16T09:13:27.000Z | 2022-02-16T09:13:27.000Z | lib/livebook_web/live/home_live.ex | FanJfly/livebook | 71319246c4b1ac9b557d864392113316e4187e41 | ["Apache-2.0"] | null | null | null | lib/livebook_web/live/home_live.ex | FanJfly/livebook | 71319246c4b1ac9b557d864392113316e4187e41 | ["Apache-2.0"] | null | null | null | defmodule LivebookWeb.HomeLive do
use LivebookWeb, :live_view
import LivebookWeb.SessionHelpers
import LivebookWeb.UserHelpers
alias LivebookWeb.{SidebarHelpers, ExploreHelpers}
alias Livebook.{Sessions, Session, LiveMarkdown, Notebook, FileSystem}
@impl true
def mount(params, _session, socket) do
if connected?(socket) do
Phoenix.PubSub.subscribe(Livebook.PubSub, "tracker_sessions")
end
sessions = Sessions.list_sessions()
notebook_infos = Notebook.Explore.visible_notebook_infos() |> Enum.take(3)
{:ok,
socket
|> SidebarHelpers.shared_home_handlers()
|> assign(
file: determine_file(params),
file_info: %{exists: true, access: :read_write},
sessions: sessions,
notebook_infos: notebook_infos,
page_title: "Livebook"
)}
end
@impl true
def render(assigns) do
~H"""
<div class="flex grow h-full">
<SidebarHelpers.sidebar>
<SidebarHelpers.shared_home_footer
socket={@socket}
current_user={@current_user}
user_path={Routes.home_path(@socket, :user)} />
</SidebarHelpers.sidebar>
<div class="grow px-6 py-8 overflow-y-auto">
<div class="max-w-screen-lg w-full mx-auto px-4 pb-8 space-y-4">
<div class="flex flex-col space-y-2 items-center pb-4 border-b border-gray-200
sm:flex-row sm:space-y-0 sm:justify-between">
<div class="text-2xl text-gray-800 font-semibold">
<img src="/images/logo-with-text.png" class="h-[50px]" alt="Livebook" />
<h1 class="sr-only">Livebook</h1>
</div>
<div class="flex space-x-2 pt-2" role="navigation" aria-label="new notebook">
<%= live_patch "Import",
to: Routes.home_path(@socket, :import, "url"),
class: "button-base button-outlined-gray whitespace-nowrap" %>
<button class="button-base button-blue" phx-click="new">
New notebook
</button>
</div>
</div>
<div class="h-80" role="region" aria-label="file system">
<.live_component module={LivebookWeb.FileSelectComponent}
id="home-file-select"
file={@file}
extnames={[LiveMarkdown.extension()]}
running_files={files(@sessions)}>
<div class="flex justify-end space-x-2">
<button class="button-base button-outlined-gray whitespace-nowrap"
phx-click="fork"
disabled={not path_forkable?(@file, @file_info)}>
<.remix_icon icon="git-branch-line" class="align-middle mr-1" />
<span>Fork</span>
</button>
<%= if file_running?(@file, @sessions) do %>
<%= live_redirect "Join session",
to: Routes.session_path(@socket, :page, session_id_by_file(@file, @sessions)),
class: "button-base button-blue" %>
<% else %>
<span {open_button_tooltip_attrs(@file, @file_info)}>
<button class="button-base button-blue"
phx-click="open"
disabled={not path_openable?(@file, @file_info, @sessions)}>
Open
</button>
</span>
<% end %>
</div>
</.live_component>
</div>
<div class="py-12" data-element="explore-section" role="region" aria-label="explore section">
<div class="mb-4 flex justify-between items-center">
<h2 class="uppercase font-semibold text-gray-500">
Explore
</h2>
<%= live_redirect to: Routes.explore_path(@socket, :page),
class: "flex items-center text-blue-600" do %>
<span class="font-semibold">See all</span>
<.remix_icon icon="arrow-right-line" class="align-middle ml-1" />
<% end %>
</div>
<div class="grid grid-cols-1 md:grid-cols-3 gap-4">
<%# Note: it's fine to use stateless components in this comprehension,
because @notebook_infos never change %>
<%= for info <- @notebook_infos do %>
<ExploreHelpers.notebook_card notebook_info={info} socket={@socket} />
<% end %>
</div>
</div>
<div class="py-12" role="region" aria-label="running sessions">
<.live_component module={LivebookWeb.HomeLive.SessionListComponent}
id="session-list"
sessions={@sessions}/>
</div>
</div>
</div>
</div>
<%= if @live_action == :user do %>
<.current_user_modal
return_to={Routes.home_path(@socket, :page)}
current_user={@current_user} />
<% end %>
<%= if @live_action == :close_session do %>
<.modal class="w-full max-w-xl" return_to={Routes.home_path(@socket, :page)}>
<.live_component module={LivebookWeb.HomeLive.CloseSessionComponent}
id="close-session"
return_to={Routes.home_path(@socket, :page)}
session={@session} />
</.modal>
<% end %>
<%= if @live_action == :import do %>
<.modal class="w-full max-w-xl" return_to={Routes.home_path(@socket, :page)}>
<.live_component module={LivebookWeb.HomeLive.ImportComponent}
id="import"
tab={@tab}
import_opts={@import_opts} />
</.modal>
<% end %>
<%= if @live_action == :edit_sessions do %>
<.modal class="w-full max-w-xl" return_to={Routes.home_path(@socket, :page)}>
<.live_component module={LivebookWeb.HomeLive.EditSessionsComponent}
id="edit-sessions"
action={@bulk_action}
return_to={Routes.home_path(@socket, :page)}
sessions={@sessions}
selected_sessions={selected_sessions(@sessions, @selected_session_ids)} />
</.modal>
<% end %>
"""
end
defp open_button_tooltip_attrs(file, file_info) do
if regular?(file, file_info) and not writable?(file_info) do
[class: "tooltip top", data_tooltip: "This file is write-protected, please fork instead"]
else
[]
end
end
@impl true
def handle_params(%{"session_id" => session_id}, _url, socket) do
session = Enum.find(socket.assigns.sessions, &(&1.id == session_id))
{:noreply, assign(socket, session: session)}
end
def handle_params(%{"action" => action}, _url, socket)
when socket.assigns.live_action == :edit_sessions do
{:noreply, assign(socket, bulk_action: action)}
end
def handle_params(%{"tab" => tab} = params, _url, socket)
when socket.assigns.live_action == :import do
import_opts = [url: params["url"]]
{:noreply, assign(socket, tab: tab, import_opts: import_opts)}
end
def handle_params(%{"url" => url}, _url, socket)
when socket.assigns.live_action == :public_import do
origin = Notebook.ContentLoader.url_to_location(url)
origin
|> Notebook.ContentLoader.fetch_content_from_location()
|> case do
{:ok, content} ->
socket = import_content(socket, content, origin: origin)
{:noreply, socket}
{:error, _message} ->
{:noreply, push_patch(socket, to: Routes.home_path(socket, :import, "url", url: url))}
end
end
def handle_params(%{"path" => path} = _params, _uri, socket)
when socket.assigns.live_action == :public_open do
file = FileSystem.File.local(path)
if file_running?(file, socket.assigns.sessions) do
session_id = session_id_by_file(file, socket.assigns.sessions)
{:noreply, push_redirect(socket, to: Routes.session_path(socket, :page, session_id))}
else
{:noreply, open_notebook(socket, FileSystem.File.local(path))}
end
end
def handle_params(_params, _url, socket), do: {:noreply, socket}
@impl true
def handle_event("new", %{}, socket) do
{:noreply, create_session(socket)}
end
def handle_event("fork", %{}, socket) do
file = socket.assigns.file
socket =
case import_notebook(file) do
{:ok, {notebook, messages}} ->
notebook = Notebook.forked(notebook)
images_dir = Session.images_dir_for_notebook(file)
socket
|> put_import_warnings(messages)
|> create_session(
notebook: notebook,
copy_images_from: images_dir,
origin: {:file, file}
)
{:error, error} ->
put_flash(socket, :error, Livebook.Utils.upcase_first(error))
end
{:noreply, socket}
end
def handle_event("open", %{}, socket) do
file = socket.assigns.file
{:noreply, open_notebook(socket, file)}
end
def handle_event("bulk_action", %{"action" => "disconnect"} = params, socket) do
socket = assign(socket, selected_session_ids: params["session_ids"])
{:noreply, push_patch(socket, to: Routes.home_path(socket, :edit_sessions, "disconnect"))}
end
def handle_event("bulk_action", %{"action" => "close_all"} = params, socket) do
socket = assign(socket, selected_session_ids: params["session_ids"])
{:noreply, push_patch(socket, to: Routes.home_path(socket, :edit_sessions, "close_all"))}
end
def handle_event("disconnect_runtime", %{"id" => session_id}, socket) do
session = Enum.find(socket.assigns.sessions, &(&1.id == session_id))
Session.disconnect_runtime(session.pid)
{:noreply, socket}
end
def handle_event("fork_session", %{"id" => session_id}, socket) do
session = Enum.find(socket.assigns.sessions, &(&1.id == session_id))
%{images_dir: images_dir} = session
data = Session.get_data(session.pid)
notebook = Notebook.forked(data.notebook)
origin =
if data.file do
{:file, data.file}
else
data.origin
end
{:noreply,
create_session(socket,
notebook: notebook,
copy_images_from: images_dir,
origin: origin
)}
end
def handle_event("open_autosave_directory", %{}, socket) do
file =
Livebook.Settings.autosave_path()
|> FileSystem.Utils.ensure_dir_path()
|> FileSystem.File.local()
file_info = %{exists: true, access: file_access(file)}
{:noreply, assign(socket, file: file, file_info: file_info)}
end
@impl true
def handle_info({:set_file, file, info}, socket) do
file_info = %{exists: info.exists, access: file_access(file)}
{:noreply, assign(socket, file: file, file_info: file_info)}
end
def handle_info({:session_created, session}, socket) do
if session in socket.assigns.sessions do
{:noreply, socket}
else
{:noreply, assign(socket, sessions: [session | socket.assigns.sessions])}
end
end
def handle_info({:session_updated, session}, socket) do
sessions =
Enum.map(socket.assigns.sessions, fn other ->
if other.id == session.id, do: session, else: other
end)
{:noreply, assign(socket, sessions: sessions)}
end
def handle_info({:session_closed, session}, socket) do
sessions = Enum.reject(socket.assigns.sessions, &(&1.id == session.id))
{:noreply, assign(socket, sessions: sessions)}
end
def handle_info({:import_content, content, session_opts}, socket) do
socket = import_content(socket, content, session_opts)
{:noreply, socket}
end
defp files(sessions) do
Enum.map(sessions, & &1.file)
end
defp path_forkable?(file, file_info) do
regular?(file, file_info)
end
defp path_openable?(file, file_info, sessions) do
regular?(file, file_info) and not file_running?(file, sessions) and
writable?(file_info)
end
defp regular?(file, file_info) do
file_info.exists and not FileSystem.File.dir?(file)
end
defp writable?(file_info) do
file_info.access in [:read_write, :write]
end
defp file_running?(file, sessions) do
running_files = files(sessions)
file in running_files
end
defp import_notebook(file) do
with {:ok, content} <- FileSystem.File.read(file) do
{:ok, LiveMarkdown.notebook_from_livemd(content)}
end
end
defp session_id_by_file(file, sessions) do
session = Enum.find(sessions, &(&1.file == file))
session.id
end
defp import_content(socket, content, session_opts) do
{notebook, messages} = Livebook.LiveMarkdown.notebook_from_livemd(content)
socket =
socket
|> put_import_warnings(messages)
|> put_flash(
:info,
"You have imported a notebook, no code has been executed so far. You should read and evaluate code as needed."
)
session_opts = Keyword.merge(session_opts, notebook: notebook)
create_session(socket, session_opts)
end
defp file_access(file) do
case FileSystem.File.access(file) do
{:ok, access} -> access
{:error, _} -> :none
end
end
defp selected_sessions(sessions, selected_session_ids) do
Enum.filter(sessions, &(&1.id in selected_session_ids))
end
defp determine_file(%{"path" => path} = _params) do
cond do
File.dir?(path) ->
path
|> FileSystem.Utils.ensure_dir_path()
|> FileSystem.File.local()
File.regular?(path) ->
FileSystem.File.local(path)
true ->
Livebook.Config.local_filesystem_home()
end
end
defp determine_file(_params), do: Livebook.Config.local_filesystem_home()
defp open_notebook(socket, file) do
case import_notebook(file) do
{:ok, {notebook, messages}} ->
socket
|> put_import_warnings(messages)
|> create_session(notebook: notebook, file: file, origin: {:file, file})
{:error, error} ->
put_flash(socket, :error, Livebook.Utils.upcase_first(error))
end
end
end
| 33.460241 | 118 | 0.614792 |
7323fcf3aa538095be578a567002f7152bdabda8 | 220 | exs | Elixir | priv/repo/migrations/20170511133900_change_employees_date_types.exs | EDENLABLLC/prm.api | 86743f26874f47ce3d48010ccf5d2cd596a3474b | ["Apache-2.0"] | 1 | 2017-07-27T16:03:28.000Z | 2017-07-27T16:03:28.000Z | priv/repo/migrations/20170511133900_change_employees_date_types.exs | EDENLABLLC/prm.api | 86743f26874f47ce3d48010ccf5d2cd596a3474b | ["Apache-2.0"] | null | null | null | priv/repo/migrations/20170511133900_change_employees_date_types.exs | EDENLABLLC/prm.api | 86743f26874f47ce3d48010ccf5d2cd596a3474b | ["Apache-2.0"] | null | null | null | defmodule PRM.Repo.Migrations.ChangeEmployeesDateTypes do
use Ecto.Migration
def change do
alter table(:employees) do
modify :start_date, :date, null: false
modify :end_date, :date
end
end
end
| 20 | 57 | 0.709091 |
732432b2f3fad88c017c7bee2466bd2db7b66101 | 1547 | ex | Elixir | apps/ewallet/lib/ewallet/web/v1/overlays/key_overlay.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | ["Apache-2.0"] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet/lib/ewallet/web/v1/overlays/key_overlay.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | ["Apache-2.0"] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet/lib/ewallet/web/v1/overlays/key_overlay.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | ["Apache-2.0"] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.Web.V1.KeyOverlay do
@moduledoc """
Overlay for the Key schema.
"""
@behaviour EWallet.Web.V1.Overlay
alias EWallet.Web.V1.AccountOverlay
def preload_assocs,
do: []
def default_preload_assocs,
do: []
def search_fields,
do: [
:access_key
]
def sort_fields,
do: [
:access_key,
:inserted_at,
:updated_at
]
def self_filter_fields,
do: [
name: nil,
access_key: nil,
expired: nil,
global_role: nil,
inserted_at: :datetime,
updated_at: :datetime,
deleted_at: :datetime
]
def filter_fields,
do: [
name: nil,
access_key: nil,
expired: nil,
global_role: nil,
inserted_at: :datetime,
updated_at: :datetime,
deleted_at: :datetime,
account: AccountOverlay.self_filter_fields()
]
def pagination_fields,
do: [
:id,
:inserted_at,
:updated_at
]
end
| 21.788732 | 74 | 0.656755 |
73247ff09e0cfd39e1620ae1b9a0caea3fd3f906 | 331 | exs | Elixir | .formatter.exs | manulitic/instream | 5ad521dcf6a456325ba30b4c0dbb40f1f5107f32 | ["Apache-2.0"] | null | null | null | .formatter.exs | manulitic/instream | 5ad521dcf6a456325ba30b4c0dbb40f1f5107f32 | ["Apache-2.0"] | null | null | null | .formatter.exs | manulitic/instream | 5ad521dcf6a456325ba30b4c0dbb40f1f5107f32 | ["Apache-2.0"] | null | null | null | export_locals_without_parens = [
database: 1,
field: 1,
field: 2,
measurement: 1,
tag: 1,
tag: 2
]
[
inputs: [
"{bench,config,lib,test}/**/*.{ex,exs}",
"{.credo,.formatter,mix}.exs"
],
locals_without_parens: export_locals_without_parens,
export: [locals_without_parens: export_locals_without_parens]
]
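# Illustrative note (not part of the original file): projects that pull these
# exports in via `import_deps: [:instream]` keep the series DSL calls
# unparenthesized when formatting, e.g.
#
#     series do
#       database "my_database"
#       measurement "cpu_load"
#       tag :host
#       field :value
#     end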
| 18.388889 | 63 | 0.670695 |
7324cf96a064511aaa3d567f05ab34eb76cd9561 | 609 | ex | Elixir | lib/mix/tasks/tail.ex | rliebling/ex_aws | a8bfeff478e4a7584e765bb8f57ca6a8a5a4dad0 | ["MIT"] | 1 | 2021-12-16T20:32:27.000Z | 2021-12-16T20:32:27.000Z | lib/mix/tasks/tail.ex | rliebling/ex_aws | a8bfeff478e4a7584e765bb8f57ca6a8a5a4dad0 | ["MIT"] | 11 | 2021-08-02T18:13:25.000Z | 2022-03-23T20:53:41.000Z | lib/mix/tasks/tail.ex | rliebling/ex_aws | a8bfeff478e4a7584e765bb8f57ca6a8a5a4dad0 | ["MIT"] | 1 | 2021-06-23T14:28:08.000Z | 2021-06-23T14:28:08.000Z | defmodule Mix.Tasks.Aws.Kinesis.Tail do
use Mix.Task
@shortdoc "tails a stream"
@moduledoc """
Tails a Stream
## Usage
aws.kinesis.tail [stream_name] [options]
## Options
--poll N Time in seconds between polling. Default: 5
--debug Sets debug_requests: true on ex_aws. Logs all kinesis requests
--from Sequence number to start at. If unspecified, LATEST is used
## Examples
$ mix aws.kinesis.tail my-kinesis-stream
$ mix aws.kinesis.tail logs --debug --poll 10
"""
def run(_) do
raise "Not yet implemented in 1.0.0-beta1"
end
end
| 20.3 | 79 | 0.650246 |
7324f9e2b3f9bd39b49b2f01b7da6106d52c5652 | 1512 | ex | Elixir | clients/container_analysis/lib/google_api/container_analysis/v1/model/digest.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | ["Apache-2.0"] | null | null | null | clients/container_analysis/lib/google_api/container_analysis/v1/model/digest.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | ["Apache-2.0"] | null | null | null | clients/container_analysis/lib/google_api/container_analysis/v1/model/digest.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | ["Apache-2.0"] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1.Model.Digest do
@moduledoc """
Digest information.
## Attributes
* `algo` (*type:* `String.t`, *default:* `nil`) - `SHA1`, `SHA512` etc.
* `digestBytes` (*type:* `String.t`, *default:* `nil`) - Value of the digest.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:algo => String.t() | nil,
:digestBytes => String.t() | nil
}
field(:algo)
field(:digestBytes)
end
defimpl Poison.Decoder, for: GoogleApi.ContainerAnalysis.V1.Model.Digest do
def decode(value, options) do
GoogleApi.ContainerAnalysis.V1.Model.Digest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ContainerAnalysis.V1.Model.Digest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
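# Usage sketch (illustrative, not part of the generated file): the Poison
# protocol implementations above let a JSON payload decode straight into the model:
#
#     Poison.decode!(
#       ~s({"algo": "SHA512", "digestBytes": "..."}),
#       as: %GoogleApi.ContainerAnalysis.V1.Model.Digest{}
#     )
#     #=> %GoogleApi.ContainerAnalysis.V1.Model.Digest{algo: "SHA512", digestBytes: "..."}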
| 30.24 | 81 | 0.712302 |
732584afc0f99f9c58b58c262efb64c0cf856aa8 | 3886 | ex | Elixir | lib/plug/cowboy/conn.ex | wojtekmach/plug_cowboy | 45edb6ea83a854eaee7a9f88d8499599439f937a | ["Apache-2.0"] | 183 | 2018-10-18T18:50:17.000Z | 2022-01-11T22:28:20.000Z | deps/plug_cowboy/lib/plug/cowboy/conn.ex | rwtrecs/rocketseat-nlw5-inmana | 8ce8bc32e0bdd005c423394bb163945747b557e2 | ["MIT"] | 76 | 2018-10-19T07:54:00.000Z | 2022-02-26T13:22:36.000Z | deps/plug_cowboy/lib/plug/cowboy/conn.ex | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | ["MIT"] | 46 | 2018-10-18T21:38:44.000Z | 2021-12-29T12:39:25.000Z | defmodule Plug.Cowboy.Conn do
@behaviour Plug.Conn.Adapter
@moduledoc false
def conn(req) do
%{
path: path,
host: host,
port: port,
method: method,
headers: headers,
qs: qs,
peer: {remote_ip, _}
} = req
%Plug.Conn{
adapter: {__MODULE__, req},
host: host,
method: method,
owner: self(),
path_info: split_path(path),
port: port,
remote_ip: remote_ip,
query_string: qs,
req_headers: to_headers_list(headers),
request_path: path,
scheme: String.to_atom(:cowboy_req.scheme(req))
}
end
@impl true
def send_resp(req, status, headers, body) do
headers = to_headers_map(headers)
status = Integer.to_string(status) <> " " <> Plug.Conn.Status.reason_phrase(status)
req = :cowboy_req.reply(status, headers, body, req)
{:ok, nil, req}
end
@impl true
def send_file(req, status, headers, path, offset, length) do
%File.Stat{type: :regular, size: size} = File.stat!(path)
length =
cond do
length == :all -> size
is_integer(length) -> length
end
body = {:sendfile, offset, length, path}
headers = to_headers_map(headers)
req = :cowboy_req.reply(status, headers, body, req)
{:ok, nil, req}
end
@impl true
def send_chunked(req, status, headers) do
headers = to_headers_map(headers)
req = :cowboy_req.stream_reply(status, headers, req)
{:ok, nil, req}
end
@impl true
def chunk(req, body) do
:cowboy_req.stream_body(body, :nofin, req)
end
@impl true
def read_req_body(req, opts) do
length = Keyword.get(opts, :length, 8_000_000)
read_length = Keyword.get(opts, :read_length, 1_000_000)
read_timeout = Keyword.get(opts, :read_timeout, 15_000)
opts = %{length: read_length, period: read_timeout}
read_req_body(req, opts, length, [])
end
defp read_req_body(req, opts, length, acc) when length >= 0 do
case :cowboy_req.read_body(req, opts) do
{:ok, data, req} -> {:ok, IO.iodata_to_binary([acc | data]), req}
{:more, data, req} -> read_req_body(req, opts, length - byte_size(data), [acc | data])
end
end
defp read_req_body(req, _opts, _length, acc) do
{:more, IO.iodata_to_binary(acc), req}
end
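  # Illustrative call site (an assumption about usage, not part of this file):
  # a plug reads a request body through `Plug.Conn.read_body/2`, which forwards
  # these options down to the adapter function above:
  #
  #     {:ok, body, conn} =
  #       Plug.Conn.read_body(conn, length: 8_000_000, read_length: 1_000_000, read_timeout: 15_000)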
@impl true
def inform(req, status, headers) do
:cowboy_req.inform(status, to_headers_map(headers), req)
end
@impl true
def push(req, path, headers) do
opts =
case {req.port, req.sock} do
{:undefined, {_, port}} -> %{port: port}
{port, _} when port in [80, 443] -> %{}
{port, _} -> %{port: port}
end
:cowboy_req.push(path, to_headers_map(headers), req, opts)
end
@impl true
def get_peer_data(%{peer: {ip, port}, cert: cert}) do
%{
address: ip,
port: port,
ssl_cert: if(cert == :undefined, do: nil, else: cert)
}
end
@impl true
def get_http_protocol(req) do
:cowboy_req.version(req)
end
## Helpers
defp to_headers_list(headers) when is_list(headers) do
headers
end
defp to_headers_list(headers) when is_map(headers) do
:maps.to_list(headers)
end
defp to_headers_map(headers) when is_list(headers) do
# Group set-cookie headers into a list for a single `set-cookie`
# key since cowboy 2 requires headers as a map.
Enum.reduce(headers, %{}, fn
{key = "set-cookie", value}, acc ->
case acc do
%{^key => existing} -> %{acc | key => [value | existing]}
%{} -> Map.put(acc, key, [value])
end
{key, value}, acc ->
case acc do
%{^key => existing} -> %{acc | key => existing <> ", " <> value}
%{} -> Map.put(acc, key, value)
end
end)
end
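  # Worked example (illustrative, not part of the original source): given
  #
  #     [{"set-cookie", "a=1"}, {"set-cookie", "b=2"}, {"accept", "text/html"}]
  #
  # to_headers_map/1 returns
  #
  #     %{"set-cookie" => ["b=2", "a=1"], "accept" => "text/html"}
  #
  # (set-cookie values accumulate in reverse order, while other repeated keys
  # are comma-joined into a single string).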
defp split_path(path) do
segments = :binary.split(path, "/", [:global])
for segment <- segments, segment != "", do: segment
end
end
| 25.565789 | 92 | 0.60808 |
7325888493b580aa1fb0bd6ab0ddeadf9d9fdaad | 10468 | ex | Elixir | lib/junex.ex | boostingtech/juno_wrapper | f2e4ab2090f395c0d7e8254d9c0865689d4652a1 | ["MIT"] | 2 | 2020-12-11T22:36:38.000Z | 2021-01-25T14:44:03.000Z | lib/junex.ex | boostingtech/junex | f2e4ab2090f395c0d7e8254d9c0865689d4652a1 | ["MIT"] | null | null | null | lib/junex.ex | boostingtech/junex | f2e4ab2090f395c0d7e8254d9c0865689d4652a1 | ["MIT"] | 1 | 2022-03-24T12:05:11.000Z | 2022-03-24T12:05:11.000Z | defmodule Junex do
use Ecto.Migration
def change do
create table(:orderitems, primary_key: false) do
add :item_id, :integer
add :amount, :integer
add :order_id, references(:orders, on_delete: :delete_all)
timestamps()
end
create unique_index(:orderitems, [:order_id, :item_id])
end
end
| 24.25 | 69 | 0.695876 |
7325a34495691fc685b36e3004389dc761076f8f | 1,856 | ex | Elixir | lib/hexpm/emails/bamboo.ex | hubertpompecki/hexpm | 5cd4208b07a70bf2e1490930bf5d577978793b50 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/emails/bamboo.ex | hubertpompecki/hexpm | 5cd4208b07a70bf2e1490930bf5d577978793b50 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/emails/bamboo.ex | hubertpompecki/hexpm | 5cd4208b07a70bf2e1490930bf5d577978793b50 | [
"Apache-2.0"
] | null | null | null | defmodule Hexpm.Emails.Bamboo.SESAdapter do
require Logger
@behaviour Bamboo.Adapter
@backoff 100
@backoff_times 5
def deliver(email, _config) do
if email.headers != %{} do
raise "headers not supported for Hexpm.Emails.Bamboo.SESAdapter"
end
destination = %{
to: emails(email.to),
cc: emails(email.cc),
bcc: emails(email.bcc)
}
message = ExAws.SES.build_message(email.html_body, email.text_body, email.subject)
request = ExAws.SES.send_email(destination, message, email(email.from), [])
send_email(request, email, 0)
end
def handle_config(config) do
config
end
defp send_email(request, email, times) do
request
|> ExAws.request()
|> maybe_retry(request, email, times)
end
defp maybe_retry({:error, {:http_error, 454, _body}} = error, request, email, times) do
if times > @backoff_times do
Logger.warn("AWS SES throttled ##{times}")
raise "failed to send email\n\n#{inspect(email)}\n\n#{inspect(error)}"
else
Process.sleep(@backoff * trunc(:math.pow(2, times)))
send_email(request, email, times + 1)
end
end
defp maybe_retry({:error, _} = error, _request, email, _times) do
raise "failed to send email\n\n#{inspect(email)}\n\n#{inspect(error)}"
end
defp maybe_retry({:ok, result}, _request, _email, _times) do
result
end
defp emails(emails), do: emails |> List.wrap() |> Enum.map(&email/1)
defp email({name, email}), do: "#{name} <#{email}>"
defp email(email), do: email
end
defimpl Bamboo.Formatter, for: Hexpm.Accounts.User do
def format_email_address(user, _opts) do
{user.username, Hexpm.Accounts.User.email(user, :primary)}
end
end
defimpl Bamboo.Formatter, for: Hexpm.Accounts.Email do
def format_email_address(email, _opts) do
{email.user.username, email.email}
end
end
| 26.514286 | 89 | 0.674569 |
732608ce0f4e302d49b63a3c42da9a066e87cd3c | 10,468 | ex | Elixir | lib/junex.ex | boostingtech/juno_wrapper | f2e4ab2090f395c0d7e8254d9c0865689d4652a1 | [
"MIT"
] | 2 | 2020-12-11T22:36:38.000Z | 2021-01-25T14:44:03.000Z | lib/junex.ex | boostingtech/junex | f2e4ab2090f395c0d7e8254d9c0865689d4652a1 | [
"MIT"
] | null | null | null | lib/junex.ex | boostingtech/junex | f2e4ab2090f395c0d7e8254d9c0865689d4652a1 | [
"MIT"
] | null | null | null | defmodule Junex do
@moduledoc """
  Junex is a library that helps you interact with the Juno API in an easier way!
  ## WARNINGS
  1. Although you can build the maps manually, like `charge_info` and `payment_billing_info`,
  Junex provides a bunch of helper functions to build exactly the structure that the Juno API expects, so
  consider using them!
  2. All main functions receive as their last param an atom that can be either `:prod` or `:sandbox`
  3. The `create_client` and `get_access_token` functions can also be called with config from `config.exs`
## Config
You can provide config information for Junex in three ways:
1. On `config.exs` config file
2. Calling from code, with `Junex.configure/1 or /2`
3. Providing manually all configs
The available configs are:
1. `client_id`
2. `client_secret`
3. `resource_token`
4. `mode`
Example config on `config.exs`:
config :junex, :tokens,
client_id: System.get_env("CLIENT_ID"),
client_secret: System.get_env("CLIENT_SECRET"),
resource_token: System.get_env("RESOURCE_TOKEN"),
mode: :prod
## Example of use
As an example, see how you could create a charge and a payment:
  First, you need an `access_token`; to get one, you need a `client_id` and `client_secret` pair.
  You can generate one for production or sandbox on Juno's Integration screen.
After that:
defmodule MyApp.Payment do
def jwt_token(client_id, client_secret) do
token_params = [
            client_id: client_id,
            client_secret: client_secret,
mode: :sandbox
]
case Junex.get_access_token(token_params) do
{:ok, token} ->
token
{:error, error} ->
{:error, error}
end
end
end
  Now that you have an `access_token`, you can make other requests! Let's create a charge now:
  For this, you first need to create a client, providing the `access_token` and also the `resource_token`, which
  is the `Private Token` that you can also generate on the Integration screen.
defmodule MyApp.Payment do
      def charges(access_token, resource_token) do
        with {:ok, client} <- Junex.create_client(access_token, resource_token),
{:ok, charge_info} <- Junex.get_charge_info(params),
{:ok, charge_billing_info} <- Junex.get_charge_billing_info(params),
{:ok, charges} <-
Junex.create_charges(client, charge_info: charge_info, charge_billing_info: charge_billing_info) do
charges
else
{:error, error} ->
{:error, error}
end
end
end
  OK, the charges were created and returned as a list, so, if the `payment_type` was `:credit_card`, you can
  generate the payments in sequence:
    defmodule MyApp.Payment do
      def payment(client, charges) do
        with {:ok, card_info} <- Junex.get_card_info(params),
             {:ok, payment_billing_info} <- Junex.get_payment_billing_info(params),
             payment_results <-
               charges |> Task.async_stream(&do_payment(&1, client, card_info, payment_billing_info)) do
          payment_results
        else
          error ->
            error
        end
      end
      def do_payment(charge, client, card_info, payment_billing_info) do
        # assuming each charge map carries its id under "id"
        {:ok, payment_info} =
          Junex.get_payment_info(
            charge_id: charge["id"],
            card_info: card_info,
            payment_billing_info: payment_billing_info
          )
        case Junex.create_payment(client, payment_info: payment_info, mode: :sandbox) do
          {:ok, payment} ->
            payment
          {:error, error} ->
            {:error, error}
        end
      end
    end
"""
# ----------- Junex Settings -----------
@doc """
Provides configuration settings for accessing Juno server.
The specified configuration applies globally. Use `Junex.configure/2`
for setting different configurations on each processes.
## Example
Junex.configure(
client_id: System.get_env("CLIENT_ID"),
client_secret: System.get_env("CLIENT_SECRET"),
mode: System.get_env("JUNO_MODE")
)
"""
defdelegate configure(tokens), to: Junex.Config, as: :set
@doc """
Provides configuration settings for accessing Juno server.
## Options
The `scope` can have one of the following values.
* `:global` - configuration is shared for all processes.
* `:process` - configuration is isolated for each process.
## Example
Junex.configure(
:global,
client_id: System.get_env("CLIENT_ID"),
client_secret: System.get_env("CLIENT_SECRET"),
mode: System.get_env("JUNO_MODE")
)
"""
defdelegate configure(scope, tokens), to: Junex.Config, as: :set
@doc """
Returns current Junex configuration settings for accessing Juno server.
"""
defdelegate configure, to: Junex.Config, as: :get
# ----------- Junex Client -----------
@doc """
Returns a new client to perform other requests!
## Params
- access_token: Got from Junex.Auth.get_access_token
- resource_token: You can generate one on your Juno's account, is the "Private Token"
## Examples
Junex.Client.create(
System.get_env("ACCESS_TOKEN"),
System.get_env("RESOURCE_TOKEN")
)
"""
defdelegate create_client(access_token, resource_token), to: Junex.Client, as: :create
@doc """
Same as `Junex.create_client/2` however uses config from `config.exs`
## Params
- access_token: Got from Junex.get_access_token/1 or /0
## Examples
Junex.create_client(access_token)
"""
defdelegate create_client(access_token), to: Junex.Client, as: :create
# ----------- Junex Charges -----------
@doc """
Returns a charge_info map to be used on Junex.create_charges/2
## Example
Junex.get_charge_info(
description: "description",
amount: 123,
installments: 2,
payment_type: :boleto
)
"""
defdelegate get_charge_info(values), to: Junex.API.Charge, as: :get_charge_info
@doc """
  Returns a new charge_billing_info map to be used on Junex.create_charges/2
## Example
Junex.get_charge_billing_info(
name: "name",
document: "document",
email: "email",
phone: "phone"
)
"""
defdelegate get_charge_billing_info(values), to: Junex.API.Charge, as: :get_charge_billing_info
@doc """
Creates and return a new charge
## Parameters
- client: Got from Junex.create_client/1
  - charge_info: built manually or generated with Junex.get_charge_info/1
  - billing: built manually or generated with Junex.get_charge_billing_info/1
- mode: :prod | :sandbox
## Example
Junex.create_charges(
Junex.create_client(params),
Map.merge(Junex.get_charge_info(), Junex.get_charge_billing_info())
)
"""
defdelegate create_charges(client, values), to: Junex.API.Charge, as: :create_charges
@doc """
Returns the latest charge status
## Parameters
- client: Got from Junex.create_client/1
  - charge_id: one of the results of Junex.create_charges/2
- mode: :prod | :sandbox
## Example
Junex.check_charge_status(
Junex.create_client(params),
client_id: "client_id",
mode: :sandbox
)
"""
defdelegate check_charge_status(client, values), to: Junex.API.Charge, as: :check_charge_status
# ----------- Junex Account -----------
@doc """
List all possible banks for Juno transfers
## Parameters
- client: from Junex.create_client/1
- mode: :prod | :sandbox
## Examples
Junex.list_banks(Junex.create_client(), :sandbox)
"""
defdelegate list_banks(client, values), to: Junex.API.Account, as: :list_banks
@doc """
  Returns your current balance!
## Parameters
- client: Get from Junex.create_client/1
- mode: :prod | :sandbox
## Examples
Junex.get_balance(Junex.create_client(), :sandbox)
"""
defdelegate get_balance(client, values), to: Junex.API.Account, as: :get_balance
# ----------- Junex Payment -----------
@doc """
Returns a payment_billing_info map to use on Junex.get_payment_info/1
## Examples
Junex.get_payment_billing_info(
email: "email",
street: "street",
st_number: 12,
city: "city",
state: "state",
complement: "complement",
post_code: "post_code"
)
"""
defdelegate get_payment_billing_info(values),
to: Junex.API.Payment,
as: :get_payment_billing_info
@doc """
Returns a payment_info map to be used on Junex.create_payment/2
## Parameters
  - charge_id: the result of one of the entries of Junex.create_charges/2
  - card_info: built manually or got from Junex.get_card_info/1
  - payment_billing_info: built manually or got from Junex.get_payment_billing_info/1
## Example
Junex.get_payment_info(
charge_id: "charge_id",
card_info: Junex.get_card_info(params),
payment_billing_info: Junex.get_payment_billing_info(params)
)
"""
defdelegate get_payment_info(values), to: Junex.API.Payment, as: :get_payment_info
@doc """
Creates and returns a new Payment
## Parameters
- client: Got from Junex.create_client/1
  - payment_info: built manually or got from Junex.get_payment_info/1
- mode: :prod | :sandbox
## Example
Junex.create_payment(
Junex.create_client(params),
payment_info: Junex.get_payment_info(params),
mode: :sandbox
)
"""
defdelegate create_payment(client, values), to: Junex.API.Payment, as: :create_payment
@doc """
  Returns a card_info map to use on Junex.get_payment_info/1
"""
defdelegate get_card_info(values), to: Junex.API.Payment, as: :get_card_info
# ----------- Junex Auth -----------
@doc """
  Returns an access_token to be used on other Junex requests
  You can get the client_id and client_secret in the Integration section
  of your Juno account and generate the pair!
## Parameters
- client_id: string
- client_secret: string
- mode: :prod | :sandbox
## Examples
Junex.Auth.get_access_token(client_id: "client_id", client_secret: "client_secret", mode: :mode)
"""
defdelegate get_access_token(values), to: Junex.Auth, as: :get_access_token
@doc """
Same as Junex.get_access_token/1, however, uses config from `config.exs`
"""
defdelegate get_access_token, to: Junex.Auth, as: :get_access_token
end
| 30.08046 | 117 | 0.646637 |
7326096aaea67d4cfd197519ac7e1e60184a932a | 1088 | exs | Elixir | test/google_id_token/jwk_set/endpoint_test.exs | camcaine/google_id_token | c5e7452f0ae1210b84402374e4fde3521175bdbc | ["MIT"] | null | null | null | test/google_id_token/jwk_set/endpoint_test.exs | camcaine/google_id_token | c5e7452f0ae1210b84402374e4fde3521175bdbc | ["MIT"] | null | null | null | test/google_id_token/jwk_set/endpoint_test.exs | camcaine/google_id_token | c5e7452f0ae1210b84402374e4fde3521175bdbc | ["MIT"] | null | null | null | defmodule GoogleIDToken.JWKSet.EndpointTest do
use ExUnit.Case, async: true
alias GoogleIDToken.JWKSet
alias GoogleIDToken.JWKSet.{Endpoint, Endpointable}
@url "https://www.googleapis.com/oauth2/v3/certs"
setup do
[endpoint: Endpointable.to_endpoint(@url)]
end
describe "Endpointable.to_endpoint/1" do
test "implementations", %{endpoint: endpoint} do
assert %Endpoint{} = Endpointable.to_endpoint(endpoint)
assert %Endpoint{} = Endpointable.to_endpoint(endpoint.uri)
assert %Endpoint{} = Endpointable.to_endpoint(@url)
end
end
describe "Endpoint type" do
test "enforces keys" do
assert %Endpoint{} = struct!(Endpoint, uri: %URI{})
assert_raise KeyError, fn ->
struct!(Endpoint, bad_key: "")
end
end
end
describe "Endpoint.get/1" do
test "gets the public keys", %{endpoint: endpoint} do
assert {:ok, _, %{}} = Endpoint.get(endpoint)
end
test "parses the body into a JWT Set", %{endpoint: endpoint} do
assert {:ok, %JWKSet{}, _} = Endpoint.get(endpoint)
end
end
end
| 26.536585 | 67 | 0.670956 |
732616676e260093c23d65c0db24f053dd28ceb7 | 388 | exs | Elixir | priv/tenants/tenant_migrations/20170801141007_tenant_orderitems.exs | arcseldon/exploring-elixir | 0115aed80e5905384e7277dfe740d09e3a496b7b | ["Apache-2.0"] | 30 | 2017-07-03T23:53:37.000Z | 2021-03-14T21:27:14.000Z | priv/tenants/tenant_migrations/20170801141007_tenant_orderitems.exs | arcseldon/exploring-elixir | 0115aed80e5905384e7277dfe740d09e3a496b7b | ["Apache-2.0"] | 1 | 2018-05-25T20:00:24.000Z | 2018-05-30T09:40:09.000Z | priv/tenants/tenant_migrations/20170801141007_tenant_orderitems.exs | aseigo/exploring-elixir | 0115aed80e5905384e7277dfe740d09e3a496b7b | ["Apache-2.0"] | 4 | 2017-07-27T09:07:17.000Z | 2019-05-22T11:14:25.000Z | defmodule ExploringElixir.Repo.Tenants.Migrations.TenantOrderitems do
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Books.V1.Model.DictlayerdataDictWordsSensesDefinitionsExamplesSource do
@moduledoc """
## Attributes
* `attribution` (*type:* `String.t`, *default:* `nil`) -
* `url` (*type:* `String.t`, *default:* `nil`) -
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:attribution => String.t(),
:url => String.t()
}
field(:attribution)
field(:url)
end
defimpl Poison.Decoder,
for: GoogleApi.Books.V1.Model.DictlayerdataDictWordsSensesDefinitionsExamplesSource do
def decode(value, options) do
GoogleApi.Books.V1.Model.DictlayerdataDictWordsSensesDefinitionsExamplesSource.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Books.V1.Model.DictlayerdataDictWordsSensesDefinitionsExamplesSource do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 29.036364 | 91 | 0.727614 |
73261e5d7c5e208c7bae24ddf708445c3dfcb19d | 1882 | ex | Elixir | lib/timex/types.ex | chungwong/timex | bcd2504119f5c11ada7455d19726b5a49254dabf | ["MIT"] | null | null | null | lib/timex/types.ex | chungwong/timex | bcd2504119f5c11ada7455d19726b5a49254dabf | ["MIT"] | null | null | null | lib/timex/types.ex | chungwong/timex | bcd2504119f5c11ada7455d19726b5a49254dabf | ["MIT"] | null | null | null | defmodule Timex.Types do
# Date types
@type year :: Calendar.year
@type month :: Calendar.month
@type day :: Calendar.day
@type num_of_days :: 28..31
@type daynum :: 1..366
@type week_of_month :: 1..5
@type weekday :: 1..7
@type weeknum :: 1..53
# Time types
@type hour :: Calendar.hour
@type minute :: Calendar.minute
@type second :: Calendar.second
@type microsecond :: Calendar.microsecond
@type timestamp :: {megaseconds, seconds, microseconds }
@type megaseconds :: non_neg_integer
@type seconds :: non_neg_integer
@type microseconds :: non_neg_integer
# Timezone types
@type time_zone :: Calendar.time_zone
@type zone_abbr :: Calendar.zone_abbr
@type utc_offset :: Calendar.utc_offset
@type std_offset :: Calendar.std_offset
@type tz_offset :: -14..12
@type valid_timezone :: String.t | tz_offset | :utc | :local
# Complex types
@type weekday_name :: :monday | :tuesday | :wednesday | :thursday | :friday | :saturday | :sunday
@type shift_units :: :milliseconds | :seconds | :minutes | :hours | :days | :weeks | :years
@type time_units :: :microsecond | :microseconds | :millisecond | :milliseconds | :second | :seconds | :minute | :minutes | :hour | :hours | :day | :days | :week | :weeks | :year | :years
@type time :: { hour, minute, second }
@type microsecond_time :: { hour, minute, second, microsecond | microseconds}
@type date :: { year, month, day }
@type datetime :: { date, time }
@type microsecond_datetime :: { date, microsecond_time }
@type iso_triplet :: { year, weeknum, weekday }
@type calendar_types :: Date.t | DateTime.t | NaiveDateTime.t | Time.t
@type valid_datetime :: Date.t | DateTime.t | NaiveDateTime.t | Time.t | datetime | date | microsecond_datetime
@type valid_date :: Date.t | date
@type valid_time :: Time.t | time
@type weekstart :: weekday | binary | atom
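  # Illustrative values for the composite tuple types above (examples added for
  # clarity, not part of the original source):
  #
  #     {2015, 6, 24}                           # date
  #     {{2015, 6, 24}, {14, 27, 52}}           # datetime
  #     {{2015, 6, 24}, {14, 27, 52, 501_000}}  # microsecond_datetime (plain microseconds)
  #     {2015, 26, 3}                           # iso_triplet: year, week number, weekday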
end
| 42.772727 | 189 | 0.678533 |
73261e7fd52b32bd0f5caf695bc9c8c56329d60e | 1105 | exs | Elixir | config/test.exs | getong/phoenix_example | cec4ba1ab2d601a3d4c709d3b77b1284f28ae36b | ["Apache-2.0"] | null | null | null | config/test.exs | getong/phoenix_example | cec4ba1ab2d601a3d4c709d3b77b1284f28ae36b | ["Apache-2.0"] | 6 | 2020-07-20T14:24:28.000Z | 2022-03-29T02:25:55.000Z | config/test.exs | getong/phoenix_example | cec4ba1ab2d601a3d4c709d3b77b1284f28ae36b | ["Apache-2.0"] | null | null | null | use Mix.Config
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :phoenix_example, PhoenixExample.PostgresRepo,
username: "postgres",
password: "postgres",
database: "phoenix_example_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
port: 5432,
pool: Ecto.Adapters.SQL.Sandbox
config :phoenix_example, PhoenixExample.MysqlRepo,
username: "root",
password: "zan3Kie1",
database: "test_db",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
port: 3306
config :phoenix_example, :mongodb_info,
url: "127.0.0.1",
username: "mongoadmin",
password: "Iushahb0",
port: 27017,
database: "admin",
name: :mongo,
pool_size: 2
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :phoenix_example, PhoenixExampleWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 26.95122 | 74 | 0.742986 |
732635e114c0d40954fb305479463dd06d19c6bc | 789 | exs | Elixir | test/storage/connection_string_test.exs | nulian/ex_microsoft_azure_storage | b147de024ceb1fd21d617863a9c53c41263700a6 | ["MIT"] | null | null | null | test/storage/connection_string_test.exs | nulian/ex_microsoft_azure_storage | b147de024ceb1fd21d617863a9c53c41263700a6 | ["MIT"] | null | null | null | test/storage/connection_string_test.exs | nulian/ex_microsoft_azure_storage | b147de024ceb1fd21d617863a9c53c41263700a6 | ["MIT"] | null | null | null | defmodule ExMicrosoftAzureStorage.Storage.ConnectionStringTest do
@moduledoc false
use ExUnit.Case, async: true
import ExMicrosoftAzureStorage.Factory
alias ExMicrosoftAzureStorage.Storage.ConnectionString
describe "parse" do
test "parses a connection string" do
default_endpoints_protocol = "https"
account_name = "my_account_name"
account_key = "my_account_key"
endpoint_suffix = "my_endpoint_suffix"
attrs = %{
default_endpoints_protocol: default_endpoints_protocol,
account_name: account_name,
account_key: account_key,
endpoint_suffix: endpoint_suffix
}
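      # For reference (an assumption about the factory, not asserted by this test):
      # Azure connection strings follow the well-known format
      # "DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...;EndpointSuffix=...",
      # which `ConnectionString.parse/1` maps back into the attrs above.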
connection_string = build(:connection_string, attrs)
assert attrs == ConnectionString.parse(connection_string)
end
end
end
| 26.3 | 65 | 0.73384 |
732668e9222f645ab0c59a25e86da0760428945e | 225 | exs | Elixir | test/vekil/vekil/form/doctest1_test.exs | ianrumford/plymio_vekil | 070ab783dc8f7747002df61704285947eea583a2 | ["MIT"] | null | null | null | test/vekil/vekil/form/doctest1_test.exs | ianrumford/plymio_vekil | 070ab783dc8f7747002df61704285947eea583a2 | ["MIT"] | null | null | null | test/vekil/vekil/form/doctest1_test.exs | ianrumford/plymio_vekil | 070ab783dc8f7747002df61704285947eea583a2 | ["MIT"] | null | null | null | defmodule PlymioVekilFormDoctest1Test do
use ExUnit.Case, async: true
use PlymioVekilHelperTest
import Harnais.Helper
import Plymio.Vekil.Form
alias Plymio.Vekil.Form, as: VEKILFORM
doctest Plymio.Vekil.Form
end
| 22.5 | 40 | 0.8 |
73266a852cc397fb9ad23265b286e0b4e8145a52 | 706 | ex | Elixir | lib/mastery/boundary/quiz_validator.ex | grekko/mastery | f80dfcb660f23187970442ea237e4128ce3ac262 | ["MIT"] | null | null | null | lib/mastery/boundary/quiz_validator.ex | grekko/mastery | f80dfcb660f23187970442ea237e4128ce3ac262 | ["MIT"] | null | null | null | lib/mastery/boundary/quiz_validator.ex | grekko/mastery | f80dfcb660f23187970442ea237e4128ce3ac262 | ["MIT"] | null | null | null | defmodule Mastery.Boundary.QuizValidator do
import Mastery.Boundary.Validator
def errors(fields) when is_map(fields) do
[]
|> require(fields, :title, &validate_title/1)
|> optional(fields, :mastery, &validate_mastery/1)
end
def errors(_fields), do: [{nil, "A map of fields is required"}]
def validate_title(title) when is_binary(title) do
check(String.match?(title, ~r{\S}), {:error, "can't be blank"})
end
def validate_title(_title), do: {:error, "must be a string"}
def validate_mastery(mastery) when is_integer(mastery) do
check(mastery >= 1, {:error, "must be greater than zero"})
end
def validate_mastery(_mastery), do: {:error, "must be an integer"}
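  # Usage sketch (illustrative; exact error tuples for failing fields depend on
  # Mastery.Boundary.Validator's require/optional implementation):
  #
  #     errors(%{title: "OTP Quiz", mastery: 3})
  #     #=> []
  #
  #     errors("not a map")
  #     #=> [{nil, "A map of fields is required"}]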
end
| 29.416667 | 68 | 0.692635 |
732675003fa1079b42955730c7bc0737f119100e | 970 | ex | Elixir | installer/templates/phx_test/support/channel_case.ex | G3z/phoenix | f13fe2c7f7ec25e6a59204266cb8cbbe7ffbbded | ["MIT"] | 2 | 2016-11-01T15:01:48.000Z | 2016-11-01T15:07:20.000Z | installer/templates/phx_test/support/channel_case.ex | G3z/phoenix | f13fe2c7f7ec25e6a59204266cb8cbbe7ffbbded | ["MIT"] | 1 | 2021-11-17T12:10:06.000Z | 2021-11-24T12:53:45.000Z | installer/templates/phx_test/support/channel_case.ex | G3z/phoenix | f13fe2c7f7ec25e6a59204266cb8cbbe7ffbbded | ["MIT"] | null | null | null | defmodule <%= web_namespace %>.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint <%= endpoint_module %>
end
end<%= if ecto do %>
setup tags do
<%= adapter_config[:test_setup] %>
unless tags[:async] do
<%= adapter_config[:test_async] %>
end
:ok
end<% else %>
setup _tags do
:ok
end<% end %>
end
| 23.095238 | 59 | 0.681443 |
73268764faffba6a940e84d338635f20718977bb | 1494 | ex | Elixir | lib/core/domain/ports/commands.ex | giusdp/funless-core | d64570549ef0bd4376b1d16096033aca90042bef | ["Apache-2.0"] | null | null | null | lib/core/domain/ports/commands.ex | giusdp/funless-core | d64570549ef0bd4376b1d16096033aca90042bef | ["Apache-2.0"] | null | null | null | lib/core/domain/ports/commands.ex | giusdp/funless-core | d64570549ef0bd4376b1d16096033aca90042bef | ["Apache-2.0"] | 1 | 2022-03-24T12:05:11.000Z | 2022-03-24T12:05:11.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
defmodule Core.Domain.Ports.Commands do
@moduledoc """
Port for sending commands to workers.
"""
@type ivk_params :: %{:name => String.t()}
  @type worker :: atom()
@adapter :core |> Application.compile_env!(__MODULE__) |> Keyword.fetch!(:adapter)
@callback send_invocation_command(worker, ivk_params) ::
{:ok, name: String.t()} | {:error, message: String.t()}
@doc """
Sends an invocation command to a worker.
  It requires a worker (the fully qualified name of another node running the :worker actor)
  and invocation parameters (a map with a "name" key for the function name to invoke).
"""
defdelegate send_invocation_command(worker, ivk_params), to: @adapter
end
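# A minimal conforming adapter could look like this (hypothetical sketch, not
# part of this repo; the module name and config shape are assumptions):
#
#     defmodule MyApp.Adapters.NoopCommands do
#       @behaviour Core.Domain.Ports.Commands
#
#       @impl true
#       def send_invocation_command(_worker, %{name: name}) do
#         # a real adapter would message the :worker actor on the remote node
#         {:ok, name: name}
#       end
#     end
#
# wired up at compile time via:
#
#     config :core, Core.Domain.Ports.Commands, adapter: MyApp.Adapters.NoopCommands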
| 40.378378 | 90 | 0.732932 |
7326954d171d51f5db73f9684fb01946ddbf8508 | 2631 | ex | Elixir | lib/console_web/controllers/packet_purchaser/organization_controller.ex | helium/roaming-console | 0157d0f1666f50259d2887ed23f6bc5138ce67b6 | ["Apache-2.0"] | null | null | null | lib/console_web/controllers/packet_purchaser/organization_controller.ex | helium/roaming-console | 0157d0f1666f50259d2887ed23f6bc5138ce67b6 | ["Apache-2.0"] | 14 | 2022-03-02T17:01:59.000Z | 2022-03-30T17:45:47.000Z | lib/console_web/controllers/packet_purchaser/organization_controller.ex | helium/roaming-console | 0157d0f1666f50259d2887ed23f6bc5138ce67b6 | ["Apache-2.0"] | null | null | null | defmodule ConsoleWeb.PacketPurchaser.OrganizationController do
use ConsoleWeb, :controller
alias Console.Organizations
alias Console.Memos
alias Console.Memos.Memo
alias Console.DcPurchases
alias Console.DcPurchases.DcPurchase
action_fallback(ConsoleWeb.FallbackController)
def index(conn, _) do
organizations = Organizations.get_all()
render(conn, "index.json", organizations: organizations)
end
def show(conn, %{"id" => id}) do
organization = Organizations.get_organization!(id)
render(conn, "show.json", organization: organization)
end
def burned_dc(conn, %{"memo" => memo_number, "dc_amount" => amount, "hnt_amount" => cost}) do
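    # e.g. memo_number 12345 -> <<57, 48>> (little-endian bytes) -> "OTA="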
memo_txt = memo_number |> :binary.encode_unsigned(:little) |> :base64.encode()
case Memos.get_memo(memo_txt) do
%Memo{} = memo ->
attrs = %{
"dc_purchased" => amount,
"cost" => cost,
"card_type" => "burn",
"last_4" => "burn",
"user_id" => "HNT Burn",
"organization_id" => memo.organization_id,
"payment_id" => memo.memo,
}
case DcPurchases.get_by_payment_id(memo.memo) do
nil ->
organization = Organizations.get_organization!(memo.organization_id)
with {:ok, %DcPurchase{} = _dc_purchase } <- DcPurchases.create_dc_purchase_update_org(attrs, organization) do
ConsoleWeb.Endpoint.broadcast("graphql:dc_index", "graphql:dc_index:#{organization.id}:update_dc", %{})
ConsoleWeb.Endpoint.broadcast("graphql:dc_purchases_table", "graphql:dc_purchases_table:#{organization.id}:update_dc_table", %{})
{:ok, organization} = Organizations.update_organization(organization, %{ "received_free_dc" => false })
ConsoleWeb.DataCreditController.broadcast_packet_purchaser_refill_dc_balance(organization)
conn |> send_resp(:no_content, "")
end
_ ->
conn |> send_resp(:no_content, "")
end
nil ->
{:error, :not_found, "An organization with that memo was not found"}
end
end
def manual_update_packet_purchaser_dc(conn, %{"organization_id" => organization_id, "amount" => amount}) do
organization = Organizations.get_organization!(organization_id)
attrs = %{ dc_balance: amount, dc_balance_nonce: organization.dc_balance_nonce + 1 }
with {:ok, organization} <- Organizations.update_organization(organization, attrs) do
ConsoleWeb.DataCreditController.broadcast_packet_purchaser_refill_dc_balance(organization)
conn |> send_resp(:no_content, "")
end
end
end
| 36.541667 | 143 | 0.671988 |
7326967bd38ba494296222f1420068e2fbe834f8 | 9398 | ex | Elixir | lib/teiserver/startup.ex | marseel/teiserver | 7e085ae7853205d217183737d3eb69a4941bbe7e | ["MIT"] | null | null | null | lib/teiserver/startup.ex | marseel/teiserver | 7e085ae7853205d217183737d3eb69a4941bbe7e | ["MIT"] | null | null | null | lib/teiserver/startup.ex | marseel/teiserver | 7e085ae7853205d217183737d3eb69a4941bbe7e | ["MIT"] | null | null | null | defmodule Teiserver.Startup do
use CentralWeb, :startup
require Logger
alias Teiserver.{Account, User}
@spec startup :: :ok
def startup do
start_time = System.system_time(:millisecond)
add_permission_set("teiserver", "admin", ~w(account battle clan queue))
add_permission_set("teiserver", "moderator", ~w(account battle clan queue telemetry))
add_permission_set("teiserver", "api", ~w(battle))
add_permission_set("teiserver", "player", ~w(account tester contributor dev streamer donor verified bot moderator))
add_group_type("Teiserver clan", %{fields: []})
# Example site configs
add_site_config_type(%{
key: "teiserver.Require Chobby login",
section: "Registrations",
type: "boolean",
permissions: ["admin.dev.developer"],
description: "Prevents users registering with anything other than Chobby",
opts: [],
default: false
})
add_site_config_type(%{
key: "teiserver.Bridge from discord",
section: "Discord",
type: "boolean",
permissions: ["teiserver.moderator"],
description: "Enables bridging from discord to in-lobby channels",
opts: [],
default: true
})
add_site_config_type(%{
key: "teiserver.Bridge from server",
section: "Discord",
type: "boolean",
permissions: ["teiserver.moderator"],
description: "Enables bridging from in-lobby channels to discord",
opts: [],
default: true
})
umbrella_group =
case Central.Account.get_group(nil, search: [name: "Teiserver umbrella group"]) do
nil ->
{:ok, group} =
Central.Account.create_group(%{
"name" => "Teiserver umbrella group",
"active" => true,
"icon" => "fa-duotone fa-umbrella",
"colour" => "#00AA66",
"data" => %{}
})
group
group ->
group
end
player_group =
case Central.Account.get_group(nil, search: [name: "Teiserver Users"]) do
nil ->
{:ok, group} =
Central.Account.create_group(%{
"name" => "Teiserver Users",
"active" => true,
"icon" => "fa-duotone fa-robot",
"colour" => "#00AA00",
"data" => %{},
"super_group_id" => umbrella_group.id
})
group
group ->
group
end
internal_group =
case Central.Account.get_group(nil, search: [name: "Teiserver Internal Processes"]) do
nil ->
{:ok, group} =
Central.Account.create_group(%{
"name" => "Teiserver Internal Processes",
"active" => true,
"icon" => "fa-duotone fa-microchip",
"colour" => "#660066",
"data" => %{},
"super_group_id" => umbrella_group.id
})
group
group ->
group
end
ConCache.put(:application_metadata_cache, "teiserver_umbrella_group", umbrella_group.id)
ConCache.put(:application_metadata_cache, "teiserver_user_group", player_group.id)
ConCache.put(:application_metadata_cache, "teiserver_internal_group", internal_group.id)
Central.Account.GroupCacheLib.update_caches(player_group)
Central.Account.GroupCacheLib.update_caches(internal_group)
Central.Account.GroupCacheLib.update_caches(umbrella_group)
# Quick actions
QuickAction.add_items([
# General pages
%{
label: "Friends/Mutes/Invites",
icons: [Teiserver.icon(:relationship), :list],
url: "/teiserver/account/relationships",
permissions: "teiserver"
},
%{
label: "Teiserver live metrics",
icons: ["far fa-tachometer-alt", :list],
url: "/teiserver/admin/metrics",
permissions: "logging.live"
},
%{
label: "Clans",
icons: [Teiserver.Clans.ClanLib.icon(), :list],
url: "/teiserver/account/clans",
permissions: "teiserver"
},
%{
label: "Battles",
icons: [Teiserver.Battle.LobbyLib.icon(), :list],
url: "/teiserver/battle/lobbies",
permissions: "teiserver"
},
# Mod pages
%{
label: "Clients",
icons: [Teiserver.ClientLib.icon(), :list],
url: "/teiserver/admin/client",
permissions: "teiserver.moderator"
},
%{
label: "Live queues",
icons: [Teiserver.Game.QueueLib.icon(), :list],
url: "/teiserver/admin_live/queues",
permissions: "teiserver.moderator"
},
%{
label: "Teiserver users",
icons: [Teiserver.ClientLib.icon(), :list],
input: "s",
method: "get",
placeholder: "Search username",
url: "/teiserver/admin/users/search",
permissions: "teiserver.moderator"
},
# %{label: "Parties", icons: [Teiserver.ClientLib.icon(), :list], url: "/teiserver/admin/parties", permissions: "teiserver.moderator"},
%{
label: "Clan admin",
icons: [Teiserver.Clans.ClanLib.icon(), :list],
url: "/teiserver/admin/clans",
permissions: "teiserver.moderator"
},
# Admin pages
%{
label: "Teiserver dashboard",
icons: ["fa-regular fa-tachometer-alt", :list],
url: "/logging/live/dashboard/metrics?nav=teiserver",
permissions: "logging.live.show"
},
%{
label: "Teiserver client events",
icons: ["fa-regular #{Teiserver.Telemetry.ClientEventLib.icon()}", :list],
url: "/teiserver/reports/client_events/summary",
permissions: "teiserver.admin"
},
%{
label: "Teiserver server metrics",
icons: ["fa-regular #{Teiserver.Telemetry.ServerDayLogLib.icon()}", :list],
url: "/teiserver/reports/client_events/summary",
permissions: "teiserver.admin"
},
%{
label: "Teiserver match metrics",
icons: ["fa-regular #{Teiserver.Battle.MatchLib.icon()}", :list],
url: "/teiserver/reports/client_events/summary",
permissions: "teiserver.admin"
},
%{
label: "Teiserver infologs",
icons: ["fa-regular #{Teiserver.Telemetry.InfologLib.icon()}", :list],
url: "/teiserver/reports/client_events/summary",
permissions: "teiserver.moderator.telemetry"
},
%{
label: "Teiserver reports",
icons: ["fa-regular #{Central.Helpers.StylingHelper.icon(:report)}", :list],
url: "/teiserver/reports/client_events/summary",
permissions: "teiserver.admin"
}
])
# User configs
add_user_config_type(%{
key: "teiserver.Show flag",
section: "Teiserver account",
type: "boolean",
visible: true,
permissions: ["teiserver"],
description:
"When checked the flag associated with your IP will be displayed. If unchecked your flag will be blank. This will take effect next time you login with your client.",
opts: [],
default: true
})
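# Initialise the in-memory lists; insert_new leaves :lobbies untouched
# if a value is already present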
ConCache.put(:lists, :clients, [])
ConCache.put(:lists, :rooms, [])
ConCache.insert_new(:lists, :lobbies, [])
# We tried having a random lobby id start number to help prevent people
# from joining ongoing games, but it didn't work
# # We were using :rand.uniform() but it wasn't being random
# # since we don't need true randomness we're okay with this!
# bid = :erlang.system_time()
# |> to_string
# |> String.reverse()
# |> String.slice(0..5)
# |> String.to_integer()
ConCache.put(:id_counters, :battle, 1)
User.pre_cache_users(:active)
Teiserver.Data.Matchmaking.pre_cache_queues()
springids =
  Account.list_users(order_by: "Newest first")
  |> Enum.map(fn u -> Central.Helpers.NumberHelper.int_parse(u.data["springid"]) end)
# We do this as a separate operation because a blank DB won't have any springids yet
current_springid = Enum.max([0] ++ springids)
ConCache.put(:id_counters, :springid, current_springid + 1)
ConCache.put(:application_metadata_cache, "teiserver_startup_completed", true)
ConCache.put(:application_metadata_cache, "teiserver_day_metrics_today_last_time", nil)
ConCache.put(:application_metadata_cache, "teiserver_day_metrics_today_cache", true)
# User.pre_cache_users(:remaining)
Teiserver.Telemetry.startup()
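# The subsystems below are optional: each is gated by a config flag and
# started in a spawned process after a short delay so it cannot block boot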
if Application.get_env(:central, Teiserver)[:enable_match_monitor] do
spawn(fn ->
:timer.sleep(200)
Teiserver.Battle.start_match_monitor()
end)
end
if Application.get_env(:central, Teiserver)[:enable_coordinator_mode] do
spawn(fn ->
:timer.sleep(200)
Teiserver.Coordinator.start_coordinator()
end)
end
if Application.get_env(:central, Teiserver)[:enable_accolade_mode] do
spawn(fn ->
:timer.sleep(200)
Teiserver.Account.AccoladeLib.start_accolade_server()
end)
end
# We want this to start up later than the coordinator
if Application.get_env(:central, Teiserver)[:enable_agent_mode] do
spawn(fn ->
:timer.sleep(650)
Teiserver.agent_mode()
end)
end
time_taken = System.system_time(:millisecond) - start_time
Logger.info("Teiserver startup complete, took #{time_taken}ms")
end
end
| 32.406897 | 173 | 0.60715 |
732696b3f60ff257655cf8a90c36642af70ad19b | 1,948 | ex | Elixir | lib/gen_socket_client/serializer.ex | cabol/phoenix_gen_socket_client | bbc8884288bd747e1d7e8ce96a6189d8c24ff01e | ["MIT"] | 169 | 2016-04-08T10:47:43.000Z | 2021-01-19T15:37:34.000Z | lib/gen_socket_client/serializer.ex | cabol/phoenix_gen_socket_client | bbc8884288bd747e1d7e8ce96a6189d8c24ff01e | ["MIT"] | 36 | 2016-04-07T13:28:02.000Z | 2021-01-27T08:16:35.000Z | lib/gen_socket_client/serializer.ex | cabol/phoenix_gen_socket_client | bbc8884288bd747e1d7e8ce96a6189d8c24ff01e | ["MIT"] | 42 | 2016-04-08T14:31:33.000Z | 2021-01-21T09:24:51.000Z |
defmodule Phoenix.Channels.GenSocketClient.Serializer do
@moduledoc """
Describes the serializer interface used in `Phoenix.Channels.GenSocketClient` to encode/decode messages.
"""
alias Phoenix.Channels.GenSocketClient
@doc "Invoked to decode the raw message."
@callback decode_message(GenSocketClient.encoded_message(), Keyword.t()) ::
GenSocketClient.message()
@doc "Invoked to encode a socket message."
@callback encode_message(GenSocketClient.message()) ::
{:ok, Phoenix.Channels.GenSocketClient.Transport.frame()} | {:error, reason :: any}
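# Implementations wrap the encoded payload in a transport frame:
# the Json serializer below emits {:text, ...} frames and GzipJson
# emits {:binary, ...} frames.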
end
defmodule Phoenix.Channels.GenSocketClient.Serializer.Json do
@moduledoc "Json serializer for the socket client."
@behaviour Phoenix.Channels.GenSocketClient.Serializer
# -------------------------------------------------------------------
# Phoenix.Channels.GenSocketClient.Serializer callbacks
# -------------------------------------------------------------------
@doc false
def decode_message(encoded_message, _opts), do: Jason.decode!(encoded_message)
@doc false
def encode_message(message) do
case Jason.encode(message) do
{:ok, encoded} -> {:ok, {:text, encoded}}
error -> error
end
end
end
defmodule Phoenix.Channels.GenSocketClient.Serializer.GzipJson do
@moduledoc "Gzip+Json serializer for the socket client."
@behaviour Phoenix.Channels.GenSocketClient.Serializer
# -------------------------------------------------------------------
# Phoenix.Channels.GenSocketClient.Serializer callbacks
# -------------------------------------------------------------------
@doc false
def decode_message(encoded_message, _opts) do
encoded_message
|> :zlib.gunzip()
|> Jason.decode!()
end
@doc false
def encode_message(message) do
case Jason.encode_to_iodata(message) do
{:ok, encoded} -> {:ok, {:binary, :zlib.gzip(encoded)}}
error -> error
end
end
end
| 32.466667 | 106 | 0.620637 |
7326b7aca1e48e45f3fa95316804755634348c21 | 454 | ex | Elixir | lib/quantum/node_selector_broadcaster/start_opts.ex | kianmeng/quantum-core | 24997fb649d778f654c1adad0006f7ad529a1184 | ["Apache-2.0"] | 1,092 | 2018-03-23T02:29:33.000Z | 2022-03-30T19:17:30.000Z | lib/quantum/node_selector_broadcaster/start_opts.ex | kianmeng/quantum-core | 24997fb649d778f654c1adad0006f7ad529a1184 | ["Apache-2.0"] | 168 | 2018-03-22T12:52:28.000Z | 2022-03-19T22:49:17.000Z | lib/quantum/node_selector_broadcaster/start_opts.ex | kianmeng/quantum-core | 24997fb649d778f654c1adad0006f7ad529a1184 | ["Apache-2.0"] | 79 | 2018-03-22T12:50:24.000Z | 2022-03-07T08:40:50.000Z |
defmodule Quantum.NodeSelectorBroadcaster.StartOpts do
@moduledoc false
# Start Options for Quantum.NodeSelectorBroadcaster
@type t :: %__MODULE__{
name: GenServer.server(),
execution_broadcaster_reference: GenServer.server(),
task_supervisor_reference: GenServer.server()
}
@enforce_keys [
:name,
:execution_broadcaster_reference,
:task_supervisor_reference
]
defstruct @enforce_keys
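# All keys are required. Illustrative construction (process names are hypothetical):
#
#     %Quantum.NodeSelectorBroadcaster.StartOpts{
#       name: MyApp.NodeSelectorBroadcaster,
#       execution_broadcaster_reference: MyApp.ExecutionBroadcaster,
#       task_supervisor_reference: MyApp.TaskSupervisor
#     }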
end
| 23.894737 | 62 | 0.715859 |
7327099fbad814f4b1afba14ff11b485aabc4e7e | 1,358 | exs | Elixir | test/integration/ice_negotiation_test.exs | livinginthepast/specter | 65ab35ace7bf34074a58d0ed27a14ddd6bf97034 | ["MIT"] | 8 | 2022-02-25T09:53:57.000Z | 2022-03-28T12:31:56.000Z | test/integration/ice_negotiation_test.exs | livinginthepast/specter | 65ab35ace7bf34074a58d0ed27a14ddd6bf97034 | ["MIT"] | 10 | 2022-03-26T14:40:12.000Z | 2022-03-30T17:05:53.000Z | test/integration/ice_negotiation_test.exs | livinginthepast/specter | 65ab35ace7bf34074a58d0ed27a14ddd6bf97034 | ["MIT"] | 1 | 2022-03-25T21:30:53.000Z | 2022-03-25T21:30:53.000Z |
defmodule Test.Integration.IceNegotiationTest do
use SpecterTest.Case
describe "on_ice_candidate" do
setup [:initialize_specter, :init_api, :init_peer_connection]
test "sends candidates as they are generated", %{specter: specter, peer_connection: pc_offer} do
api = init_api(specter)
pc_answer = init_peer_connection(specter, api)
assert :ok = Specter.PeerConnection.create_data_channel(specter, pc_offer, "foo")
assert_receive {:data_channel_created, ^pc_offer}
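# Standard offer/answer exchange: once both peers hold local and remote
# descriptions, ICE candidates are emitted asynchronously to each side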
assert :ok = Specter.PeerConnection.create_offer(specter, pc_offer)
assert_receive {:offer, ^pc_offer, offer}
assert :ok = Specter.PeerConnection.set_local_description(specter, pc_offer, offer)
assert_receive {:ok, ^pc_offer, :set_local_description}
assert :ok = Specter.PeerConnection.set_remote_description(specter, pc_answer, offer)
assert_receive {:ok, ^pc_answer, :set_remote_description}
assert :ok = Specter.PeerConnection.create_answer(specter, pc_answer)
assert_receive {:answer, ^pc_answer, answer}
assert :ok = Specter.PeerConnection.set_local_description(specter, pc_answer, answer)
assert_receive {:ok, ^pc_answer, :set_local_description}
assert_receive {:ice_candidate, ^pc_offer, _candidate}
assert_receive {:ice_candidate, ^pc_answer, _candidate}
end
end
end
| 43.806452 | 100 | 0.74595 |