Dataset schema (⌀ marks a nullable column):

| Field | Dtype | Range / Length |
|---|---|---|
| hexsha | string | length 40..40 |
| size | int64 | 2..991k |
| ext | string | 2 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4..208 |
| max_stars_repo_name | string | length 6..106 |
| max_stars_repo_head_hexsha | string | length 40..40 |
| max_stars_repo_licenses | sequence | |
| max_stars_count | int64 ⌀ | 1..33.5k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | length 24..24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | length 24..24 |
| max_issues_repo_path | string | length 4..208 |
| max_issues_repo_name | string | length 6..106 |
| max_issues_repo_head_hexsha | string | length 40..40 |
| max_issues_repo_licenses | sequence | |
| max_issues_count | int64 ⌀ | 1..16.3k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | length 24..24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | length 24..24 |
| max_forks_repo_path | string | length 4..208 |
| max_forks_repo_name | string | length 6..106 |
| max_forks_repo_head_hexsha | string | length 40..40 |
| max_forks_repo_licenses | sequence | |
| max_forks_count | int64 ⌀ | 1..6.91k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | length 24..24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | length 24..24 |
| content | string | length 2..991k |
| avg_line_length | float64 | 1..36k |
| max_line_length | int64 | 1..977k |
| alphanum_fraction | float64 | 0..1 |

Each row below lists its metadata columns pipe-separated on one line, followed by the expanded `content` field, and closes with the trailing `avg_line_length | max_line_length | alphanum_fraction` columns.

f7a501c7c871ed3d9a24a041b847c535ff810eef | 12,032 | exs | Elixir | test/ecto_adapters_dynamodb_test.exs | noonie2k/ecto_adapters_dynamodb | 199c14348f5787b04b4eb997a1f00d13cc09948c | ["Apache-2.0"] | null | null | null | test/ecto_adapters_dynamodb_test.exs | noonie2k/ecto_adapters_dynamodb | 199c14348f5787b04b4eb997a1f00d13cc09948c | ["Apache-2.0"] | null | null | null | test/ecto_adapters_dynamodb_test.exs | noonie2k/ecto_adapters_dynamodb | 199c14348f5787b04b4eb997a1f00d13cc09948c | ["Apache-2.0"] | 1 | 2018-09-12T13:11:05.000Z | 2018-09-12T13:11:05.000Z |

defmodule Ecto.Adapters.DynamoDB.Test do
use ExUnit.Case
import Ecto.Query
alias Ecto.Adapters.DynamoDB.TestRepo
alias Ecto.Adapters.DynamoDB.TestSchema.Person
alias Ecto.Adapters.DynamoDB.TestSchema.Address
alias Ecto.Adapters.DynamoDB.TestSchema.BookPage
@test_table "test_person"
setup_all do
TestHelper.setup_all()
end
describe "Repo.insert/1" do
test "insert a single record" do
{:ok, result} = TestRepo.insert(%Person{
id: "person-hello",
circles: nil,
first_name: "Hello",
last_name: "World",
age: 34,
email: "[email protected]",
password: "password",
})
assert result == %Ecto.Adapters.DynamoDB.TestSchema.Person{
age: 34,
circles: nil,
email: "[email protected]",
first_name: "Hello",
id: "person-hello",
last_name: "World",
password: "password",
__meta__: %Ecto.Schema.Metadata{
context: nil,
source: {nil, @test_table},
state: :loaded,
},
}
end
test "insert embedded records" do
address_list = [
%Address{
street_number: 245,
street_name: "W 17th St"
},
%Address{
street_number: 1385,
street_name: "Broadway"
}
]
{:ok, result} = TestRepo.insert(%Person{
id: "person:address_test",
email: "[email protected]",
addresses: address_list
})
assert length(result.addresses) == 2
end
end
describe "Repo.get/2" do
test "Repo.get/2 - no matching record" do
result = TestRepo.get(Person, "person-faketestperson")
assert result == nil
end
test "insert a record and retrieve it by its primary key" do
TestRepo.insert(%Person{
id: "person-john",
circles: nil,
first_name: "John",
last_name: "Lennon",
age: 40,
email: "[email protected]",
password: "password",
role: "musician"
})
result = TestRepo.get(Person, "person-john")
assert result.first_name == "John"
assert result.last_name == "Lennon"
end
test "insert a record and get with a hash/range pkey" do
name = "houseofleaves"
page1 = %BookPage{
id: name,
page_num: 1,
text: "abc",
}
page2 = %BookPage{
id: name,
page_num: 2,
text: "def",
}
cs1 = BookPage.changeset(page1)
cs2 = BookPage.changeset(page2)
duplicate_page_cs = BookPage.changeset(%BookPage{
id: name,
page_num: 1,
text: "ghi",
})
{:ok, _} = TestRepo.insert(cs1)
{:ok, _} = TestRepo.insert(cs2)
{:error, _} = TestRepo.insert(duplicate_page_cs)
query = from p in BookPage, where: p.id == ^name
results = query |> TestRepo.all |> Enum.sort_by(&(&1.page_num))
[res1, res2] = results
assert res1 == page1
assert res2 == page2
query1 = from p in BookPage, where: p.id == ^name and p.page_num == 1
query2 = from p in BookPage, where: p.id == ^name and p.page_num == 2
assert [page1] == TestRepo.all(query1)
assert [page2] == TestRepo.all(query2)
end
end
describe "Repo.insert_all/2" do
test "batch-insert multiple records" do
total_records = 5
result = handle_batch_insert(total_records)
assert result == {total_records, nil}
end
test "batch-insert a single record" do
total_records = 1
result = handle_batch_insert(total_records)
assert result == {total_records, nil}
end
# DynamoDB has a constraint on the call to BatchWriteItem, where attempts to insert more than
# 25 records will be rejected. We allow the user to call insert_all() for more than 25 records
# by breaking up the requests into blocks of 25.
# https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
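    # A minimal sketch of the block-of-25 chunking described above, as it
    # might look inside the adapter (illustrative assumption only --
    # `write_batch/1` is a hypothetical helper, not the adapter's real API):
    #
    #   records
    #   |> Enum.chunk_every(25)
    #   |> Enum.each(&write_batch/1)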
test "batch-insert multiple records, exceeding BatchWriteItem limit by 30 records" do
total_records = 55
result = handle_batch_insert(total_records)
assert result == {total_records, nil}
end
end
describe "Repo.all" do
test "batch-get multiple records when querying for a hard-coded list" do
person1 = %{
id: "person-jimi",
circles: nil,
first_name: "Jimi",
last_name: "Hendrix",
age: 27,
email: "[email protected]",
password: "password",
}
person2 = %{
id: "person-noel",
circles: nil,
first_name: "Noel",
last_name: "Redding",
age: 72,
      email: "[email protected]",
password: "password",
}
person3 = %{
id: "person-mitch",
circles: nil,
first_name: "Mitch",
last_name: "Mitchell",
age: 74,
email: "[email protected]",
password: "password",
}
TestRepo.insert_all(Person, [person1, person2, person3])
result = TestRepo.all(from p in Person, where: p.id in ["person-jimi", "person-noel", "person-mitch"])
|> Enum.map(&(&1.id))
assert Enum.sort(result) == Enum.sort(["person-jimi", "person-noel", "person-mitch"])
end
test "batch-get multiple records when querying for an interpolated list" do
person1 = %{
id: "person-moe",
circles: nil,
first_name: "Moe",
last_name: "Howard",
age: 75,
email: "[email protected]",
password: "password",
}
person2 = %{
id: "person-larry",
circles: nil,
first_name: "Larry",
last_name: "Fine",
age: 72,
email: "[email protected]",
password: "password",
}
person3 = %{
id: "person-curly",
circles: nil,
first_name: "Curly",
last_name: "Howard",
age: 74,
email: "[email protected]",
password: "password",
}
TestRepo.insert_all(Person, [person1, person2, person3])
person_ids = [person1.id, person2.id, person3.id]
result = TestRepo.all(from p in Person, where: p.id in ^person_ids)
|> Enum.map(&(&1.id))
assert Enum.sort(result) == Enum.sort(person_ids)
end
test "batch-insert and query all on a single-condition global secondary index" do
person1 = %{
id: "person-tomtest",
circles: nil,
first_name: "Tom",
last_name: "Jones",
age: 70,
email: "[email protected]",
password: "password",
}
person2 = %{
id: "person-caseytest",
circles: nil,
first_name: "Casey",
last_name: "Jones",
age: 114,
email: "[email protected]",
password: "password",
}
person3 = %{
id: "person-jamestest",
circles: nil,
first_name: "James",
last_name: "Jones",
age: 71,
email: "[email protected]",
password: "password",
}
TestRepo.insert_all(Person, [person1, person2, person3])
result = TestRepo.all(from p in Person, where: p.email == "[email protected]")
assert length(result) == 3
end
test "query all on a multi-condition primary key/global secondary index" do
TestRepo.insert(%Person{
id: "person:jamesholden",
first_name: "James",
last_name: "Holden",
email: "[email protected]",
})
result = TestRepo.all(from p in Person, where: p.id == "person:jamesholden" and p.email == "[email protected]")
assert Enum.at(result, 0).first_name == "James"
assert Enum.at(result, 0).last_name == "Holden"
end
end
describe "Repo.update/1" do
test "update two fields on a record" do
TestRepo.insert(%Person{
id: "person-update",
circles: nil,
first_name: "Update",
last_name: "Test",
age: 12,
email: "[email protected]",
password: "password",
})
{:ok, result} = TestRepo.get(Person, "person-update")
|> Ecto.Changeset.change([first_name: "Updated", last_name: "Tested"])
|> TestRepo.update()
assert result.first_name == "Updated"
assert result.last_name == "Tested"
end
end
describe "Repo.delete/1" do
test "delete a single record" do
person_id = "person:delete"
{:ok, _} = TestRepo.insert(%Person{
id: person_id,
email: "[email protected]",
})
|> elem(1)
|> TestRepo.delete()
assert TestRepo.get(Person, person_id) == nil
end
end
describe "Repo.delete_all/2" do
test "delete multiple records" do
TestRepo.insert(%Person{
id: "person:delete_all_1",
email: "[email protected]",
})
TestRepo.insert(%Person{
id: "person:delete_all_2",
email: "[email protected]",
})
assert nil != TestRepo.get(Person, "person:delete_all_1")
assert nil != TestRepo.get(Person, "person:delete_all_2")
result = TestRepo.delete_all((from p in Person, where: p.email == "[email protected]"), query_info_key: "delete_all:test_key")
assert {2, nil} == result
assert nil == TestRepo.get(Person, "person:delete_all_1")
assert nil == TestRepo.get(Person, "person:delete_all_2")
end
end
defp handle_batch_insert(total_records) do
people = make_list_of_people_for_batch_insert(total_records)
TestRepo.insert_all(Person, people)
end
defp make_list_of_people_for_batch_insert(total_records) do
for i <- 0..total_records, i > 0 do
id_string = :crypto.strong_rand_bytes(16) |> Base.url_encode64 |> binary_part(0, 16)
id = "person:" <> id_string
%{
id: id,
circles: nil,
first_name: "Batch",
last_name: "Insert",
age: i,
email: "batch_insert#{i}@test.com",
password: "password",
}
end
end
end
| 33.237569 | 134 | 0.480967 |

f7a5064aaf1a4b642e3714abd95094f1c09ca7d5 | 8,631 | exs | Elixir | test/game/command/shops_test.exs | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | ["MIT"] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | test/game/command/shops_test.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | ["MIT"] | null | null | null | test/game/command/shops_test.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | ["MIT"] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z |

defmodule Game.Command.ShopsTest do
use ExVenture.CommandCase
alias Game.Command.Shops
alias Game.Environment.State.Overworld
doctest Game.Command.Shops
setup do
start_and_clear_items()
insert_item(%{id: 1, name: "Sword", keywords: [], description: ""})
tree_stand = %{id: 10, name: "Tree Stand Shop", shop_items: [%{item_id: 1, price: 10, quantity: -1}]}
hole_wall = %{id: 11, name: "Hole in the Wall"}
start_shop(tree_stand)
start_room(%{shops: [tree_stand, hole_wall]})
%{state: session_state(%{}), tree_stand: tree_stand}
end
test "a bad shop command displays help", %{state: state} do
:ok = Shops.run({:help}, state)
assert_socket_echo "see {command}help shops{/command}"
end
test "view shops in the room", %{state: state} do
:ok = Shops.run({}, %{state | save: %{room_id: 1}})
assert_socket_echo ["tree stand shop", "hole in the wall"]
end
test "view shops in the room - overworld", %{state: state} do
start_room(%Overworld{id: "overworld:1:1,1"})
:ok = Shops.run({}, %{state | save: %{room_id: "overworld:1:1,1"}})
assert_socket_echo "no shops"
end
test "view shops in the room - no shops", %{state: state} do
start_room(%{shops: []})
:ok = Shops.run({}, %{state | save: %{room_id: 1}})
assert_socket_echo "no shops"
end
test "view items in a shop", %{state: state} do
:ok = Shops.run({:list, "tree stand"}, %{state | save: %{room_id: 1}})
assert_socket_echo ["tree stand shop", "sword"]
end
test "view items in a shop - one shop", %{state: state, tree_stand: tree_stand} do
start_room(%{shops: [tree_stand]})
:ok = Shops.run({:list}, %{state | save: %{room_id: 1}})
assert_socket_echo ["tree stand shop", "sword"]
end
test "view items in a shop - one shop - more than one shop", %{state: state} do
:ok = Shops.run({:list}, %{state | save: %{room_id: 1}})
assert_socket_echo "more than one shop"
end
test "view items in a shop - one shop - no shop found", %{state: state} do
start_room(%{shops: []})
:ok = Shops.run({:list}, %{state | save: %{room_id: 1}})
assert_socket_echo "could not"
end
test "view items in a shop - bad shop name", %{state: state} do
:ok = Shops.run({:list, "stand"}, %{state | save: %{room_id: 1}})
assert_socket_echo "could not be found"
end
test "view an item in a shop", %{state: state} do
:ok = Shops.run({:show, "sword", :from, "tree stand"}, %{state | save: %{room_id: 1}})
assert_socket_echo "sword"
end
test "view an item in a shop - item not found", %{state: state} do
:ok = Shops.run({:show, "shield", :from, "tree stand"}, %{state | save: %{room_id: 1}})
assert_socket_echo "could not"
end
test "view an item in a shop - one shop", %{state: state, tree_stand: tree_stand} do
start_room(%{shops: [tree_stand]})
:ok = Shops.run({:show, "sword"}, %{state | save: %{room_id: 1}})
assert_socket_echo "sword"
end
test "view an item in a shop - one shop - more than one shop", %{state: state} do
:ok = Shops.run({:show, "sword"}, %{state | save: %{room_id: 1}})
assert_socket_echo "more than one shop"
end
test "view an item in a shop - one shop - no shop found", %{state: state} do
start_room(%{shops: []})
:ok = Shops.run({:show, "sword"}, %{state | save: %{room_id: 1}})
assert_socket_echo "could not"
end
test "view an item in a shop - shop not found", %{state: state} do
:ok = Shops.run({:show, "sword", :from, "tre3"}, %{state | save: %{room_id: 1}})
assert_socket_echo "could not"
end
test "buy an item in a shop", %{state: state, tree_stand: tree_stand} do
save = %{base_save() | room_id: 1, currency: 20}
put_shop_buy_response(tree_stand, {:ok, %{save | currency: 19}, %{name: "Sword"}})
{:update, state} = Shops.run({:buy, "sword", :from, "tree stand"}, %{state | save: save})
assert state.save.currency == 19
assert_shop_buy {_, "sword", _}
assert_socket_echo ["tree stand shop", "sword"]
end
test "buy an item in a shop - shop not found", %{state: state} do
save = %{base_save() | room_id: 1, currency: 20}
:ok = Shops.run({:buy, "sword", :from, "treestand"}, %{state | save: save})
assert_socket_echo "could not"
end
test "buy an item in a shop - one shop", %{state: state, tree_stand: tree_stand} do
start_room(%{shops: [tree_stand]})
save = %{base_save() | room_id: 1, currency: 20}
put_shop_buy_response(tree_stand, {:ok, %{save | currency: 19}, %{name: "Sword"}})
{:update, state} = Shops.run({:buy, "sword"}, %{state | save: save})
assert state.save.currency == 19
assert_shop_buy {_, "sword", _}
assert_socket_echo ["tree stand", "sword"]
end
test "buy an item in a shop - one shop - but more than one shop in room", %{state: state} do
save = %{base_save() | room_id: 1, currency: 20}
:ok = Shops.run({:buy, "sword"}, %{state | save: save})
assert_socket_echo "more than one"
end
test "buy an item in a shop - one shop parse - no shop in room", %{state: state} do
start_room(%{shops: []})
save = %{base_save() | room_id: 1, currency: 20}
:ok = Shops.run({:buy, "sword"}, %{state | save: save})
assert_socket_echo "could not"
end
test "buy an item in a shop - item not found", %{state: state, tree_stand: tree_stand} do
put_shop_buy_response(tree_stand, {:error, :item_not_found})
save = %{base_save() | room_id: 1, currency: 20}
:ok = Shops.run({:buy, "swrd", :from, "tree stand"}, %{state | save: save})
assert_shop_buy {_, "swrd", _}
assert_socket_echo "item could not"
end
test "buy an item in a shop - not enough currency", %{state: state, tree_stand: tree_stand} do
put_shop_buy_response(tree_stand, {:error, :not_enough_currency, %{name: "Sword"}})
save = %{base_save() | room_id: 1, currency: 20}
:ok = Shops.run({:buy, "sword", :from, "tree stand"}, %{state | save: save})
assert_shop_buy {_, "sword", _}
assert_socket_echo "do not have"
end
test "buy an item in a shop - not enough quantity", %{state: state, tree_stand: tree_stand} do
put_shop_buy_response(tree_stand, {:error, :not_enough_quantity, %{name: "Sword"}})
save = %{base_save() | room_id: 1, currency: 20}
:ok = Shops.run({:buy, "sword", :from, "tree stand"}, %{state | save: save})
assert_shop_buy {_, "sword", _}
assert_socket_echo "does not"
end
test "sell an item to a shop", %{state: state, tree_stand: tree_stand} do
save = %{base_save() | room_id: 1, currency: 20, items: [item_instance(1)]}
put_shop_sell_response(tree_stand, {:ok, %{save | currency: 30}, %{name: "Sword", cost: 10}})
{:update, state} = Shops.run({:sell, "sword", :to, "tree stand"}, %{state | save: save})
assert state.save.currency == 30
assert_shop_sell {_, "sword", _}
assert_socket_echo "10 gold"
end
test "sell an item to a shop - one shop", %{state: state, tree_stand: tree_stand} do
start_room(%{shops: [tree_stand]})
save = %{base_save() | room_id: 1, currency: 20, items: [item_instance(1)]}
put_shop_sell_response(tree_stand, {:ok, %{save | currency: 30}, %{name: "Sword", cost: 10}})
{:update, state} = Shops.run({:sell, "sword"}, %{state | save: save})
assert state.save.currency == 30
assert_shop_sell {_, "sword", _}
assert_socket_echo "10 gold"
end
test "sell an item in a shop - one shop - but more than one shop in room", %{state: state} do
save = %{base_save() | room_id: 1, currency: 20}
:ok = Shops.run({:sell, "sword"}, %{state | save: save})
assert_socket_echo "more than one"
end
test "sell an item in a shop - one shop parse - no shop in room", %{state: state} do
start_room(%{shops: []})
save = %{base_save() | room_id: 1, currency: 20}
:ok = Shops.run({:sell, "sword"}, %{state | save: save})
assert_socket_echo "could not"
end
test "sell an item to a shop - shop not found", %{state: state} do
save = %{base_save() | room_id: 1, currency: 20, items: [item_instance(1)]}
:ok = Shops.run({:sell, "sword", :to, "treestand"}, %{state | save: save})
assert_socket_echo "could not"
end
test "sell an item to a shop - item not found", %{state: state, tree_stand: tree_stand} do
put_shop_sell_response(tree_stand, {:error, :item_not_found})
save = %{base_save() | room_id: 1, currency: 20, items: [item_instance(1)]}
:ok = Shops.run({:sell, "swrd", :to, "tree stand"}, %{state | save: save})
assert_shop_sell {_, "swrd", _}
assert_socket_echo "could not"
end
end
| 33.196154 | 105 | 0.625652 |

f7a51315b6f848a30c12113805ccae18f66b2bc3 | 930 | ex | Elixir | lib/ticketo/application.ex | alejandronanez/phoenix-absinthe-auth-template | e4e72f52247cb4c880ddc058beaa7b5eecb68980 | ["MIT"] | 8 | 2020-06-25T03:15:06.000Z | 2021-12-10T10:52:26.000Z | lib/ticketo/application.ex | alejandronanez/phoenix-absinthe-auth-template | e4e72f52247cb4c880ddc058beaa7b5eecb68980 | ["MIT"] | null | null | null | lib/ticketo/application.ex | alejandronanez/phoenix-absinthe-auth-template | e4e72f52247cb4c880ddc058beaa7b5eecb68980 | ["MIT"] | null | null | null |

defmodule Ticketo.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
# List all child processes to be supervised
children = [
# Start the Ecto repository
Ticketo.Repo,
# Start the endpoint when the application starts
TicketoWeb.Endpoint
# Starts a worker by calling: Ticketo.Worker.start_link(arg)
# {Ticketo.Worker, arg},
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Ticketo.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
TicketoWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 29.0625 | 66 | 0.713978 |

f7a58c912b36ef75a6829c433974a30bd7492e13 | 4,635 | ex | Elixir | lib/combinators/date.ex | tmr08c/date_time_parser | 42a93ff734bc532f86ca3a0bf90b662ce3bfade8 | ["MIT"] | null | null | null | lib/combinators/date.ex | tmr08c/date_time_parser | 42a93ff734bc532f86ca3a0bf90b662ce3bfade8 | ["MIT"] | null | null | null | lib/combinators/date.ex | tmr08c/date_time_parser | 42a93ff734bc532f86ca3a0bf90b662ce3bfade8 | ["MIT"] | null | null | null |

defmodule DateTimeParser.Combinators.Date do
@moduledoc false
import NimbleParsec
@days_map %{
"sun" => "Sunday",
"mon" => "Monday",
"tues" => "Tuesday",
"tue" => "Tuesday",
"wed" => "Wednesday",
"thurs" => "Thursday",
"thur" => "Thursday",
"thu" => "Thursday",
"fri" => "Friday",
"sat" => "Saturday"
}
@months_map %{
"january" => 1,
"february" => 2,
"march" => 3,
"april" => 4,
"may" => 5,
"june" => 6,
"july" => 7,
"august" => 8,
"september" => 9,
"october" => 10,
"november" => 11,
"december" => 12,
"jan" => 1,
"feb" => 2,
"mar" => 3,
"apr" => 4,
"jun" => 6,
"jul" => 7,
"aug" => 8,
"sept" => 9,
"sep" => 9,
"oct" => 10,
"nov" => 11,
"dec" => 12
}
@date_separator ~w(, . / -) ++ [" "]
def vocal_month_to_numeric_month(value), do: Map.get(@months_map, value)
def to_integer(value) when is_binary(value), do: String.to_integer(value)
defp space_separator do
string(" ")
end
def year4 do
[?0..?9]
|> ascii_char()
|> times(4)
|> tag(:year)
|> label("4 digit year")
end
def year do
[?0..?9]
|> ascii_char()
|> times(max: 4, min: 2)
|> tag(:year)
|> label("2 or 4 digit year")
end
def vocal_month do
@months_map
|> Map.keys()
|> Enum.sort_by(&byte_size/1)
|> Enum.reverse()
|> Enum.map(&string/1)
|> choice()
|> concat(string(".") |> optional() |> ignore())
|> map(:vocal_month_to_numeric_month)
|> unwrap_and_tag(:month)
|> label("word month either fully spelled or 3-letter abbreviation")
end
def numeric_month do
choice([
numeric_month2(),
numeric_month1()
])
|> map(:to_integer)
|> unwrap_and_tag(:month)
|> label("numeric month from 01-12")
end
def numeric_month2 do
~w(01 02 03 04 05 06 07 08 09 10 11 12)
|> Enum.map(&string/1)
|> choice()
end
def numeric_month1 do
1..9
|> Enum.map(&to_string/1)
|> Enum.map(&string/1)
|> choice()
|> lookahead_not(integer(1))
end
def day do
choice([
day2(),
day1()
])
|> map(:to_integer)
|> unwrap_and_tag(:day)
|> label("numeric day from 01-31")
end
def day2 do
(~w(01 02 03 04 05 06 07 08 09) ++ Enum.map(10..31, &to_string/1))
|> Enum.map(&string/1)
|> choice()
end
def day1 do
1..9
|> Enum.map(&to_string/1)
|> Enum.map(&string/1)
|> choice()
|> lookahead_not(integer(1))
end
def date_separator do
@date_separator
|> Enum.map(&string/1)
|> choice()
|> ignore()
|> label("date separator")
end
def month do
choice([
numeric_month(),
vocal_month()
])
end
def month_day do
month()
|> concat(date_separator() |> optional())
|> concat(day())
end
def day_month do
day()
|> concat(date_separator() |> optional())
|> concat(month())
end
def day_long_month_year do
day()
|> concat(date_separator() |> optional())
|> concat(vocal_month())
|> concat(date_separator() |> optional())
|> concat(year())
end
def year_month_day do
year()
|> concat(date_separator() |> optional())
|> concat(month_day())
end
def month_day_year do
month_day()
|> concat(date_separator() |> optional())
|> concat(year())
end
def day_month_year4 do
day_month()
|> concat(date_separator())
|> concat(year4())
end
def day_month_year do
day_month()
|> concat(date_separator() |> optional())
|> concat(year())
end
def vocal_days_long do
@days_map
|> Map.values()
|> Enum.uniq()
|> Enum.map(&String.downcase/1)
|> Enum.sort_by(&byte_size/1)
|> Enum.reverse()
|> Enum.map(&string/1)
end
def vocal_days_short do
@days_map
|> Map.keys()
|> Enum.uniq()
|> Enum.map(&String.downcase/1)
|> Enum.sort_by(&byte_size/1)
|> Enum.reverse()
|> Enum.map(&string/1)
end
def vocal_day do
(vocal_days_long() ++ vocal_days_short())
|> choice()
|> unwrap_and_tag(:vocal_day)
|> label("vocal day spelled out")
|> concat(space_separator() |> optional() |> ignore())
end
def formal_date do
choice([
day_long_month_year(),
day_month_year4(),
year_month_day(),
day_month_year(),
month_day_year(),
day_month(),
month_day()
])
end
def us_date do
choice([
day_long_month_year(),
month_day_year(),
day_month_year(),
year_month_day(),
day_month(),
month_day()
])
end
end
| 19.3125 | 75 | 0.545415 |

f7a5b30585bc8e79c824d152d0b881a8bb349a91 | 37,403 | ex | Elixir | clients/composer/lib/google_api/composer/v1/api/projects.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/composer/lib/google_api/composer/v1/api/projects.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/composer/lib/google_api/composer/v1/api/projects.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Composer.V1.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.Composer.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Create a new environment.
## Parameters
* `connection` (*type:* `GoogleApi.Composer.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. The parent must be of the form "projects/{projectId}/locations/{locationId}".
* `locations_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Composer.V1.Model.Environment.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Composer.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec composer_projects_locations_environments_create(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Composer.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def composer_projects_locations_environments_create(
connection,
projects_id,
locations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectsId}/locations/{locationsId}/environments", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Composer.V1.Model.Operation{}])
end
@doc """
Delete an environment.
## Parameters
* `connection` (*type:* `GoogleApi.Composer.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The environment to delete, in the form: "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `environments_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Composer.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec composer_projects_locations_environments_delete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Composer.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def composer_projects_locations_environments_delete(
connection,
projects_id,
locations_id,
environments_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url(
"/v1/projects/{projectsId}/locations/{locationsId}/environments/{environmentsId}",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"environmentsId" => URI.encode(environments_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Composer.V1.Model.Operation{}])
end
@doc """
Get an existing environment.
## Parameters
* `connection` (*type:* `GoogleApi.Composer.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The resource name of the environment to get, in the form: "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `environments_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Composer.V1.Model.Environment{}}` on success
* `{:error, info}` on failure
"""
@spec composer_projects_locations_environments_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Composer.V1.Model.Environment.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def composer_projects_locations_environments_get(
connection,
projects_id,
locations_id,
environments_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/v1/projects/{projectsId}/locations/{locationsId}/environments/{environmentsId}",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"environmentsId" => URI.encode(environments_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Composer.V1.Model.Environment{}])
end
@doc """
List environments.
## Parameters
* `connection` (*type:* `GoogleApi.Composer.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. List environments in the given project and location, in the form: "projects/{projectId}/locations/{locationId}"
* `locations_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - The maximum number of environments to return.
* `:pageToken` (*type:* `String.t`) - The next_page_token value returned from a previous List request, if any.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Composer.V1.Model.ListEnvironmentsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec composer_projects_locations_environments_list(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Composer.V1.Model.ListEnvironmentsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def composer_projects_locations_environments_list(
connection,
projects_id,
locations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/locations/{locationsId}/environments", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Composer.V1.Model.ListEnvironmentsResponse{}])
end
@doc """
Update an environment.
## Parameters
* `connection` (*type:* `GoogleApi.Composer.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The relative resource name of the environment to update, in the form: "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `environments_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:updateMask` (*type:* `String.t`) - Required. A comma-separated list of paths, relative to `Environment`, of fields to update.
          For example, to set the version of scikit-learn to install in the environment to 0.19.0 and to remove an existing installation of numpy, the `updateMask` parameter would include the following two `paths` values: "config.softwareConfig.pypiPackages.scikit-learn" and "config.softwareConfig.pypiPackages.numpy". The included patch environment would specify the scikit-learn version as follows: `{ "config":{ "softwareConfig":{ "pypiPackages":{ "scikit-learn":"==0.19.0" } } } }` Note that in the above example, any existing PyPI packages other than scikit-learn and numpy will be unaffected.
          Only one update type may be included in a single request's `updateMask`. For example, one cannot update both the PyPI packages and labels in the same request. However, it is possible to update multiple members of a map field simultaneously in the same request. For example, to set the labels "label1" and "label2" while clearing "label3" (assuming it already exists), one can provide the paths "labels.label1", "labels.label2", and "labels.label3" and populate the patch environment as follows: `{ "labels":{ "label1":"new-label1-value", "label2":"new-label2-value" } }` Note that in the above example, any existing labels that are not included in the `updateMask` will be unaffected.
          It is also possible to replace an entire map field by providing the map field's path in the `updateMask`. The new value of the field will be that which is provided in the patch environment. For example, to delete all pre-existing user-specified PyPI packages and install botocore at version 1.7.14, the `updateMask` would contain the path "config.softwareConfig.pypiPackages", and the patch environment would be the following: `{ "config":{ "softwareConfig":{ "pypiPackages":{ "botocore":"==1.7.14" } } } }`
          **Note:** Only the following fields can be updated:

          | Mask | Purpose |
          | ---- | ------- |
          | `config.softwareConfig.pypiPackages` | Replace all custom PyPI packages. If a replacement package map is not included in `environment`, all custom PyPI packages are cleared. It is an error to provide both this mask and a mask specifying an individual package. |
          | `config.softwareConfig.pypiPackages.`*packagename* | Update the custom PyPI package *packagename*, preserving other packages. To delete the package, include it in `updateMask`, and omit the mapping for it in `environment.config.softwareConfig.pypiPackages`. It is an error to provide both a mask of this form and the "config.softwareConfig.pypiPackages" mask. |
          | `labels` | Replace all environment labels. If a replacement labels map is not included in `environment`, all labels are cleared. It is an error to provide both this mask and a mask specifying one or more individual labels. |
          | `labels.`*labelName* | Set the label named *labelName*, while preserving other labels. To delete the label, include it in `updateMask` and omit its mapping in `environment.labels`. It is an error to provide both a mask of this form and the "labels" mask. |
          | `config.nodeCount` | Horizontally scale the number of nodes in the environment. An integer greater than or equal to 3 must be provided in the `config.nodeCount` field. |
          | `config.softwareConfig.airflowConfigOverrides` | Replace all Apache Airflow config overrides. If a replacement config overrides map is not included in `environment`, all config overrides are cleared. It is an error to provide both this mask and a mask specifying one or more individual config overrides. |
          | `config.softwareConfig.airflowConfigOverrides.`*section-name* | Override the Apache Airflow config property *name* in the section named *section*, preserving other properties. To delete the property override, include it in `updateMask` and omit its mapping in `environment.config.softwareConfig.airflowConfigOverrides`. It is an error to provide both a mask of this form and the "config.softwareConfig.airflowConfigOverrides" mask. |
          | `config.softwareConfig.envVariables` | Replace all environment variables. If a replacement environment variable map is not included in `environment`, all custom environment variables are cleared. It is an error to provide both this mask and a mask specifying one or more individual environment variables. |
* `:body` (*type:* `GoogleApi.Composer.V1.Model.Environment.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Composer.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec composer_projects_locations_environments_patch(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Composer.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def composer_projects_locations_environments_patch(
connection,
projects_id,
locations_id,
environments_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url(
"/v1/projects/{projectsId}/locations/{locationsId}/environments/{environmentsId}",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"environmentsId" => URI.encode(environments_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Composer.V1.Model.Operation{}])
end
@doc """
List ImageVersions for provided location.
## Parameters
* `connection` (*type:* `GoogleApi.Composer.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. List ImageVersions in the given project and location, in the form: "projects/{projectId}/locations/{locationId}"
* `locations_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - The maximum number of image_versions to return.
* `:pageToken` (*type:* `String.t`) - The next_page_token value returned from a previous List request, if any.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Composer.V1.Model.ListImageVersionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec composer_projects_locations_image_versions_list(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Composer.V1.Model.ListImageVersionsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def composer_projects_locations_image_versions_list(
connection,
projects_id,
locations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/locations/{locationsId}/imageVersions", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Composer.V1.Model.ListImageVersionsResponse{}])
end
@doc """
Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
## Parameters
* `connection` (*type:* `GoogleApi.Composer.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The name of the operation resource to be deleted.
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `operations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Composer.V1.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec composer_projects_locations_operations_delete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) :: {:ok, GoogleApi.Composer.V1.Model.Empty.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def composer_projects_locations_operations_delete(
connection,
projects_id,
locations_id,
operations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url(
"/v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"operationsId" => URI.encode(operations_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Composer.V1.Model.Empty{}])
end
@doc """
Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
## Parameters
* `connection` (*type:* `GoogleApi.Composer.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The name of the operation resource.
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `operations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Composer.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec composer_projects_locations_operations_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Composer.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def composer_projects_locations_operations_get(
connection,
projects_id,
locations_id,
operations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"operationsId" => URI.encode(operations_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Composer.V1.Model.Operation{}])
end
@doc """
Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
## Parameters
* `connection` (*type:* `GoogleApi.Composer.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The name of the operation's parent resource.
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - The standard list filter.
* `:pageSize` (*type:* `integer()`) - The standard list page size.
* `:pageToken` (*type:* `String.t`) - The standard list page token.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Composer.V1.Model.ListOperationsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec composer_projects_locations_operations_list(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Composer.V1.Model.ListOperationsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def composer_projects_locations_operations_list(
connection,
projects_id,
locations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/locations/{locationsId}/operations", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Composer.V1.Model.ListOperationsResponse{}])
end
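  # An illustrative call, assuming a `connection` built for this client;
  # the project and location values are placeholders, not part of the API:
  #
  #     {:ok, response} =
  #       composer_projects_locations_operations_list(connection, "my-project", "us-central1")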
end
| 50.750339 | 4,251 | 0.634227 |
f7a5f77bd5d373373a80a940dbd46633324bd0bd | 288 | exs | Elixir | priv/repo/migrations/20201230174327_add_fundraiser_to_fundraiser_admins.exs | jfcloutier/freegiving | 2ab3821595996fc295c5b55515d6f60cbce05181 | ["Unlicense"] | null | null | null | priv/repo/migrations/20201230174327_add_fundraiser_to_fundraiser_admins.exs | jfcloutier/freegiving | 2ab3821595996fc295c5b55515d6f60cbce05181 | ["Unlicense"] | null | null | null | priv/repo/migrations/20201230174327_add_fundraiser_to_fundraiser_admins.exs | jfcloutier/freegiving | 2ab3821595996fc295c5b55515d6f60cbce05181 | ["Unlicense"] | null | null | null |
defmodule Freegiving.Repo.Migrations.AddFundraiserToFundraiserAdmins do
use Ecto.Migration
def change do
alter table("fundraiser_admins") do
add :fundraiser_id, references("fundraisers"), null: false
end
create index("fundraiser_admins", :fundraiser_id)
end
end
| 24 | 71 | 0.756944 |
f7a64f46d01ae3d23a59f827bed5891cd4ae03f7 | 233 | ex | Elixir | test/environment/ecto/comment.ex | danbruder/dilute | 0b2a5c86ff920c1171388ae23e767a956e1978a9 | ["Apache-2.0"] | null | null | null | test/environment/ecto/comment.ex | danbruder/dilute | 0b2a5c86ff920c1171388ae23e767a956e1978a9 | ["Apache-2.0"] | null | null | null | test/environment/ecto/comment.ex | danbruder/dilute | 0b2a5c86ff920c1171388ae23e767a956e1978a9 | ["Apache-2.0"] | null | null | null |
defmodule DiluteTest.Environment.Ecto.Comment do
use Ecto.Schema
schema "comments" do
field(:content, :string)
field(:votes, :integer)
belongs_to(:post, DiluteTest.Environment.Ecto.Post)
timestamps()
end
end
| 17.923077 | 55 | 0.712446 |
f7a664ad2d7faa3e1fa4f5a5973ed39c2f34c1dc | 1,044 | ex | Elixir | test/support/fake_queue.ex | learn-co/railway_ipc | aeec16fb5b315fb3d8472b38c6eeea20d20e731a | ["MIT"] | 2 | 2021-03-22T19:37:33.000Z | 2022-01-04T08:48:20.000Z | test/support/fake_queue.ex | learn-co/railway_ipc | aeec16fb5b315fb3d8472b38c6eeea20d20e731a | ["MIT"] | 10 | 2019-11-29T20:24:24.000Z | 2021-02-26T22:06:13.000Z | test/support/fake_queue.ex | learn-co/railway_ipc | aeec16fb5b315fb3d8472b38c6eeea20d20e731a | ["MIT"] | 1 | 2020-01-09T17:13:29.000Z | 2020-01-09T17:13:29.000Z |
defmodule Test.Support.FakeQueue do
@moduledoc """
Message bus implemented as an in-memory queue for tests.
"""
@behaviour RailwayIpc.MessageBus
@me __MODULE__
alias RailwayIpc.MessageBus.Publisher
def init do
Agent.start_link(fn -> [] end, name: @me)
end
@impl RailwayIpc.MessageBus
def publish(_channel, exchange, encoded, format) do
if Regex.match?(~r/"publish":false/, encoded) do
{:error, "failed to publish"}
else
msg = %{exchange: exchange, encoded: encoded, format: format}
Agent.update(@me, fn messages -> [msg | messages] end)
{:ok, true}
end
end
@impl RailwayIpc.MessageBus
def setup_publisher do
{:ok, %Publisher{}}
end
@impl RailwayIpc.MessageBus
def cleanup_publisher(%Publisher{}) do
Agent.update(@me, fn _ -> [] end)
:ok
end
def message_count do
Enum.count(messages())
end
def has_message?(msg_map) do
Enum.member?(messages(), msg_map)
end
def messages do
Agent.get(@me, fn messages -> messages end)
end
end
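# Sketch of intended test usage; the exchange name, payload, and `:json`
# format tag are illustrative values, not part of the API:
#
#     {:ok, _pid} = Test.Support.FakeQueue.init()
#     {:ok, true} = Test.Support.FakeQueue.publish(nil, "events", ~s({"publish":true}), :json)
#     1 = Test.Support.FakeQueue.message_count()
#     true = Test.Support.FakeQueue.has_message?(%{exchange: "events", encoded: ~s({"publish":true}), format: :json})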
| 20.88 | 67 | 0.662835 |
f7a681132e96742c484460ede4a7f3fc2a6dc157 | 10,818 | ex | Elixir | lib/elixir/lib/process.ex | joearms/elixir | 9a0f8107bd8bbd089acb96fe0041d61a05e88a9b | ["Apache-2.0"] | 4 | 2016-04-05T05:51:36.000Z | 2019-10-31T06:46:35.000Z | lib/elixir/lib/process.ex | joearms/elixir | 9a0f8107bd8bbd089acb96fe0041d61a05e88a9b | ["Apache-2.0"] | null | null | null | lib/elixir/lib/process.ex | joearms/elixir | 9a0f8107bd8bbd089acb96fe0041d61a05e88a9b | ["Apache-2.0"] | 5 | 2015-02-01T06:01:19.000Z | 2019-08-29T09:02:35.000Z |
defmodule Process do
@moduledoc """
This module provides convenience functions around processes and
the process dictionary. In Erlang, most of these functions are
auto-imported, but in Elixir they are grouped in a module for
convenience. Note that, unlike their Erlang counterparts, these functions
always return nil instead of :undefined. You can call the Erlang
versions directly if you want the :undefined value.
"""
@doc """
Returns true if the process exists and is alive, that is,
is not exiting and has not exited. Otherwise, returns false.
`pid` must refer to a process at the local node.
"""
@spec alive?(pid) :: boolean
def alive?(pid) do
:erlang.is_process_alive(pid)
end
@doc """
Returns all key-values in the dictionary.
"""
@spec get :: [{term, term}]
def get do
:erlang.get()
end
@doc """
Returns the value for the given key.
"""
@spec get(term) :: term
@spec get(term, default :: term) :: term
def get(key, default // nil) do
case :erlang.get(key) do
:undefined ->
default
value ->
value
end
end
@doc """
Returns all keys that have the given `value`.
"""
@spec get_keys(term) :: [term]
def get_keys(value) do
:erlang.get_keys(value)
end
@doc """
Stores the given key-value in the process dictionary.
"""
@spec put(term, term) :: term | nil
def put(key, value) do
nillify :erlang.put(key, value)
end
@doc """
Deletes all items in the dictionary.
"""
@spec delete :: [{term, term}]
def delete() do
:erlang.erase()
end
@doc """
Deletes the given key from the dictionary.
"""
@spec delete(term) :: term | nil
def delete(key) do
nillify :erlang.erase(key)
end
@doc """
Sends an exit signal with the given reason to the pid.
The following behaviors apply if reason is any term except `:normal` or `:kill`:
1) If pid is not trapping exits, pid itself will exit with the given reason;
2) If pid is trapping exits, the exit signal is transformed into a message
{'EXIT', from, reason} and delivered to the message queue of pid;
3) If reason is the atom `:normal`, pid will not exit. If it is trapping exits,
the exit signal is transformed into a message {'EXIT', from, :normal} and
delivered to its message queue;
4) If reason is the atom `:kill`, that is if `exit(pid, :kill)` is called, an
untrappable exit signal is sent to pid which will unconditionally exit with
exit reason `:killed`.
## Examples
Process.exit(pid, :kill)
"""
@spec exit(pid, term) :: true
def exit(pid, reason) do
:erlang.exit(pid, reason)
end
@doc """
Returns the pid of a new process started by the application of `fun`.
It behaves exactly the same as `Kernel.spawn/1`.
"""
@spec spawn((() -> any)) :: pid
def spawn(fun) do
:erlang.spawn(fun)
end
@type spawn_opt :: :link | :monitor | {:priority, :low | :normal | :high} |
{:fullsweep_after, non_neg_integer} |
{:min_heap_size, non_neg_integer} |
{:min_bin_vheap_size, non_neg_integer}
@type spawn_opts :: [spawn_opt]
@doc """
Returns the pid of a new process started by the application of `fun`.
It also accepts extra options, for the list of available options
check http://www.erlang.org/doc/man/erlang.html#spawn_opt-2
"""
@spec spawn((() -> any), spawn_opts) :: pid | {pid, reference}
def spawn(fun, opts) do
:erlang.spawn_opt(fun, opts)
end
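  # For example, passing `:monitor` returns a `{pid, reference}` tuple, per
  # the spec above (illustrative):
  #
  #     {pid, ref} = Process.spawn(fn -> :ok end, [:monitor])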
@doc """
Returns the pid of a new process started by the application of
`module.function(args)`. The new process created will be placed in the system
scheduler queue and be run some time later.
It behaves exactly the same as the `Kernel.spawn/3` function.
"""
@spec spawn(module, atom, [any]) :: pid
def spawn(mod, fun, args) do
:erlang.spawn(mod, fun, args)
end
@doc """
Returns the pid of a new process started by the application of
`module.function(args)`. The new process created will be placed in the system
scheduler queue and be run some time later.
It also accepts extra options, for the list of available options
check http://www.erlang.org/doc/man/erlang.html#spawn_opt-4
"""
@spec spawn(module, atom, [any], spawn_opts) :: pid | {pid, reference}
def spawn(mod, fun, args, opts) do
:erlang.spawn_opt(mod, fun, args, opts)
end
@doc """
Returns the pid of a new process started by the application of `fun`.
A link is created between the calling process and the new
process, atomically.
"""
@spec spawn_link((() -> any)) :: pid
def spawn_link(fun) do
:erlang.spawn_link(fun)
end
@doc """
Returns the pid of a new process started by the application of
`module.function(args)`. A link is created between the calling process
and the new process, atomically. Otherwise works like spawn/3.
"""
@spec spawn_link(module, atom, [any]) :: pid
def spawn_link(mod, fun, args) do
:erlang.spawn_link(mod, fun, args)
end
@doc """
Returns the pid of a new process started by the application of `fun`
and reference for a monitor created to the new process.
"""
@spec spawn_monitor((() -> any)) :: {pid, reference}
def spawn_monitor(fun) do
:erlang.spawn_monitor(fun)
end
@doc """
A new process is started by the application of `module.function(args)`
and the process is monitored at the same time. Returns the pid and a
reference for the monitor. Otherwise works like spawn/3.
"""
@spec spawn_monitor(module, atom, [any]) :: {pid, reference}
def spawn_monitor(mod, fun, args) do
:erlang.spawn_monitor(mod, fun, args)
end
@doc """
The calling process starts monitoring the item given.
It returns the monitor reference.
See http://www.erlang.org/doc/man/erlang.html#monitor-2 for more info.
"""
@spec monitor(pid | {reg_name :: atom, node :: atom} | reg_name :: atom) :: reference
def monitor(item) do
:erlang.monitor(:process, item)
end
@doc """
If monitor_ref is a reference which the calling process
obtained by calling monitor/1, this monitoring is turned off.
If the monitoring is already turned off, nothing happens.
See http://www.erlang.org/doc/man/erlang.html#demonitor-2 for more info.
"""
@spec demonitor(reference) :: true
@spec demonitor(reference, options :: [:flush | :info]) :: boolean
def demonitor(monitor_ref, options // []) do
:erlang.demonitor(monitor_ref, options)
end
@doc """
Returns a list of process identifiers corresponding to all the
processes currently existing on the local node.
Note that a process that is exiting, exists but is not alive, i.e.,
alive?/1 will return false for a process that is exiting,
but its process identifier will be part of the result returned.
See http://www.erlang.org/doc/man/erlang.html#processes-0 for more info.
"""
@spec list :: [pid]
def list do
:erlang.processes()
end
@doc """
Creates a link between the calling process and another process
(or port) `pid`, if there is not such a link already.
See http://www.erlang.org/doc/man/erlang.html#link-1 for more info.
"""
@spec link(pid | port) :: true
def link(pid) do
:erlang.link(pid)
end
@doc """
Removes the link, if there is one, between the calling process and
the process or port referred to by `pid`. Returns true and does not
fail, even if there is no link or `pid` does not exist.
See http://www.erlang.org/doc/man/erlang.html#unlink-1 for more info.
"""
@spec unlink(pid | port) :: true
def unlink(pid) do
:erlang.unlink(pid)
end
@doc """
Associates the name with a pid or a port identifier. name, which must
be an atom, can be used instead of the pid / port identifier in the
send operator (name <- message).
See http://www.erlang.org/doc/man/erlang.html#register-2 for more info.
"""
@spec register(pid | port, atom) :: true
def register(pid, name) do
:erlang.register(name, pid)
end
@doc """
Removes the registered name, associated with a pid or a port identifier.
See http://www.erlang.org/doc/man/erlang.html#unregister-1 for more info.
"""
@spec unregister(atom) :: true
def unregister(name) do
:erlang.unregister(name)
end
@doc """
Returns the pid or port identifier with the registered name.
Returns undefined if the name is not registered.
See http://www.erlang.org/doc/man/erlang.html#whereis-1 for more info.
"""
@spec whereis(atom) :: pid | port | nil
def whereis(name) do
nillify :erlang.whereis(name)
end
@doc """
Returns the pid of the group leader for the process which evaluates the function.
"""
@spec group_leader :: pid
def group_leader do
:erlang.group_leader
end
@doc """
Sets the group leader of `pid` to `leader`. Typically, this is used when processes
started from a certain shell should have a group leader other than `:init`.
"""
@spec group_leader(leader :: pid, pid) :: true
def group_leader(leader, pid) do
:erlang.group_leader(leader, pid)
end
@doc """
Returns a list of names which have been registered using register/2.
"""
@spec registered :: [atom]
def registered do
:erlang.registered()
end
@typep process_flag :: :trap_exit | :error_handler | :min_heap_size |
:min_bin_vheap_size | :priority | :save_calls |
:sensitive
@doc """
Sets certain flags for the process which calls this function.
Returns the old value of the flag.
See http://www.erlang.org/doc/man/erlang.html#process_flag-2 for more info.
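
For example, to make the current process trap exits (illustrative):

    Process.flag(:trap_exit, true)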
"""
@spec flag(process_flag, term) :: term
def flag(flag, value) do
:erlang.process_flag(flag, value)
end
@doc """
Sets certain flags for the process Pid, in the same manner as flag/2.
Returns the old value of the flag. The allowed values for Flag are
only a subset of those allowed in flag/2, namely: save_calls.
See http://www.erlang.org/doc/man/erlang.html#process_flag-3 for more info.
"""
@spec flag(pid, process_flag, term) :: term
def flag(pid, flag, value) do
:erlang.process_flag(pid, flag, value)
end
@doc """
Returns information about the process identified by pid.
Use this only for debugging information.
See http://www.erlang.org/doc/man/erlang.html#process_info-1 for more info.
"""
@spec info(pid) :: Keyword.t
def info(pid) do
:erlang.process_info(pid)
end
@doc """
Returns information about the process identified by pid
or undefined if the process is not alive.
See http://www.erlang.org/doc/man/erlang.html#process_info-2 for more info.
"""
@spec info(pid, atom) :: {atom, term}
def info(pid, spec) do
:erlang.process_info(pid, spec)
end
defp nillify(:undefined), do: nil
defp nillify(other), do: other
end
| 29.557377 | 87 | 0.674986 |
f7a68b3949cc4221ca09bcf4856a08f524323d09 | 2,251 | exs | Elixir | backend/test/edgehog/astarte/device/storage_usage_test.exs | harlem88/edgehog | 7a278d119c3d592431fdbba406207376e194f7eb | ["Apache-2.0"] | null | null | null | backend/test/edgehog/astarte/device/storage_usage_test.exs | harlem88/edgehog | 7a278d119c3d592431fdbba406207376e194f7eb | ["Apache-2.0"] | null | null | null | backend/test/edgehog/astarte/device/storage_usage_test.exs | harlem88/edgehog | 7a278d119c3d592431fdbba406207376e194f7eb | ["Apache-2.0"] | null | null | null |
#
# This file is part of Edgehog.
#
# Copyright 2021 SECO Mind Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Edgehog.Astarte.Device.StorageUsageTest do
use Edgehog.DataCase
alias Edgehog.Astarte.Device.StorageUsage
alias Edgehog.Astarte.Device.StorageUsage.StorageUnit
alias Astarte.Client.AppEngine
describe "storage_usage" do
import Edgehog.AstarteFixtures
import Tesla.Mock
setup do
cluster = cluster_fixture()
realm = realm_fixture(cluster)
device = device_fixture(realm)
{:ok, appengine_client} =
AppEngine.new(cluster.base_api_url, realm.name, private_key: realm.private_key)
{:ok, cluster: cluster, realm: realm, device: device, appengine_client: appengine_client}
end
test "get/2 correctly parses storage usage data", %{
device: device,
appengine_client: appengine_client
} do
response = %{
"data" => %{
"nvs1" => [
%{
"freeBytes" => "7000",
"timestamp" => "2021-11-30T10:45:00.575Z",
"totalBytes" => "16128"
}
],
"nvs2" => [
%{
"freeBytes" => "5000",
"timestamp" => "2021-11-30T10:41:48.575Z",
"totalBytes" => "8064"
}
]
}
}
mock(fn
%{method: :get, url: _api_url} ->
json(response)
end)
assert {:ok, storage_units} = StorageUsage.get(appengine_client, device.device_id)
assert storage_units == [
%StorageUnit{free_bytes: 7000, label: "nvs1", total_bytes: 16128},
%StorageUnit{free_bytes: 5000, label: "nvs2", total_bytes: 8064}
]
end
end
end
| 28.858974 | 95 | 0.61928 |
f7a69164ea52c8b51ee2d742c7411958649d9e4d | 1,071 | exs | Elixir | test/mix/tasks/maintenance.disable_test.exs | wnuqui/on_maintenance | 90bd211d0da77e0708d8f27adba0dd32399a8ca2 | ["MIT"] | 13 | 2017-04-13T05:36:56.000Z | 2020-07-19T02:40:09.000Z | test/mix/tasks/maintenance.disable_test.exs | wnuqui/on_maintenance | 90bd211d0da77e0708d8f27adba0dd32399a8ca2 | ["MIT"] | null | null | null | test/mix/tasks/maintenance.disable_test.exs | wnuqui/on_maintenance | 90bd211d0da77e0708d8f27adba0dd32399a8ca2 | ["MIT"] | 1 | 2021-09-03T19:31:05.000Z | 2021-09-03T19:31:05.000Z |
Mix.shell(Mix.Shell.Process)
defmodule Mix.Tasks.Maintenance.DisableTest do
use ExUnit.Case
import Plug.OnMaintenance.Util
setup do
File.rm on_maintenance_db()
Mix.Tasks.Maintenance.InitConfigStore.run([])
on_exit fn ->
File.rm on_maintenance_db()
end
end
describe "run/1" do
setup do
assert_received {:mix_shell, :info, [_]}
{:ok, db} = Sqlitex.open(on_maintenance_db())
Mix.Tasks.Maintenance.Disable.run([])
%{db: db}
end
test "sets `on_maintenance` column to 0", %{db: db} do
[[id: _, on_maintenance: on_maintenance, retry_after: _]] =
Sqlitex.query!(db, select_sql())
assert on_maintenance == 0
end
test "sets `retry_after` column to 0", %{db: db} do
[[id: _, on_maintenance: _, retry_after: retry_after]] =
Sqlitex.query!(db, select_sql())
assert retry_after == 0
end
test "prints log to shell" do
assert_received {:mix_shell, :info, [info]}
assert info == "Disabling maintenance mode for application."
end
end
end
| 24.906977 | 66 | 0.639589 |
f7a6bd00f505d8f639dbd35c9b140c5a7117fe0f | 1,680 | ex | Elixir | lib/inch_ex/ui/shell.ex | asaaki/inch_ex | 38874a6050663f97c9cbfea29e9b413b4eea1a7b | ["MIT"] | 200 | 2015-01-05T19:08:30.000Z | 2022-02-23T01:57:09.000Z | lib/inch_ex/ui/shell.ex | asaaki/inch_ex | 38874a6050663f97c9cbfea29e9b413b4eea1a7b | ["MIT"] | 57 | 2015-01-03T12:07:28.000Z | 2021-10-31T10:26:31.000Z | lib/inch_ex/ui/shell.ex | asaaki/inch_ex | 38874a6050663f97c9cbfea29e9b413b4eea1a7b | ["MIT"] | 101 | 2015-01-18T17:20:20.000Z | 2022-03-07T23:28:53.000Z |
defmodule InchEx.UI.Shell do
@moduledoc """
This module is used by `Credo.CLI.Output.UI` to write to the shell.
"""
use GenServer
def start_link(opts \\ []) do
{:ok, _pid} = GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
def puts do
puts("")
end
def puts(value) do
GenServer.call(__MODULE__, {:puts, value})
end
def use_colors(use_colors) do
GenServer.call(__MODULE__, {:use_colors, use_colors})
end
@doc "Like `puts`, but writes to `:stderr`."
def warn(value) do
GenServer.call(__MODULE__, {:warn, value})
end
# callbacks
def init(_) do
{:ok, %{use_colors: true}}
end
def handle_call({:use_colors, use_colors}, _from, current_state) do
new_state = Map.put(current_state, :use_colors, use_colors)
{:reply, nil, new_state}
end
def handle_call({:puts, value}, _from, %{use_colors: true} = current_state) do
do_puts(value)
{:reply, nil, current_state}
end
def handle_call({:puts, value}, _from, %{use_colors: false} = current_state) do
value
|> remove_colors()
|> do_puts()
{:reply, nil, current_state}
end
def handle_call({:warn, value}, _from, %{use_colors: true} = current_state) do
do_warn(value)
{:reply, nil, current_state}
end
def handle_call({:warn, value}, _from, %{use_colors: false} = current_state) do
value
|> remove_colors()
|> do_warn()
{:reply, nil, current_state}
end
defp remove_colors(value) do
value
|> List.wrap()
|> List.flatten()
|> Enum.reject(&is_atom/1)
end
defp do_puts(value) do
Bunt.puts(value)
end
defp do_warn(value) do
Bunt.warn(value)
end
end
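# Illustrative usage; the color atom follows Bunt's palette and the strings
# are example values:
#
#     {:ok, _pid} = InchEx.UI.Shell.start_link()
#     InchEx.UI.Shell.use_colors(false)
#     InchEx.UI.Shell.puts([:green, "All good"])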
| 20 | 81 | 0.645238 |
f7a6d49c6371e36990585d997aff155bb0bdbe80 | 31 | ex | Elixir | testData/org/elixir_lang/parser_definition/literal_char_list_sigil_heredoc_parsing_test_case/WhitespaceEndPrefix.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | ["Apache-2.0"] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/literal_char_list_sigil_heredoc_parsing_test_case/WhitespaceEndPrefix.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | ["Apache-2.0"] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/literal_char_list_sigil_heredoc_parsing_test_case/WhitespaceEndPrefix.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | ["Apache-2.0"] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z |
~C'''
hi
there'''
'''
| 7.75 | 11 | 0.258065 |
f7a6eaa3aa9113a7345f33998ce9b47a97fd4b8c | 13,721 | exs | Elixir | lib/elixir/test/elixir/kernel/overridable_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | ["Apache-2.0"] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/test/elixir/kernel/overridable_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | ["Apache-2.0"] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/test/elixir/kernel/overridable_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | ["Apache-2.0"] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z |
Code.require_file("../test_helper.exs", __DIR__)
defmodule Kernel.Overridable do
def sample do
1
end
def with_super do
1
end
def without_super do
1
end
def super_with_multiple_args(x, y) do
x + y
end
def capture_super(x) do
x
end
defmacro capture_super_macro(x) do
x
end
def many_clauses(0) do
11
end
def many_clauses(1) do
13
end
def locals do
undefined_function()
end
def multiple_overrides do
[1]
end
def public_to_private do
:public
end
defoverridable sample: 0,
with_super: 0,
without_super: 0,
super_with_multiple_args: 2,
capture_super: 1,
capture_super_macro: 1,
many_clauses: 1,
locals: 0,
multiple_overrides: 0,
public_to_private: 0
true = Module.overridable?(__MODULE__, {:without_super, 0})
true = Module.overridable?(__MODULE__, {:with_super, 0})
true = {:with_super, 0} in Module.overridables_in(__MODULE__)
true = {:without_super, 0} in Module.overridables_in(__MODULE__)
def without_super do
:without_super
end
def with_super do
super() + 2
end
true = Module.overridable?(__MODULE__, {:without_super, 0})
true = Module.overridable?(__MODULE__, {:with_super, 0})
true = {:with_super, 0} in Module.overridables_in(__MODULE__)
true = {:without_super, 0} in Module.overridables_in(__MODULE__)
def super_with_multiple_args(x, y) do
super(x, y * 2)
end
def capture_super(x) do
Enum.map(1..x, &super(&1)) ++ Enum.map(1..x, &super/1)
end
defmacro capture_super_macro(x) do
Enum.map(1..x, &super(&1)) ++ Enum.map(1..x, &super/1)
end
def many_clauses(2) do
17
end
def many_clauses(3) do
super(0) + super(1)
end
def many_clauses(x) do
super(x)
end
def locals do
:ok
end
def multiple_overrides do
[2 | super()]
end
defp public_to_private do
:private
end
def test_public_to_private do
public_to_private()
end
defoverridable multiple_overrides: 0
def multiple_overrides do
[3 | super()]
end
## Macros
defmacro overridable_macro(x) do
quote do
unquote(x) + 100
end
end
defoverridable overridable_macro: 1
defmacro overridable_macro(x) do
quote do
unquote(super(x)) + 1000
end
end
defmacrop private_macro(x \\ raise("never called"))
defmacrop private_macro(x) do
quote do
unquote(x) + 100
end
end
defoverridable private_macro: 1
defmacrop private_macro(x) do
quote do
unquote(super(x)) + 1000
end
end
def private_macro_call(val \\ 11) do
private_macro(val)
end
end
defmodule Kernel.OverridableExampleBehaviour do
@callback required_callback :: any
@callback optional_callback :: any
@macrocallback required_macro_callback(arg :: any) :: Macro.t()
@macrocallback optional_macro_callback(arg :: any, arg2 :: any) :: Macro.t()
@optional_callbacks optional_callback: 0, optional_macro_callback: 2
end
defmodule Kernel.OverridableTest do
require Kernel.Overridable, as: Overridable
use ExUnit.Case
defp purge(module) do
:code.purge(module)
:code.delete(module)
end
test "overridable keeps function ordering" do
defmodule OverridableOrder do
def not_private(str) do
process_url(str)
end
def process_url(_str) do
:first
end
# There was a bug where the order in which we removed
# overridable expressions led to errors. This module
# aims to guarantee removing process_url/1 before we
# remove the function that depends on it does not cause
# errors. If it compiles, it works!
defoverridable process_url: 1, not_private: 1
def process_url(_str) do
:second
end
end
end
test "overridable works with defaults" do
defmodule OverridableDefault do
def fun(value, opt \\ :from_parent) do
{value, opt}
end
defmacro macro(value, opt \\ :from_parent) do
{{value, opt}, Macro.escape(__CALLER__)}
end
# There was a bug where the default function would
# attempt to call its overridable name instead of
# func/1. If it compiles, it works!
defoverridable fun: 1, fun: 2, macro: 1, macro: 2
def fun(value) do
{value, super(value)}
end
defmacro macro(value) do
{{value, super(value)}, Macro.escape(__CALLER__)}
end
end
defmodule OverridableCall do
require OverridableDefault
OverridableDefault.fun(:foo)
OverridableDefault.macro(:bar)
end
end
test "overridable is made concrete if no other is defined" do
assert Overridable.sample() == 1
end
test "overridable overridden with super" do
assert Overridable.with_super() == 3
end
test "overridable overridden without super" do
assert Overridable.without_super() == :without_super
end
test "public overridable overridden as private function" do
assert Overridable.test_public_to_private() == :private
refute {:public_to_private, 0} in Overridable.module_info(:exports)
end
test "overridable locals are ignored without super" do
assert Overridable.locals() == :ok
end
test "calling super with multiple args" do
assert Overridable.super_with_multiple_args(1, 2) == 5
end
test "calling super using function captures" do
assert Overridable.capture_super(5) == [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]
end
test "calling super of an overridable macro using function captures" do
assert Overridable.capture_super_macro(5) == [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]
end
test "super as a variable" do
super = :ok
assert super == :ok
end
test "overridable with many clauses" do
assert Overridable.many_clauses(0) == 11
assert Overridable.many_clauses(1) == 13
assert Overridable.many_clauses(2) == 17
assert Overridable.many_clauses(3) == 24
end
test "overridable definitions are private" do
refute {:"with_super (overridable 0)", 0} in Overridable.module_info(:exports)
refute {:"with_super (overridable 1)", 0} in Overridable.module_info(:exports)
end
test "multiple overrides" do
assert Overridable.multiple_overrides() == [3, 2, 1]
end
test "overridable macros" do
a = 11
assert Overridable.overridable_macro(a) == 1111
assert Overridable.private_macro_call() == 1111
end
test "invalid super call" do
message =
"nofile:4: no super defined for foo/0 in module Kernel.OverridableOrder.Forwarding. " <>
"Overridable functions available are: bar/0"
assert_raise CompileError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableOrder.Forwarding do
def bar(), do: 1
defoverridable bar: 0
def foo(), do: super()
end
""")
end
purge(Kernel.OverridableOrder.Forwarding)
end
test "invalid super call with different arity" do
message =
"nofile:4: super must be called with the same number of arguments as the current definition"
assert_raise CompileError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableSuper.DifferentArities do
def bar(a), do: a
defoverridable bar: 1
def bar(_), do: super()
end
""")
end
end
test "invalid super capture with different arity" do
message =
"nofile:4: super must be called with the same number of arguments as the current definition"
assert_raise CompileError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableSuperCapture.DifferentArities do
def bar(a), do: a
defoverridable bar: 1
def bar(_), do: (&super/0).()
end
""")
end
end
test "does not allow to override a macro as a function" do
message =
"nofile:4: cannot override macro (defmacro, defmacrop) foo/0 in module " <>
"Kernel.OverridableMacro.FunctionOverride as a function (def, defp)"
assert_raise CompileError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableMacro.FunctionOverride do
defmacro foo(), do: :ok
defoverridable foo: 0
def foo(), do: :invalid
end
""")
end
purge(Kernel.OverridableMacro.FunctionOverride)
assert_raise CompileError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableMacro.FunctionOverride do
defmacro foo(), do: :ok
defoverridable foo: 0
def foo(), do: :invalid
defoverridable foo: 0
def foo(), do: :invalid
end
""")
end
purge(Kernel.OverridableMacro.FunctionOverride)
assert_raise CompileError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableMacro.FunctionOverride do
defmacro foo(), do: :ok
defoverridable foo: 0
def foo(), do: super()
end
""")
end
purge(Kernel.OverridableMacro.FunctionOverride)
end
test "does not allow to override a function as a macro" do
message =
"nofile:4: cannot override function (def, defp) foo/0 in module " <>
"Kernel.OverridableFunction.MacroOverride as a macro (defmacro, defmacrop)"
assert_raise CompileError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableFunction.MacroOverride do
def foo(), do: :ok
defoverridable foo: 0
defmacro foo(), do: :invalid
end
""")
end
purge(Kernel.OverridableFunction.MacroOverride)
assert_raise CompileError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableFunction.MacroOverride do
def foo(), do: :ok
defoverridable foo: 0
defmacro foo(), do: :invalid
defoverridable foo: 0
defmacro foo(), do: :invalid
end
""")
end
purge(Kernel.OverridableFunction.MacroOverride)
assert_raise CompileError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableFunction.MacroOverride do
def foo(), do: :ok
defoverridable foo: 0
defmacro foo(), do: super()
end
""")
end
purge(Kernel.OverridableFunction.MacroOverride)
end
test "undefined functions can't be marked as overridable" do
message = "cannot make function foo/2 overridable because it was not defined"
assert_raise ArgumentError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableOrder.Foo do
defoverridable foo: 2
end
""")
end
purge(Kernel.OverridableOrder.Foo)
end
test "overrides with behaviour" do
defmodule OverridableWithBehaviour do
@behaviour Elixir.Kernel.OverridableExampleBehaviour
def required_callback(), do: "original"
def optional_callback(), do: "original"
def not_a_behaviour_callback(), do: "original"
defmacro required_macro_callback(boolean) do
quote do
if unquote(boolean) do
"original"
end
end
end
defoverridable Elixir.Kernel.OverridableExampleBehaviour
defmacro optional_macro_callback(arg1, arg2), do: {arg1, arg2}
assert Module.overridable?(__MODULE__, {:required_callback, 0})
assert Module.overridable?(__MODULE__, {:optional_callback, 0})
assert Module.overridable?(__MODULE__, {:required_macro_callback, 1})
refute Module.overridable?(__MODULE__, {:optional_macro_callback, 1})
refute Module.overridable?(__MODULE__, {:not_a_behaviour_callback, 1})
end
end
test "undefined module can't be passed as argument to defoverridable" do
message =
"cannot pass module Kernel.OverridableTest.Bar as argument to defoverridable/1 because it was not defined"
assert_raise ArgumentError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableTest.Foo do
defoverridable Kernel.OverridableTest.Bar
end
""")
end
purge(Kernel.OverridableTest.Foo)
end
test "module without @behaviour can't be passed as argument to defoverridable" do
message =
"cannot pass module Kernel.OverridableExampleBehaviour as argument to defoverridable/1" <>
" because its corresponding behaviour is missing. Did you forget to add " <>
"@behaviour Kernel.OverridableExampleBehaviour?"
assert_raise ArgumentError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableTest.Foo do
defoverridable Kernel.OverridableExampleBehaviour
end
""")
end
purge(Kernel.OverridableTest.Foo)
end
test "module with no callbacks can't be passed as argument to defoverridable" do
message =
"cannot pass module Kernel.OverridableTest.Bar as argument to defoverridable/1 because it does not define any callbacks"
assert_raise ArgumentError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableTest.Bar do
end
defmodule Kernel.OverridableTest.Foo do
@behaviour Kernel.OverridableTest.Bar
defoverridable Kernel.OverridableTest.Bar
end
""")
end
purge(Kernel.OverridableTest.Bar)
purge(Kernel.OverridableTest.Foo)
end
test "atom which is not a module can't be passed as argument to defoverridable" do
message = "cannot pass module :abc as argument to defoverridable/1 because it was not defined"
assert_raise ArgumentError, message, fn ->
Code.eval_string("""
defmodule Kernel.OverridableTest.Foo do
defoverridable :abc
end
""")
end
purge(Kernel.OverridableTest.Foo)
end
end
| 25.503717 | 126 | 0.663946 |
f7a6f0147a30f39bacae27c3af57b907a60d32be | 1,604 | ex | Elixir | lib/policr_mini/businesses/custom_kit_bussiness.ex | skyplaying/policr-mini | ac265daa251fd76b770d0ce08c67075a6a57f796 | ["MIT"] | null | null | null | lib/policr_mini/businesses/custom_kit_bussiness.ex | skyplaying/policr-mini | ac265daa251fd76b770d0ce08c67075a6a57f796 | ["MIT"] | 2 | 2022-02-25T06:15:30.000Z | 2022-02-25T06:15:33.000Z | lib/policr_mini/businesses/custom_kit_bussiness.ex | skyplaying/policr-mini | ac265daa251fd76b770d0ce08c67075a6a57f796 | ["MIT"] | null | null | null |
defmodule PolicrMini.CustomKitBusiness do
@moduledoc """
The business logic implementation for custom verification kits.
"""
use PolicrMini, business: PolicrMini.Schema.CustomKit
import Ecto.Query, only: [from: 2, dynamic: 2]
@type writed_result :: {:ok, CustomKit.t()} | {:error, Ecto.Changeset.t()}
@max_count 55
@spec create(map()) :: writed_result | {:error, %{description: String.t()}}
def create(params) do
chat_id = params[:chat_id] || params["chat_id"]
if find_count(chat_id: chat_id) >= @max_count do
{:error, %{description: "the total number of custom kits has reached the upper limit"}}
else
%CustomKit{} |> CustomKit.changeset(params) |> Repo.insert()
end
end
def update(%CustomKit{} = custom_kit, params) do
custom_kit |> CustomKit.changeset(params) |> Repo.update()
end
def delete(%CustomKit{} = custom_kit) do
custom_kit |> Repo.delete()
end
@spec find_list(integer) :: [CustomKit.t()]
def find_list(chat_id) when is_integer(chat_id) or is_binary(chat_id) do
from(c in CustomKit, where: c.chat_id == ^chat_id) |> Repo.all()
end
def random_one(chat_id) do
from(c in CustomKit, where: c.chat_id == ^chat_id, order_by: fragment("RANDOM()"), limit: 1)
|> Repo.one()
end
@type find_count_opts :: [{:chat_id, integer()}]
# TODO: add tests
@spec find_count(any) :: integer()
def find_count(options \\ []) do
filter_chat_id =
if chat_id = Keyword.get(options, :chat_id),
do: dynamic([c], c.chat_id == ^chat_id),
else: true
from(c in CustomKit, select: count(c.id), where: ^filter_chat_id) |> Repo.one()
end
end
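# Illustrative call flow; the chat id is a hypothetical value:
#
#     PolicrMini.CustomKitBusiness.find_count(chat_id: 10_001)
#     #=> 0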
| 28.642857 | 96 | 0.65399 |
f7a6f417625cdd4cb510c23a9e550eead034f102 | 889 | ex | Elixir | lib/hedwig_simple_responders/responders/slogan.ex | HeroicEric/hedwig_simple_responders | f939b1a88a93651ee7406ae8dd85f58c759bce45 | [
"MIT",
"Unlicense"
] | null | null | null | lib/hedwig_simple_responders/responders/slogan.ex | HeroicEric/hedwig_simple_responders | f939b1a88a93651ee7406ae8dd85f58c759bce45 | [
"MIT",
"Unlicense"
] | null | null | null | lib/hedwig_simple_responders/responders/slogan.ex | HeroicEric/hedwig_simple_responders | f939b1a88a93651ee7406ae8dd85f58c759bce45 | [
"MIT",
"Unlicense"
] | null | null | null | defmodule HedwigSimpleResponders.Slogan do
@moduledoc """
`slogan <brand>` - Generate a killer slogan
"""
use Hedwig.Responder
@slogan_endpoint "https://4krgs6alv6.execute-api.us-west-2.amazonaws.com/prod/slogan"
@usage """
slogan <brand> - Generates a slogan for your awesome brand
"""
hear ~r/^slogan (?<brand>.*)/i, message do
brand = message.matches["brand"]
send message, fetch(URI.encode("#{@slogan_endpoint}?#{URI.encode_query(%{q: brand})}"))
end
@doc false
defp fetch(url) do
:inets.start()
:ssl.start()
case :httpc.request(:get, {String.to_charlist(url),
[{'User-Agent', 'Hedwig (Elixir/#{System.version})'},
{'Accept', 'application/json'}]}, [], []) do
{:ok, {_, _, body}} ->
body
|> to_string
|> URI.decode
_ ->
"Unable to generate a slogan"
end
end
end | 26.939394 | 91 | 0.5973 |
f7a747d9f8ca43cf43543d784012be37655bf9e3 | 19,395 | ex | Elixir | lib/ecto.ex | ohta-rh/ecto | d5f8bfdfcc6fcfb520c62bbd1dbdd8ee6f09de59 | [
"Apache-2.0"
] | null | null | null | lib/ecto.ex | ohta-rh/ecto | d5f8bfdfcc6fcfb520c62bbd1dbdd8ee6f09de59 | [
"Apache-2.0"
] | null | null | null | lib/ecto.ex | ohta-rh/ecto | d5f8bfdfcc6fcfb520c62bbd1dbdd8ee6f09de59 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto do
@moduledoc ~S"""
Ecto is split into 4 main components:
* `Ecto.Repo` - repositories are wrappers around the data store.
Via the repository, we can create, update, destroy and query existing entries.
A repository needs an adapter and credentials to communicate to the database
* `Ecto.Schema` - schemas are used to map any data source into an Elixir
struct. We will often use them to map tables into Elixir data but that's
one of their use cases and not a requirement for using Ecto
* `Ecto.Changeset` - changesets provide a way for developers to filter
and cast external parameters, as well as a mechanism to track and
validate changes before they are applied to your data
* `Ecto.Query` - written in Elixir syntax, queries are used to retrieve
information from a given repository. Queries in Ecto are secure, avoiding
common problems like SQL Injection, while still being composable, allowing
developers to build queries piece by piece instead of all at once
In the following sections, we will provide an overview of those components and
how they interact with each other. Feel free to access their respective module
documentation for more specific examples, options and configuration.
If you want to quickly check a sample application using Ecto, please check
the [getting started guide](http://hexdocs.pm/ecto/getting-started.html) and
the accompanying sample application.
## Repositories
`Ecto.Repo` is a wrapper around the database. We can define a
repository as follows:
defmodule Repo do
use Ecto.Repo, otp_app: :my_app
end
Where the configuration for the Repo must be in your application
environment, usually defined in your `config/config.exs`:
config :my_app, Repo,
adapter: Ecto.Adapters.Postgres,
database: "ecto_simple",
username: "postgres",
password: "postgres",
hostname: "localhost",
# OR use a URL to connect instead
url: "postgres://postgres:postgres@localhost/ecto_simple"
Each repository in Ecto defines a `start_link/0` function that needs to be invoked
before using the repository. In general, this function is not called directly,
but used as part of your application supervision tree.
If your application was generated with a supervisor (by passing `--sup` to `mix new`)
you will have a `lib/my_app.ex` file containing the application start callback that
defines and starts your supervisor. You just need to edit the `start/2` function to
start the repo as a supervisor on your application's supervisor:
def start(_type, _args) do
import Supervisor.Spec
children = [
supervisor(Repo, [])
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
## Schema
Schemas allows developers to define the shape of their data.
Let's see an example:
defmodule Weather do
use Ecto.Schema
# weather is the DB table
schema "weather" do
field :city, :string
field :temp_lo, :integer
field :temp_hi, :integer
field :prcp, :float, default: 0.0
end
end
By defining a schema, Ecto automatically defines a struct with
the schema fields:
iex> weather = %Weather{temp_lo: 30}
iex> weather.temp_lo
30
The schema also allows us to interact with a repository:
iex> weather = %Weather{temp_lo: 0, temp_hi: 23}
iex> Repo.insert!(weather)
%Weather{...}
After persisting `weather` to the database, it will return a new copy of
`%Weather{}` with the primary key (the `id`) set. We can use this value
to read a struct back from the repository:
# Get the struct back
iex> weather = Repo.get Weather, 1
%Weather{id: 1, ...}
# Delete it
iex> Repo.delete!(weather)
%Weather{...}
> NOTE: by using `Ecto.Schema`, an `:id` field with type `:id` (:id means :integer) is
> generated by default, which is the primary key of the Schema. If you want
> to use a different primary key, you can declare custom `@primary_key`
> before the `schema/2` call. Consult the `Ecto.Schema` documentation
> for more information.
Notice how the storage (repository) and the data are decoupled. This provides
two main benefits:
* By having structs as data, we guarantee they are light-weight,
serializable structures. In many languages, the data is often represented
by large, complex objects, with entwined state transactions, which makes
serialization, maintenance and understanding hard;
* You do not need to define schemas in order to interact with repositories,
operations like `all`, `insert_all` and so on allow developers to directly
access and modify the data, keeping the database at your fingertips when
necessary;
## Changesets
Although in the example above we have directly inserted and deleted the
struct in the repository, operations on top of schemas are done through
changesets so Ecto can efficiently track changes.
Changesets allow developers to filter, cast, and validate changes before
we apply them to the data. Imagine the given schema:
defmodule User do
use Ecto.Schema
import Ecto.Changeset
schema "users" do
field :name
field :email
field :age, :integer
end
def changeset(user, params \\ %{}) do
user
|> cast(params, [:name, :email, :age])
|> validate_required([:name, :email])
|> validate_format(:email, ~r/@/)
|> validate_inclusion(:age, 18..100)
end
end
The `changeset/2` function first invokes `Ecto.Changeset.cast/3` with
the struct, the parameters and a list of allowed fields; this returns a changeset.
The parameters are given as a map with binary keys and values, which are
cast based on the types defined in the schema.
Any parameter that was not explicitly listed in the fields list will be ignored.
After casting, the changeset is given to many `Ecto.Changeset.validate_*/2`
functions that validate only the **changed fields**. In other words:
if a field was not given as a parameter, it won't be validated at all.
For example, if the params map contain only the "name" and "email" keys,
the "age" validation won't run.
Once a changeset is built, it can be given to functions like `insert` and
`update` in the repository that will return an `:ok` or `:error` tuple:
case Repo.update(changeset) do
{:ok, user} ->
# user updated
{:error, changeset} ->
# an error occurred
end
The benefit of having explicit changesets is that we can easily provide
different changesets for different use cases. For example, one
could easily provide specific changesets for registering and updating
users:
def registration_changeset(user, params) do
# Changeset on create
end
def update_changeset(user, params) do
# Changeset on update
end
Changesets are also capable of transforming database constraints,
like unique indexes and foreign key checks, into errors, allowing
developers to keep their database consistent while still providing
proper feedback to end users. Check `Ecto.Changeset.unique_constraint/3`
for some examples as well as the other `_constraint` functions.
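
For instance, a minimal sketch (the `:email` field is illustrative):

    def changeset(user, params) do
      user
      |> cast(params, [:email])
      |> unique_constraint(:email)
    end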
## Query
Last but not least, Ecto allows you to write queries in Elixir and send
them to the repository, which translates them to the underlying database.
Let's see an example:
import Ecto.Query, only: [from: 2]
query = from u in User,
where: u.age > 18 or is_nil(u.email),
select: u
# Returns %User{} structs matching the query
Repo.all(query)
In the example above we relied on our schema but queries can also be
made directly against a table by giving the table name as a string. In
such cases, the data to be fetched must be explicitly outlined:
query = from u in "users",
where: u.age > 18 or is_nil(u.email),
select: %{name: u.name, age: u.age}
# Returns maps as defined in select
Repo.all(query)
Queries are defined and extended with the `from` macro. The supported
keywords are:
* `:distinct`
* `:where`
* `:order_by`
* `:offset`
* `:limit`
* `:lock`
* `:group_by`
* `:having`
* `:join`
* `:select`
* `:preload`
Examples and detailed documentation for each of those are available
in the `Ecto.Query` module. Functions supported in queries are listed
in `Ecto.Query.API`.
When writing a query, you are inside Ecto's query syntax. In order to
access params values or invoke Elixir functions, you need to use the `^`
operator, which is overloaded by Ecto:
def min_age(min) do
from u in User, where: u.age > ^min
end
Besides `Repo.all/1` which returns all entries, repositories also
provide `Repo.one/1` which returns one entry or nil, `Repo.one!/1`
which returns one entry or raises, `Repo.get/2` which fetches
entries for a particular ID and more.
Finally, if you need an escape hatch, Ecto provides fragments
(see `Ecto.Query.API.fragment/1`) to inject SQL (and non-SQL)
fragments into queries. Also, most adapters provide direct
APIs for queries, like `Ecto.Adapters.SQL.query/4`, allowing
developers to completely bypass Ecto queries.
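
For example, a fragment-based query might look like this (`lower/1` here is
plain SQL and assumes a SQL adapter):

    from p in Post, where: fragment("lower(?)", p.title) == "hello"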
## Other topics
### Associations
Ecto supports defining associations on schemas:
defmodule Post do
use Ecto.Schema
schema "posts" do
has_many :comments, Comment
end
end
defmodule Comment do
use Ecto.Schema
schema "comments" do
field :title, :string
belongs_to :post, Post
end
end
When an association is defined, Ecto also defines a field in the schema
with the association name. By default, associations are not loaded into
this field:
iex> post = Repo.get(Post, 42)
iex> post.comments
#Ecto.Association.NotLoaded<...>
However, developers can use the preload functionality in queries to
automatically pre-populate the field:
Repo.all from p in Post, preload: [:comments]
Preloading can also be done with a pre-defined join value:
Repo.all from p in Post,
join: c in assoc(p, :comments),
where: c.votes > p.votes,
preload: [comments: c]
Finally, for the simple cases, preloading can also be done after
a collection was fetched:
posts = Repo.all(Post) |> Repo.preload(:comments)
The `Ecto` module also provides conveniences for working
with associations. For example, `Ecto.assoc/2` returns a query
with all associated data to a given struct:
import Ecto
# Get all comments for the given post
Repo.all assoc(post, :comments)
# Or build a query on top of the associated comments
query = from c in assoc(post, :comments), where: not is_nil(c.title)
Repo.all(query)
Another function in `Ecto` is `build_assoc/3`, which allows
someone to build an associated struct with the proper fields:
Repo.transaction fn ->
post = Repo.insert!(%Post{title: "Hello", body: "world"})
# Build a comment from post
comment = Ecto.build_assoc(post, :comments, body: "Excellent!")
Repo.insert!(comment)
end
In the example above, `Ecto.build_assoc/3` is equivalent to:
%Comment{post_id: post.id, body: "Excellent!"}
You can find more information about defining associations and each
respective association module in `Ecto.Schema` docs.
> NOTE: Ecto does not lazy load associations. While lazily loading
> associations may sound convenient at first, in the long run it
> becomes a source of confusion and performance issues.
### Embeds
Ecto also supports embeds. While associations keep parent and child
entries in different tables, embeds stores the child along side the
parent.
Databases like Mongo have native support for embeds. Databases
like PostgreSQL uses a mixture of JSONB (`embeds_one/3`) and ARRAY
columns to provide this functionality.
Check `Ecto.Schema.embeds_one/3` and `Ecto.Schema.embeds_many/3`
for more information.
### Mix tasks and generators
Ecto provides many tasks to help your workflow as well as code generators.
You can find all available tasks by typing `mix help` inside a project
with Ecto listed as a dependency.
Ecto generators will automatically open the generated files if you have
`ECTO_EDITOR` set in your environment variable.
#### Migrations
Ecto supports database migrations. You can generate a migration with:
$ mix ecto.gen.migration create_posts
This will create a new file inside `priv/repo/migrations` with the `change`
function. Check `Ecto.Migration` for more information.
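
A generated migration can then be filled in like this (the table and column
names are illustrative):

    defmodule MyApp.Repo.Migrations.CreatePosts do
      use Ecto.Migration

      def change do
        create table(:posts) do
          add :title, :string
        end
      end
    end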
#### Repo resolution
Ecto requires developers to specify the key `:ecto_repos` in their application
configuration before using tasks like `ecto.create` and `ecto.migrate`. For example:
config :my_app, :ecto_repos, [MyApp.Repo]
config :my_app, MyApp.Repo,
adapter: Ecto.Adapters.Postgres,
database: "ecto_simple",
username: "postgres",
password: "postgres",
hostname: "localhost"
"""
@doc """
Returns the schema primary keys as a keyword list.
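
For example (the values are illustrative):

    iex> post = Repo.get(Post, 13)
    iex> Ecto.primary_key(post)
    [id: 13]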
"""
@spec primary_key(Ecto.Schema.t) :: Keyword.t
def primary_key(%{__struct__: schema} = struct) do
Enum.map schema.__schema__(:primary_key), fn(field) ->
{field, Map.fetch!(struct, field)}
end
end
@doc """
Returns the schema primary keys as a keyword list.
Raises `Ecto.NoPrimaryKeyFieldError` if the schema has no
primary key field.
"""
@spec primary_key!(Ecto.Schema.t) :: Keyword.t | no_return
def primary_key!(%{__struct__: schema} = struct) do
case primary_key(struct) do
[] -> raise Ecto.NoPrimaryKeyFieldError, schema: schema
pk -> pk
end
end
@doc """
Builds a struct from the given `assoc` in `struct`.
## Examples
If the relationship is a `has_one` or `has_many` and
the key is set in the given struct, the key will automatically
be set in the built association:
iex> post = Repo.get(Post, 13)
%Post{id: 13}
iex> build_assoc(post, :comments)
%Comment{id: nil, post_id: 13}
Note though it doesn't happen with `belongs_to` cases, as the
key is often the primary key and such is usually generated
dynamically:
iex> comment = Repo.get(Comment, 13)
%Comment{id: 13, post_id: 25}
iex> build_assoc(comment, :post)
%Post{id: nil}
You can also pass the attributes, which can be a map or
a keyword list, to set the struct's fields except the
association key.
iex> build_assoc(post, :comments, text: "cool")
%Comment{id: nil, post_id: 13, text: "cool"}
iex> build_assoc(post, :comments, %{text: "cool"})
%Comment{id: nil, post_id: 13, text: "cool"}
iex> build_assoc(post, :comments, post_id: 1)
%Comment{id: nil, post_id: 13}
"""
def build_assoc(%{__struct__: schema} = struct, assoc, attributes \\ %{}) do
assoc = Ecto.Association.association_from_schema!(schema, assoc)
assoc.__struct__.build(assoc, struct, drop_meta(attributes))
end
defp drop_meta(%{} = attrs), do: Map.drop(attrs, [:__struct__, :__meta__])
defp drop_meta([_|_] = attrs), do: Keyword.drop(attrs, [:__struct__, :__meta__])
@doc """
Builds a query for the association in the given struct or structs.
## Examples
In the example below, we get all comments associated to the given
post:
post = Repo.get Post, 1
Repo.all Ecto.assoc(post, :comments)
`assoc/2` can also receive a list of posts, as long as the posts are
not empty:
posts = Repo.all from p in Post, where: is_nil(p.published_at)
Repo.all Ecto.assoc(posts, :comments)
This function can also be used to dynamically load through associations
by giving it a list. For example, to get all authors for all comments for
the given posts, do:
posts = Repo.all from p in Post, where: is_nil(p.published_at)
Repo.all Ecto.assoc(posts, [:comments, :author])
"""
def assoc(struct_or_structs, assocs) do
[assoc | assocs] = List.wrap(assocs)
structs = List.wrap(struct_or_structs)
if structs == [] do
raise ArgumentError, "cannot retrieve association #{inspect assoc} for empty list"
end
schema = hd(structs).__struct__
assoc = %{owner_key: owner_key} =
Ecto.Association.association_from_schema!(schema, assoc)
values =
Enum.uniq for(struct <- structs,
assert_struct!(schema, struct),
key = Map.fetch!(struct, owner_key),
do: key)
Ecto.Association.assoc_query(assoc, assocs, nil, values)
end
@doc """
Checks if an association is loaded.
## Examples
iex> post = Repo.get(Post, 1)
iex> Ecto.assoc_loaded?(post.comments)
false
iex> post = post |> Repo.preload(:comments)
iex> Ecto.assoc_loaded?(post.comments)
true
"""
def assoc_loaded?(association) do
case association do
%Ecto.Association.NotLoaded{} -> false
_ -> true
end
end
@doc """
Gets the metadata from the given struct.
"""
def get_meta(struct, :context),
do: struct.__meta__.context
def get_meta(struct, :state),
do: struct.__meta__.state
def get_meta(struct, :source),
do: struct.__meta__.source |> elem(1)
def get_meta(struct, :prefix),
do: struct.__meta__.source |> elem(0)
@doc """
Returns a new struct with updated metadata.
It is possible to set:
* `:source` - changes the struct query source
* `:prefix` - changes the struct query prefix
* `:context` - changes the struct meta context
* `:state` - changes the struct state
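
For example, to point a struct at another table (the source name is
illustrative):

    struct = Ecto.put_meta(struct, source: "archived_posts")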
"""
@spec put_meta(Ecto.Schema.t, [source: String.t, prefix: String.t,
context: term, state: :built | :loaded | :deleted]) :: Ecto.Schema.t
def put_meta(struct, opts) do
update_in struct.__meta__, &update_meta(opts, &1)
end
defp update_meta([{:state, state}|t], meta) do
if state in [:built, :loaded, :deleted] do
update_meta t, %{meta | state: state}
else
raise ArgumentError, "invalid state #{inspect state}"
end
end
defp update_meta([{:source, source}|t], %{source: {prefix, _}} = meta) do
update_meta t, %{meta | source: {prefix, source}}
end
defp update_meta([{:prefix, prefix}|t], %{source: {_, source}} = meta) do
update_meta t, %{meta | source: {prefix, source}}
end
defp update_meta([{:context, context}|t], meta) do
update_meta t, %{meta | context: context}
end
defp update_meta([], meta) do
meta
end
defp update_meta([{k, _}], _meta) do
raise ArgumentError, "unknown meta key #{inspect k}"
end
defp assert_struct!(module, %{__struct__: struct}) do
if struct != module do
raise ArgumentError, "expected a homogeneous list containing the same struct, " <>
"got: #{inspect module} and #{inspect struct}"
else
true
end
end
end
| 32.487437 | 100 | 0.676463 |
f7a74b9f24b006f4c5145364ccbaae7ce174efaa | 376 | ex | Elixir | web/views/error_view.ex | Raiszo/chatzExs | 6321f8b8f46b5c86cdbc80ab0d24d5f97ca7ebfd | ["MIT"] | null | null | null | web/views/error_view.ex | Raiszo/chatzExs | 6321f8b8f46b5c86cdbc80ab0d24d5f97ca7ebfd | ["MIT"] | null | null | null | web/views/error_view.ex | Raiszo/chatzExs | 6321f8b8f46b5c86cdbc80ab0d24d5f97ca7ebfd | ["MIT"] | null | null | null |
defmodule ChatzExs.ErrorView do
use ChatzExs.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Internal server error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 20.888889 | 47 | 0.699468 |
f7a74f1b48d7c795db38a75a50f24ff5d97414b7 | 1,896 | exs | Elixir | mix.exs | kianmeng/mailman | cf5b6da436fb0d3d1d816c804aa0dbab157a0150 | ["MIT"] | null | null | null | mix.exs | kianmeng/mailman | cf5b6da436fb0d3d1d816c804aa0dbab157a0150 | ["MIT"] | null | null | null | mix.exs | kianmeng/mailman | cf5b6da436fb0d3d1d816c804aa0dbab157a0150 | ["MIT"] | null | null | null |
defmodule Mailman.Mixfile do
use Mix.Project
def project do
[
app: :mailman,
name: "Mailman",
source_url: "https://github.com/kamilc/mailman",
homepage_url: "https://github.com/kamilc/mailman",
description: "Library providing a clean way of defining mailers in Elixir apps",
package: package(),
version: "0.4.3",
elixir: "~> 1.0",
deps: deps(),
docs: docs(),
]
end
# Configuration for the OTP application
def application do
[applications: [:ssl, :crypto, :eiconv, :gen_smtp, :httpoison]]
end
# Note that :eiconv encoder/decoder is used by gen_smtp as well,
# and will not be replaced by the newer iconv (see https://github.com/gen-smtp/gen_smtp/issues/95)
#
# If the eiconv NIF fails to compile, try updating rebar:
#> mix local.rebar
#> rm -rf deps
#> rm -rf _build
#> mix deps.get
#> mix
# Returns the list of dependencies in the format:
defp deps do
[
{:eiconv, "~> 1.0.0"},
{:gen_smtp, "~> 1.0.1"},
{:ex_doc, ">= 0.19.1", only: :dev},
{:httpoison, "~> 1.6"},
{:credo, "~> 1.5.0-rc.2", only: [:dev, :test], runtime: false}
]
end
defp docs do
[
main: "overview",
formatter_opts: [gfm: true],
source_url: "https://github.com/mailman-elixir/mailman",
extras: [
"docs/Overview.md",
"docs/Email.md",
"docs/Rendering.md",
"docs/SmtpAdapter.md",
"docs/LocalTestAdapters.md",
"docs/MixConfig.md",
"docs/Delivery.md",
]
]
end
defp package do
[
files: ["lib", "docs", "LICENSE", "README", "mix.exs"],
maintainers: ["Kamil Ciemniewski <[email protected]>"],
licenses: ["MIT"],
links: %{
"GitHub" => "https://github.com/kamilc/mailman",
"Docs" => "http://hexdocs.pm/mailman"
}
]
end
end
| 25.621622 | 100 | 0.572257 |
f7a765c79ef2a0451120b6695850c527ece43931 | 1,201 | ex | Elixir | lib/webbkoll/sites/sites.ex | twigbit/webbkoll | 9faac7d0b4101f3801e78ed156665c8132197116 | ["MIT"] | 268 | 2016-07-08T17:14:58.000Z | 2022-02-15T19:41:24.000Z | lib/webbkoll/sites/sites.ex | twigbit/webbkoll | 9faac7d0b4101f3801e78ed156665c8132197116 | ["MIT"] | 29 | 2016-08-23T19:19:01.000Z | 2022-02-01T16:28:23.000Z | lib/webbkoll/sites/sites.ex | twigbit/webbkoll | 9faac7d0b4101f3801e78ed156665c8132197116 | ["MIT"] | 37 | 2016-08-24T21:52:45.000Z | 2021-09-08T11:27:15.000Z |
defmodule Webbkoll.Sites do
alias Webbkoll.Sites.Site
def add_site(url) do
id = UUID.uuid4()
site = %Site{
id: id,
input_url: url,
try_count: 0,
status: "queue",
inserted_at: System.system_time(:microsecond)
}
ConCache.put(:site_cache, id, site)
{:ok, site}
end
def update_site(id, params) do
ConCache.update(:site_cache, id, fn old ->
{
:ok,
old |> Map.merge(params) |> Map.merge(%{updated_at: System.system_time(:microsecond)})
}
end)
end
def increment_site_tries(id) do
ConCache.update(:site_cache, id, fn old ->
{:ok, Map.update(old, :try_count, 0, &(&1 + 1))}
end)
end
def get_site(id) do
ConCache.get(:site_cache, id)
end
def get_latest_from_cache(url) do
input = get_sites_by(%{input_url: url})
input
|> Enum.filter(&is_tuple/1)
|> Enum.sort(fn x, y ->
elem(x, 1)
|> Map.get(:inserted_at) >
elem(y, 1)
|> Map.get(:inserted_at)
end)
|> List.first()
end
def get_sites_by(params) do
:ets.match_object(ConCache.ets(:site_cache), {:_, params})
end
def is_valid_id?(id) do
UUID.info(id)
end
end
| 19.688525 | 94 | 0.589509 |
f7a77672bf297aaca9b832bd8a800a1309a3cc58 | 821 | exs | Elixir | test/authentication_test.exs | scatterbrain/goldie | db649f9555d453541d01d0707d86b41f41156640 | ["MIT"] | null | null | null | test/authentication_test.exs | scatterbrain/goldie | db649f9555d453541d01d0707d86b41f41156640 | ["MIT"] | null | null | null | test/authentication_test.exs | scatterbrain/goldie | db649f9555d453541d01d0707d86b41f41156640 | ["MIT"] | null | null | null |
defmodule AuthenticationTest do
use ExUnit.Case, async: false
alias Goldie.Component.Authentication
doctest Goldie.Component.Authentication
setup do
assert {:ok, state} = Authentication.setup(%Goldie.Player{})
on_exit fn ->
assert {:ok, _state} = Authentication.teardown(state)
end
{:ok, state: state}
end
test "event register", context do
state = context.state
assert {:ok, {:register, _}, state} = Authentication.handle_event({:register, %{passwd_hash: "123456"}}, self(), state)
assert state.authenticated == true
end
test "event login", context do
state = context.state
assert {:ok, {:auth, _}, state} = Authentication.handle_event({:auth, %{id: "player123", passwd_hash: "123456"}}, self(), state)
assert state.authenticated == true
end
end
| 26.483871 | 132 | 0.676005 |
f7a7a8be5567e65ae956409f5bc1e981263fec55 | 518 | ex | Elixir | example_app/lib/example_app/presenters/email_presenter.ex | aforward-oss/addict | dfb20b7299614becc5c472564a1e6ba6d7994615 | ["MIT"] | null | null | null | example_app/lib/example_app/presenters/email_presenter.ex | aforward-oss/addict | dfb20b7299614becc5c472564a1e6ba6d7994615 | ["MIT"] | null | null | null | example_app/lib/example_app/presenters/email_presenter.ex | aforward-oss/addict | dfb20b7299614becc5c472564a1e6ba6d7994615 | ["MIT"] | null | null | null |
defmodule ExampleApp.Presenters.EmailPresenter do
def register_template(user) do
"""
<p><b>Hi #{user.username},</b></p>
<p>Thanks for joining!</p>
<p>Cheers!</p>
<p></p>
<p>ExampleApp</p>
"""
end
def password_recovery_template(user) do
"""
<p><b>Hi #{user.username},</b></p>
<p> It seems you've lost your password! </p>
<p> Use this token <b>#{user.recovery_hash}</b> to recover your password.</p>
<p>
<p>Cheers!</p>
<p>ExampleApp</p>
"""
end
end
| 21.583333 | 82 | 0.571429 |
f7a7a8f55c82160e20a9bb18a3e6515b95fd8ed1 | 4,892 | ex | Elixir | lib/structs/emoji.ex | SpaceEEC/crux_structs | c10adb64ab6392c8601e4c53447128294daf0ae3 | ["MIT"] | 6 | 2018-05-22T07:13:47.000Z | 2019-10-17T03:40:22.000Z | lib/structs/emoji.ex | SpaceEEC/crux_structs | c10adb64ab6392c8601e4c53447128294daf0ae3 | ["MIT"] | 2 | 2018-11-01T17:11:51.000Z | 2019-06-15T13:33:31.000Z | lib/structs/emoji.ex | SpaceEEC/crux_structs | c10adb64ab6392c8601e4c53447128294daf0ae3 | ["MIT"] | 2 | 2018-10-29T06:53:41.000Z | 2019-06-14T19:15:56.000Z |
defmodule Crux.Structs.Emoji do
@moduledoc """
Represents a Discord [Emoji Object](https://discord.com/developers/docs/resources/emoji#emoji-object).
Differences opposed to the Discord API Object:
- `:user` is just the user id
"""
@moduledoc since: "0.1.0"
@behaviour Crux.Structs
alias Crux.Structs
alias Crux.Structs.{Emoji, Reaction, Snowflake, Util}
defstruct [
:id,
:name,
:roles,
:user,
:require_colons,
:managed,
:animated,
:available
]
@typedoc since: "0.1.0"
@type t :: %__MODULE__{
id: Snowflake.t() | nil,
name: String.t(),
roles: MapSet.t(Snowflake.t()),
user: Snowflake.t() | nil,
require_colons: boolean() | nil,
managed: boolean() | nil,
animated: boolean() | nil,
available: boolean() | nil
}
@typedoc """
All available types that can be resolved into an emoji id.
"""
@typedoc since: "0.2.1"
@type id_resolvable() :: Reaction.t() | Emoji.t() | Snowflake.t() | String.t()
@doc """
Resolves the id of a `t:Crux.Structs.Emoji.t/0`.
> Automatically invoked by `Crux.Structs.resolve_id/2`.
```elixir
iex> %Crux.Structs.Emoji{id: 618731477143912448}
...> |> Crux.Structs.Emoji.resolve_id()
618731477143912448
iex> %Crux.Structs.Reaction{emoji: %Crux.Structs.Emoji{id: 618731477143912448}}
...> |> Crux.Structs.Emoji.resolve_id()
618731477143912448
iex> 618731477143912448
...> |> Crux.Structs.Emoji.resolve_id()
618731477143912448
iex> "618731477143912448"
...> |> Crux.Structs.Emoji.resolve_id()
618731477143912448
```
"""
@doc since: "0.2.1"
@spec resolve_id(id_resolvable()) :: Snowflake.t() | nil
def resolve_id(%Reaction{emoji: emoji}) do
resolve_id(emoji)
end
def resolve_id(%Emoji{id: id}) do
resolve_id(id)
end
def resolve_id(resolvable), do: Structs.resolve_id(resolvable)
@doc """
Creates a `t:Crux.Structs.Emoji.t/0` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`.
"""
@doc since: "0.1.0"
@spec create(data :: map()) :: t()
def create(data) do
emoji =
data
|> Util.atomify()
|> Map.update(:id, nil, &Snowflake.to_snowflake/1)
|> Map.update(
:roles,
MapSet.new(),
&MapSet.new(&1, fn role -> Snowflake.to_snowflake(role) end)
)
|> Map.update(:user, nil, Util.map_to_id())
struct(__MODULE__, emoji)
end
@typedoc """
All available types that can be resolved into a discord emoji identifier.
> String.t() stands for an already encoded unicode emoji.
"""
@typedoc since: "0.2.1"
@type identifier_resolvable() :: Emoji.t() | Reaction.t() | String.t()
@doc ~S"""
Converts an `t:Crux.Structs.Emoji.t/0`, a `t:Crux.Structs.Reaction.t/0`, or a `t:String.t/0` to its discord identifier format.
> This is automatically done if using a appropriate rest function.
## Examples
```elixir
# A custom emoji
iex> %Crux.Structs.Emoji{animated: false, id: 396521773216301056, name: "blobwavereverse"}
...> |> Crux.Structs.Emoji.to_identifier()
"blobwavereverse:396521773216301056"
# A custom animated emoji
iex> %Crux.Structs.Emoji{animated: true, id: 396521774466203659, name: "ablobwavereverse"}
...> |> Crux.Structs.Emoji.to_identifier()
"a:ablobwavereverse:396521774466203659"
# A regular emoji
iex> %Crux.Structs.Emoji{animated: false, id: nil, name: "👋"}
...> |> Crux.Structs.Emoji.to_identifier()
"%F0%9F%91%8B"
# A reaction struct
iex> %Crux.Structs.Reaction{
...> emoji: %Crux.Structs.Emoji{animated: false, id: 356830260626456586, name: "blobReach"}
...> }
...> |> Crux.Structs.Emoji.to_identifier()
"blobReach:356830260626456586"
# An already encoded identifier
iex> "👀"
...> |> URI.encode_www_form()
...> |> Crux.Structs.Emoji.to_identifier()
"%F0%9F%91%80"
# A custom emoji's identifier
iex> "eyesRight:271412698267254784"
...> |> Crux.Structs.Emoji.to_identifier()
"eyesRight:271412698267254784"
```
"""
@doc since: "0.1.1"
@spec to_identifier(emoji :: identifier_resolvable()) :: String.t()
def to_identifier(%Crux.Structs.Reaction{emoji: emoji}), do: to_identifier(emoji)
def to_identifier(%__MODULE__{id: nil, name: name}), do: URI.encode_www_form(name)
def to_identifier(%__MODULE__{id: id, name: name, animated: true}), do: "a:#{name}:#{id}"
def to_identifier(%__MODULE__{id: id, name: name}), do: "#{name}:#{id}"
def to_identifier(identifier) when is_bitstring(identifier), do: identifier
defimpl String.Chars, for: Crux.Structs.Emoji do
@spec to_string(Emoji.t()) :: String.t()
def to_string(%Emoji{id: nil, name: name}), do: name
def to_string(%Emoji{id: id, name: name, animated: true}),
do: "<a:#{name}:#{id}>"
def to_string(%Emoji{id: id, name: name}), do: "<:#{name}:#{id}>"
end
end
| 28.608187 | 128 | 0.647383 |
f7a7f9ab0595c353f3ff8f04a010ccf45a697213 | 3,850 | ex | Elixir | lib/chat_api_web/views/customer_view.ex | utsav0209/papercups | 66d7c33dde057b5e4309db35b5f4cccdec4a76dc | ["MIT"] | null | null | null | lib/chat_api_web/views/customer_view.ex | utsav0209/papercups | 66d7c33dde057b5e4309db35b5f4cccdec4a76dc | ["MIT"] | null | null | null | lib/chat_api_web/views/customer_view.ex | utsav0209/papercups | 66d7c33dde057b5e4309db35b5f4cccdec4a76dc | ["MIT"] | null | null | null |
defmodule ChatApiWeb.CustomerView do
use ChatApiWeb, :view
alias ChatApiWeb.{
CompanyView,
ConversationView,
CustomerView,
MessageView,
NoteView,
TagView,
CSVHelpers
}
alias ChatApi.Companies.Company
alias ChatApi.Customers.Customer
@customer_csv_ordered_fields ~w(id name email created_at updated_at)a ++
~w(first_seen last_seen phone external_id)a ++
~w(host pathname current_url browser)a ++
~w(os ip time_zone)a
def render("index.json", %{customers: customers}) do
%{data: render_many(customers, CustomerView, "customer.json")}
end
def render("index.csv", %{customers: customers}) do
customers
|> render_many(CustomerView, "customer.json")
|> CSVHelpers.dump_csv_rfc4180(@customer_csv_ordered_fields)
end
def render("show.json", %{customer: customer}) do
%{data: render_one(customer, CustomerView, "customer.json")}
end
def render("basic.json", %{customer: customer}) do
%{
id: customer.id,
object: "customer",
name: customer.name,
email: customer.email,
created_at: customer.inserted_at,
updated_at: customer.updated_at,
phone: customer.phone,
external_id: customer.external_id,
profile_photo_url: customer.profile_photo_url,
company_id: customer.company_id,
host: customer.host,
pathname: customer.pathname,
current_url: customer.current_url,
browser: customer.browser,
os: customer.os,
metadata: customer.metadata
}
end
def render("customer.json", %{customer: customer}) do
%{
id: customer.id,
object: "customer",
name: customer.name,
email: customer.email,
created_at: customer.inserted_at,
updated_at: customer.updated_at,
first_seen: customer.first_seen,
last_seen: customer.last_seen,
last_seen_at: customer.last_seen_at,
phone: customer.phone,
external_id: customer.external_id,
profile_photo_url: customer.profile_photo_url,
company_id: customer.company_id,
host: customer.host,
pathname: customer.pathname,
current_url: customer.current_url,
browser: customer.browser,
os: customer.os,
ip: customer.ip,
metadata: customer.metadata,
time_zone: customer.time_zone
}
|> maybe_render_tags(customer)
|> maybe_render_notes(customer)
|> maybe_render_conversations(customer)
|> maybe_render_messages(customer)
|> maybe_render_company(customer)
end
defp maybe_render_tags(json, %Customer{tags: tags}) when is_list(tags),
do: Map.merge(json, %{tags: render_many(tags, TagView, "tag.json")})
defp maybe_render_tags(json, _), do: json
defp maybe_render_notes(json, %Customer{notes: notes}) when is_list(notes),
do: Map.merge(json, %{notes: render_many(notes, NoteView, "note.json")})
defp maybe_render_notes(json, _), do: json
defp maybe_render_conversations(json, %Customer{conversations: conversations})
when is_list(conversations) do
Map.merge(json, %{conversations: render_many(conversations, ConversationView, "basic.json")})
end
defp maybe_render_conversations(json, _), do: json
defp maybe_render_messages(json, %Customer{messages: messages}) when is_list(messages),
do: Map.merge(json, %{messages: render_many(messages, MessageView, "message.json")})
defp maybe_render_messages(json, _), do: json
defp maybe_render_company(json, %Customer{company: company}) do
case company do
nil ->
Map.merge(json, %{company: nil})
%Company{} = company ->
Map.merge(json, %{company: render_one(company, CompanyView, "company.json")})
_ ->
json
end
end
defp maybe_render_company(json, _), do: json
end
| 30.8 | 97 | 0.674545 |
f7a7fa8a46b69f938c1604df23e91484c6dbe0ae | 2,309 | ex | Elixir | lib/dispenser/subscription_manager.ex | LaudateCorpus1/dispenser | 5cdba1c0efa6b2c8cb2a878dc14ebee0717b8765 | ["MIT"] | 1 | 2021-12-05T20:55:40.000Z | 2021-12-05T20:55:40.000Z | lib/dispenser/subscription_manager.ex | LaudateCorpus1/dispenser | 5cdba1c0efa6b2c8cb2a878dc14ebee0717b8765 | ["MIT"] | null | null | null | lib/dispenser/subscription_manager.ex | LaudateCorpus1/dispenser | 5cdba1c0efa6b2c8cb2a878dc14ebee0717b8765 | ["MIT"] | 3 | 2022-01-25T14:15:52.000Z | 2022-03-17T09:38:04.000Z |
defmodule Dispenser.SubscriptionManager do
@moduledoc """
`SubscriptionManager` handles monitoring and demonitoring subscribers
"""
@typedoc """
The opaque internal state of the `SubscriptionManager`.
"""
@opaque t() :: %__MODULE__{
subscribers: %{pid() => reference()}
}
@enforce_keys [:subscribers]
defstruct subscribers: %{}
@spec new() :: t()
def new() do
%__MODULE__{subscribers: %{}}
end
@doc """
Monitor the given pid using `Process.monitor/1`.
Callers must handle the :DOWN message from this pid.
"""
@spec monitor(t(), pid()) :: t()
def monitor(%__MODULE__{} = state, subscriber) when is_pid(subscriber) do
if Map.has_key?(state.subscribers, subscriber) do
state
else
ref = Process.monitor(subscriber)
subscribers = Map.put(state.subscribers, subscriber, ref)
%__MODULE__{state | subscribers: subscribers}
end
end
@doc """
Stop monitoring the given subscriber.
"""
@spec demonitor(t(), pid()) :: {:ok, t()} | {:error, :not_subscribed}
def demonitor(%__MODULE__{} = state, subscriber) when is_pid(subscriber) do
case Map.fetch(state.subscribers, subscriber) do
{:ok, ref} ->
Process.demonitor(ref, [:flush])
subscribers = Map.delete(state.subscribers, subscriber)
state = %__MODULE__{state | subscribers: subscribers}
{:ok, state}
_ ->
{:error, :not_subscribed}
end
end
@doc """
Handle the down signal from a monitored subscriber.
"""
@spec down(t(), pid(), reference()) ::
{:ok, t()} | {:error, :wrong_ref} | {:error, :not_subscribed}
def down(%__MODULE__{} = state, subscriber, ref)
when is_pid(subscriber) and is_reference(ref) do
case Map.fetch(state.subscribers, subscriber) do
{:ok, ^ref} ->
Process.demonitor(ref, [:flush])
subscribers = Map.delete(state.subscribers, subscriber)
state = %__MODULE__{state | subscribers: subscribers}
{:ok, state}
{:ok, _ref} ->
{:error, :wrong_ref}
_ ->
{:error, :not_subscribed}
end
end
@doc """
Get the number of currently monitored subscribers.
"""
@spec size(t()) :: non_neg_integer()
def size(%__MODULE__{} = state) do
map_size(state.subscribers)
end
end
| 27.819277 | 77 | 0.624513 |
f7a806f889e3926b89d7783087fbe4a54bf110cf | 2,390 | exs | Elixir | test/phoenix_live_view/integrations/layout_test.exs | jonatanklosko/phoenix_live_view | 95d5c7ccd0ac66e04b15c7b6128d44b60767e682 | ["MIT"] | 2 | 2021-05-15T05:20:19.000Z | 2021-05-20T17:55:04.000Z | test/phoenix_live_view/integrations/layout_test.exs | jonatanklosko/phoenix_live_view | 95d5c7ccd0ac66e04b15c7b6128d44b60767e682 | ["MIT"] | 1 | 2021-05-12T12:14:27.000Z | 2021-05-12T12:14:27.000Z | test/phoenix_live_view/integrations/layout_test.exs | jonatanklosko/phoenix_live_view | 95d5c7ccd0ac66e04b15c7b6128d44b60767e682 | ["MIT"] | 1 | 2021-01-14T12:58:22.000Z | 2021-01-14T12:58:22.000Z |
defmodule Phoenix.LiveView.LayoutTest do
use ExUnit.Case, async: true
import Phoenix.ConnTest
import Phoenix.LiveViewTest
alias Phoenix.LiveViewTest.{Endpoint, LayoutView}
@endpoint Endpoint
setup config do
{:ok,
conn: Plug.Test.init_test_session(Phoenix.ConnTest.build_conn(), config[:session] || %{})}
end
test "uses dead layout from router", %{conn: conn} do
assert_raise Plug.Conn.WrapperError,
~r"\(UndefinedFunctionError\) function UnknownView.render/2",
fn -> live(conn, "/bad_layout") end
{:ok, _, _} = live(conn, "/layout")
end
test "is picked from config on use", %{conn: conn} do
{:ok, view, html} = live(conn, "/layout")
assert html =~ ~r|^LAYOUT<div[^>]+>LIVELAYOUTSTART\-123\-The value is: 123\-LIVELAYOUTEND|
assert render_click(view, :double) ==
"LIVELAYOUTSTART-246-The value is: 246-LIVELAYOUTEND\n"
end
@tag session: %{live_layout: {LayoutView, "live-override.html"}}
test "is picked from config on mount when given a layout", %{conn: conn} do
{:ok, view, html} = live(conn, "/layout")
assert html =~
~r|^LAYOUT<div[^>]+>LIVEOVERRIDESTART\-123\-The value is: 123\-LIVEOVERRIDEEND|
assert render_click(view, :double) ==
"LIVEOVERRIDESTART-246-The value is: 246-LIVEOVERRIDEEND\n"
end
@tag session: %{live_layout: false}
test "is picked from config on mount when given false", %{conn: conn} do
{:ok, view, html} = live(conn, "/layout")
assert html =~ "The value is: 123</div>"
assert render_click(view, :double) == "The value is: 246"
end
test "is not picked from config on use for child live views", %{conn: conn} do
assert get(conn, "/parent_layout") |> html_response(200) =~
"The value is: 123</div>"
{:ok, _view, html} = live(conn, "/parent_layout")
assert html =~ "The value is: 123</div>"
end
@tag session: %{live_layout: {LayoutView, "live-override.html"}}
test "is picked from config on mount even on child live views", %{conn: conn} do
assert get(conn, "/parent_layout") |> html_response(200) =~
~r|<div[^>]+>LIVEOVERRIDESTART\-123\-The value is: 123\-LIVEOVERRIDEEND|
{:ok, _view, html} = live(conn, "/parent_layout")
assert html =~
~r|<div[^>]+>LIVEOVERRIDESTART\-123\-The value is: 123\-LIVEOVERRIDEEND|
end
end
| 35.147059 | 95 | 0.641841 |
f7a81a6ea6c1cd263e82e12879ee692b918d8a84 | 52,392 | ex | Elixir | lib/axon/layers.ex | arpieb/axon | d42cc99811cf82fd5a244031ec497276cb859da8 | ["Apache-2.0"] | null | null | null | lib/axon/layers.ex | arpieb/axon | d42cc99811cf82fd5a244031ec497276cb859da8 | ["Apache-2.0"] | null | null | null | lib/axon/layers.ex | arpieb/axon | d42cc99811cf82fd5a244031ec497276cb859da8 | ["Apache-2.0"] | null | null | null |
defmodule Axon.Layers do
@moduledoc ~S"""
Functional implementations of common neural network layer
operations.
Layers are the building blocks of neural networks. These
functional implementations can be used to express higher-level
constructs using fundamental building blocks. Neural network
layers are stateful with respect to their parameters.
These implementations do not assume the responsibility of
managing state - instead opting to delegate this responsibility
to the caller.
Basic neural networks can be seen as a composition of functions:
input
|> dense(w1, b1)
|> relu()
|> dense(w2, b2)
|> softmax()
These kinds of models are often referred to as deep feedforward networks
or multilayer perceptrons (MLPs) because information flows forward
through the network with no feedback connections. Mathematically,
a feedforward network can be represented as:
$$f(x) = f^{(3)}(f^{(2)}(f^{(1)}(x)))$$
You can see a similar pattern emerge if we condense the call stack
in the previous example:
softmax(dense(relu(dense(input, w1, b1)), w2, b2))
The chain structure shown here is the most common structure used
in neural networks. You can consider each function $f^{(n)}$ as a
*layer* in the neural network - for example $f^{(2)}$ is the 2nd
layer in the network. The number of function calls in the
structure is the *depth* of the network. This is where the term
*deep learning* comes from.
Neural networks are often written as the mapping:
$$y = f(x; \theta)$$
Where $x$ is the input to the neural network and $\theta$ are the
set of learned parameters. In Elixir, you would write this:
y = model(input, params)
From the previous example, `params` would represent the collection:
{w1, b1, w2, b2}
where `w1` and `w2` are layer *weights*, and `b1` and `b2` are layer
*biases*.
"""
import Nx.Defn
import Axon.Shared
## Linear
@doc ~S"""
Functional implementation of a dense layer.
Linear transformation of the input such that:
$$y = xW^T + b$$
A dense layer or fully connected layer transforms
the input using the given weight matrix and bias
to compute:
Nx.dot(input, weight) + bias
Typically, both `weight` and `bias` are learnable
parameters trained using gradient-based optimization.
## Parameter Shapes
* `input` - `{batch_size, ..., input_features}`
* `weight` - `{input_features, output_features}`
* `bias` - `{output_features}`
## Output Shape
`{batch_size, ..., output_features}`
## Examples
iex> input = Nx.tensor([[1.0, 0.5, 1.0, 0.5], [0.0, 0.0, 0.0, 0.0]], type: {:f, 32})
iex> weight = Nx.tensor([[0.2], [0.3], [0.5], [0.8]], type: {:f, 32})
iex> bias = Nx.tensor([1.0], type: {:f, 32})
iex> Axon.Layers.dense(input, weight, bias)
#Nx.Tensor<
f32[2][1]
[
[2.25],
[1.0]
]
>
"""
@doc type: :linear
defn dense(input, weight, bias) do
input
|> Nx.dot([Nx.rank(input) - 1], weight, [0])
|> Nx.add(bias)
end
@doc ~S"""
Functional implementation of a bilinear layer.
Bilinear transformation of the input such that:
$$y = x_1^{T}Ax_2 + b$$
## Parameter Shapes
* `input1` - `{batch_size, ..., input1_features}`
* `input2` - `{batch_size, ..., input2_features}`
* `weight` - `{out_features, input1_features, input2_features}`
## Output Shape
`{batch_size, ..., output_features}`
## Examples
iex> inp1 = Nx.iota({3, 2}, type: {:f, 32})
iex> inp2 = Nx.iota({3, 4}, type: {:f, 32})
iex> weight = Nx.iota({1, 2, 4}, type: {:f, 32})
iex> bias = Nx.tensor(1.0)
iex> Axon.Layers.bilinear(inp1, inp2, weight, bias)
#Nx.Tensor<
f32[3][1]
[
[39.0],
[455.0],
[1319.0]
]
>
"""
@doc type: :linear
defn bilinear(input1, input2, weight, bias) do
inp1_axes = transform(Nx.rank(input1), fn rank -> [rank - 1] end)
inp2_axes = transform(Nx.rank(input2), fn rank -> [rank - 1] end)
input1
|> Nx.dot(inp1_axes, [], weight, [1], [])
|> Nx.dot([2], [0], input2, inp2_axes, [0])
|> Nx.add(bias)
end
## Convolutional
@doc """
Functional implementation of a general dimensional convolutional
layer.
Convolutional layers can be described as applying a convolution
over an input signal composed of several input planes. Intuitively,
the input kernel slides `output_channels` number of filters over
the input tensor to extract features from the input tensor.
Convolutional layers are most commonly used in computer vision,
but can also be useful when working with sequences and other input signals.
## Parameter Shapes
* `input` - `{batch_size, input_channels, input_spatial0, ..., input_spatialN}`
* `weight` - `{output_channels, input_channels, kernel_spatial0, ..., kernel_spatialN}`
* `bias` - `{output_channels}` or `{}`
## Options
* `:strides` - kernel strides. Can be a scalar or a list
whose length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:input_dilation` - input dilation factor. Equivalent
to applying interior padding on the input. The amount
of interior padding applied is given by `input_dilation - 1`.
Defaults to `1` or no dilation.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
* `:feature_group_size` - size of feature groups for grouped
convolutions. Defaults to `1`.
* `:batch_group_size` - size of batch groups. Defaults to `1`.
## Examples
### One-dimensional convolution
iex> input = Nx.tensor([[[0.1294, -0.6638, 1.0251]], [[ 0.9182, 1.1512, -1.6149]]], type: {:f, 32})
iex> weight = Nx.tensor([[[-1.5475, 1.2425]], [[0.1871, 0.5458]], [[-0.4488, 0.8879]]], type: {:f, 32})
iex> bias = Nx.tensor([0.7791, 0.1676, 1.5971], type: {:f, 32})
iex> Axon.Layers.conv(input, weight, bias)
#Nx.Tensor<
f32[2][3][2]
[
[
[-0.24591797590255737, 3.08001708984375],
[-0.1704912781715393, 0.6029025316238403],
[0.9496372938156128, 2.80519962310791]
],
[
[0.7885514497756958, -3.0088953971862793],
[0.9677201509475708, -0.4984228312969208],
[2.207162380218506, -0.3534282445907593]
]
]
>
### Two-dimensional convolution
iex> input = Nx.tensor([[[[-1.0476, -0.5041], [-0.9336, 1.5907]]]], type: {:f, 32})
iex> weight = Nx.tensor([
...> [[[0.7514, 0.7356], [1.3909, 0.6800]]],
...> [[[-0.3450, 0.4551], [-0.6275, -0.9875]]],
...> [[[1.8587, 0.4722], [0.6058, -1.0301]]]
...> ], type: {:f, 32})
iex> bias = Nx.tensor([1.9564, 0.2822, -0.5385], type: {:f, 32})
iex> Axon.Layers.conv(input, weight, bias)
#Nx.Tensor<
f32[1][3][1][1]
[
[
[
[0.5815491676330566]
],
[
[-0.5707762241363525]
],
[
[-4.927865028381348]
]
]
]
>
### Three-dimensional convolution
iex> input = Nx.tensor([[[[[-0.6497], [1.0939]], [[-2.5465], [0.7801]]]]], type: {:f, 32})
iex> weight = Nx.tensor([
...> [[[[ 0.7390], [-0.0927]], [[-0.8675], [-0.9209]]]],
...> [[[[-0.6638], [0.4341]], [[0.6368], [1.1846]]]]
...> ], type: {:f, 32})
iex> bias = Nx.tensor([-0.4101, 0.1776], type: {:f, 32})
iex> Axon.Layers.conv(input, weight, bias)
#Nx.Tensor<
f32[1][2][1][1][1]
[
[
[
[
[0.49906185269355774]
]
],
[
[
[0.38622811436653137]
]
]
]
]
>
"""
@doc type: :convolutional
defn conv(input, weight, bias, opts \\ []) do
opts =
keyword!(opts,
strides: 1,
padding: :valid,
input_dilation: 1,
kernel_dilation: 1,
feature_group_size: 1,
batch_group_size: 1
)
bias_reshape =
transform({Nx.shape(bias), Nx.rank(input) - 2}, fn {bias_shape, rank} ->
Axon.Shape.conv_bias_reshape(bias_shape, rank)
end)
input
|> Nx.conv(weight,
strides: opts[:strides],
padding: opts[:padding],
input_dilation: opts[:input_dilation],
kernel_dilation: opts[:kernel_dilation],
feature_group_size: opts[:feature_group_size],
batch_group_size: opts[:batch_group_size]
)
|> Nx.add(Nx.reshape(bias, bias_reshape))
end
@doc """
Functional implementation of a general dimensional transposed
convolutional layer.
*Note: This layer is currently implemented as a fractionally strided
convolution by padding the input tensor. Please open an issue if you'd
like this behavior changed.*
Transposed convolutions are sometimes (incorrectly) referred to as
deconvolutions because they "reverse" the spatial dimensions
of a normal convolution. Transposed convolutions are a form of upsampling -
they produce larger spatial dimensions than the input tensor. They
can be thought of as a convolution in reverse - and are sometimes
implemented as the backward pass of a normal convolution.
## Options
* `:strides` - kernel strides. Can be a scalar or a list
whose length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
## Examples
iex> input = Nx.iota({1, 3, 3}, type: {:f, 32})
iex> kernel = Nx.iota({6, 3, 2}, type: {:f, 32})
iex> bias = Nx.tensor(1.0, type: {:f, 32})
iex> Axon.Layers.conv_transpose(input, kernel, bias)
#Nx.Tensor<
f32[1][6][4]
[
[
[40.0, 79.0, 94.0, 43.0],
[94.0, 205.0, 256.0, 133.0],
[148.0, 331.0, 418.0, 223.0],
[202.0, 457.0, 580.0, 313.0],
[256.0, 583.0, 742.0, 403.0],
[310.0, 709.0, 904.0, 493.0]
]
]
>
## References
* [A guide to convolution arithmethic for deep learning](https://arxiv.org/abs/1603.07285v1)
* [Deconvolutional Networks](https://www.matthewzeiler.com/mattzeiler/deconvolutionalnetworks.pdf)
"""
@doc type: :convolutional
defn conv_transpose(input, weight, bias, opts \\ []) do
assert_equal_rank!(input, weight)
opts =
keyword!(opts,
strides: 1,
padding: :valid,
kernel_dilation: 1
)
strides =
transform(
{Nx.rank(input), opts[:strides]},
fn
{_, [_ | _] = strides} -> strides
{rank, strides} -> List.duplicate(strides, rank - 2)
end
)
padding =
transform(
{Nx.shape(weight), opts[:kernel_dilation], strides, opts[:padding]},
fn {shape, k_dilation, strides, padding} ->
Axon.Shape.conv_transpose_padding(shape, k_dilation, strides, padding)
end
)
conv(input, weight, bias,
strides: opts[:strides],
padding: padding,
kernel_dilation: opts[:kernel_dilation]
)
end
@doc """
Functional implementation of a general dimensional depthwise
convolution.
Depthwise convolutions apply a single convolutional filter to
each input channel. This is done by setting `feature_group_size`
equal to the number of input channels. This will split the
`output_channels` into `input_channels` number of groups and
convolve the grouped kernel channels over the corresponding input
channel.
## Parameter Shapes
* `input` - `{batch_size, input_channels, input_spatial0, ..., input_spatialN}`
* `weight` - `{output_channels, 1, kernel_spatial0, ..., kernel_spatialN}`
* `bias` - `{output_channels}` or `{}`
`output_channels` must be a multiple of the input channels.
## Options
* `:strides` - kernel strides. Can be a scalar or a list
whose length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:input_dilation` - input dilation factor. Equivalent
to applying interior padding on the input. The amount
of interior padding applied is given by `input_dilation - 1`.
Defaults to `1` or no dilation.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
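## Examples
A shape-only sketch (values are random, so plain code is shown here
rather than verified doctest output):
    # 3 input channels, one 2x2 filter per channel
    input = Nx.random_uniform({1, 3, 8, 8})
    weight = Nx.random_uniform({3, 1, 2, 2})
    bias = Nx.broadcast(0.0, {3})
    Axon.Layers.depthwise_conv(input, weight, bias)
    #=> shape {1, 3, 7, 7}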
"""
@doc type: :convolutional
defn depthwise_conv(input, weight, bias, opts \\ []) do
assert_equal_rank!(input, weight)
opts =
keyword!(opts,
strides: 1,
padding: :valid,
input_dilation: 1,
kernel_dilation: 1
)
num_groups = transform(Nx.shape(input), &elem(&1, 1))
conv(input, weight, bias,
strides: opts[:strides],
padding: opts[:padding],
input_dilation: opts[:input_dilation],
kernel_dilation: opts[:kernel_dilation],
feature_group_size: num_groups
)
end
@doc """
Functional implementation of a 2-dimensional separable depthwise
convolution.
The 2-d depthwise separable convolution performs 2 depthwise convolutions
each over 1 spatial dimension of the input.
## Parameter Shapes
* `input` - `{batch_size, input_channels, input_spatial0, ..., input_spatialN}`
* `k1` - `{output_channels, 1, kernel_spatial0, 1}`
* `b1` - `{output_channels}` or `{}`
* `k2` - `{output_channels, 1, 1, kernel_spatial1}`
* `b2` - `{output_channels}` or `{}`
`output_channels` must be a multiple of the input channels.
## Options
* `:strides` - kernel strides. Can be a scalar or a list
whose length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:input_dilation` - input dilation factor. Equivalent
to applying interior padding on the input. The amount
of interior padding applied is given by `input_dilation - 1`.
Defaults to `1` or no dilation.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
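## Examples
A shape-only sketch (illustrative shapes; plain code rather than
verified doctest output). Each kernel convolves over one spatial
dimension of the input:
    input = Nx.random_uniform({1, 3, 8, 8})
    k1 = Nx.random_uniform({3, 1, 3, 1})
    k2 = Nx.random_uniform({3, 1, 1, 3})
    bias = Nx.broadcast(0.0, {3})
    Axon.Layers.separable_conv2d(input, k1, bias, k2, bias)
    #=> shape {1, 3, 6, 6}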
## References
* [Xception: Deep Learning with Depthwise Separable Convolutions](https://arxiv.org/abs/1610.02357)
"""
@doc type: :convolutional
defn separable_conv2d(input, k1, b1, k2, b2, opts \\ []) do
input
|> depthwise_conv(k1, b1, opts)
|> depthwise_conv(k2, b2, opts)
end
@doc """
Functional implementation of a 3-dimensional separable depthwise
convolution.
The 3-d depthwise separable convolution performs 3 depthwise convolutions
each over 1 spatial dimension of the input.
## Parameter Shapes
* `input` - `{batch_size, input_channels, input_spatial0, ..., input_spatialN}`
* `k1` - `{output_channels, 1, kernel_spatial0, 1, 1}`
* `b1` - `{output_channels}` or `{}`
* `k2` - `{output_channels, 1, 1, kernel_spatial1, 1}`
* `b2` - `{output_channels}` or `{}`
* `k3` - `{output_channels, 1, 1, 1, 1, kernel_spatial2}`
* `b3` - `{output_channels}` or `{}`
`output_channels` must be a multiple of the input channels.
## Options
* `:strides` - kernel strides. Can be a scalar or a list
whose length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:input_dilation` - input dilation factor. Equivalent
to applying interior padding on the input. The amount
of interior padding applied is given by `input_dilation - 1`.
Defaults to `1` or no dilation.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
## References
* [Xception: Deep Learning with Depthwise Separable Convolutions](https://arxiv.org/abs/1610.02357)
"""
@doc type: :convolutional
defn separable_conv3d(input, k1, b1, k2, b2, k3, b3, opts \\ []) do
input
|> depthwise_conv(k1, b1, opts)
|> depthwise_conv(k2, b2, opts)
|> depthwise_conv(k3, b3, opts)
end
@doc """
Functional implementation of a general dimensional max pooling layer.
Pooling is applied to the spatial dimension of the input tensor.
Max pooling returns the maximum element in each valid window of
the input tensor. It is often used after convolutional layers
to downsample the input even further.
## Options
* `kernel_size` - window size. Rank must match spatial dimension
of the input tensor. Required.
* `:strides` - kernel strides. Can be a scalar or a list
whose length matches the number of spatial dimensions in
the input tensor. Defaults to size of kernel.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:window_dilations` - window dilation factor. Equivalent
to applying interior padding on the window. The amount
of interior padding applied is given by `window_dilation - 1`.
Can be scalar or list whose length matches the number of
spatial dimensions in the input tensor. Defaults to `1` or no
dilation.
## Examples
iex> t = Nx.tensor([[
...> [0.051500000059604645, -0.7042999863624573, -0.32899999618530273],
...> [-0.37130001187324524, 1.6191999912261963, -0.11829999834299088],
...> [0.7099999785423279, 0.7282999753952026, -0.18639999628067017]]], type: {:f, 32})
iex> Axon.Layers.max_pool(t, kernel_size: 2)
#Nx.Tensor<
f32[1][3][1]
[
[
[0.051500000059604645],
[1.6191999912261963],
[0.7282999753952026]
]
]
>
"""
@doc type: :pooling
defn max_pool(input, opts \\ []) do
opts =
keyword!(
opts,
[:kernel_size, strides: nil, padding: :valid, window_dilations: 1]
)
window_dimensions =
transform(
{Nx.rank(input), opts[:kernel_size]},
fn {rank, kernel_size} ->
Axon.Shape.pool_window_size(kernel_size, rank - 2)
end
)
strides =
transform(
{Nx.rank(input), opts[:strides], window_dimensions},
fn
{_, nil, dims} -> Tuple.to_list(dims)
{_, [_ | _] = strides, _} -> [1, 1 | strides]
{rank, strides, _} -> [1, 1 | List.duplicate(strides, rank - 2)]
end
)
padding =
transform(
opts[:padding],
fn
:same ->
:same
:valid ->
:valid
padding ->
[{0, 0}, {0, 0} | padding]
end
)
opts = transform(opts, &Keyword.delete(&1, :kernel_size))
input
|> Nx.window_max(window_dimensions,
strides: strides,
padding: padding,
window_dilations: opts[:window_dilations]
)
end
@doc """
A general dimensional functional average pooling layer.
Pooling is applied to the spatial dimension of the input tensor.
Average pooling returns the average of all elements in valid
windows in the input tensor. It is often used after convolutional
layers to downsample the input even further.
## Options
* `kernel_size` - window size. Rank must match spatial dimension
of the input tensor. Required.
* `:strides` - kernel strides. Can be a scalar or a list
whose length matches the number of spatial dimensions in
the input tensor. Defaults to size of kernel.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:window_dilations` - window dilation factor. Equivalent
to applying interior padding on the window. The amount
of interior padding applied is given by `window_dilation - 1`.
Can be scalar or list whose length matches the number of
spatial dimensions in the input tensor. Defaults to `1` or no
dilation.
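## Examples
A small worked sketch (plain code rather than a doctest; the result
follows from averaging the two non-overlapping windows of size 2):
    input = Nx.iota({1, 1, 4}, type: {:f, 32})
    Axon.Layers.avg_pool(input, kernel_size: 2)
    #=> [[[0.5, 2.5]]], shape {1, 1, 2}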
"""
@doc type: :pooling
defn avg_pool(input, opts \\ []) do
opts =
keyword!(
opts,
[:kernel_size, strides: nil, padding: :valid, window_dilations: 1]
)
window_dimensions =
transform(
{Nx.rank(input), opts[:kernel_size]},
fn {rank, kernel_size} ->
Axon.Shape.pool_window_size(kernel_size, rank - 2)
end
)
strides =
transform(
{Nx.rank(input), opts[:strides], window_dimensions},
fn
{_, nil, dims} -> Tuple.to_list(dims)
{_, [_ | _] = strides, _} -> [1, 1 | strides]
{rank, strides, _} -> [1, 1 | List.duplicate(strides, rank - 2)]
end
)
padding =
transform(
opts[:padding],
fn
:same ->
:same
:valid ->
:valid
padding ->
[{0, 0}, {0, 0} | padding]
end
)
opts = transform(opts, &Keyword.delete(&1, :kernel_size))
input
|> Nx.window_mean(window_dimensions,
strides: strides,
padding: padding,
window_dilations: opts[:window_dilations]
)
end
@doc ~S"""
Functional implementation of a general dimensional power average
pooling layer.
Pooling is applied to the spatial dimension of the input tensor.
Power average pooling computes the following function on each
valid window of the input tensor:
$$f(X) = \sqrt[p]{\sum_{x \in X} x^{p}}$$
Where $p$ is given by the keyword argument `:norm`. As $p$ approaches
infinity, it becomes equivalent to max pooling.
## Options
* `:norm` - $p$ from above equation. Defaults to 2.
* `:kernel_size` - window size. Rank must match spatial dimension
of the input tensor. Required.
* `:strides` - kernel strides. Can be a scalar or a list
whose length matches the number of spatial dimensions in
the input tensor. Defaults to size of kernel.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:window_dilations` - window dilation factor. Equivalent
to applying interior padding on the window. The amount
of interior padding applied is given by `window_dilation - 1`.
Can be scalar or list whose length matches the number of
spatial dimensions in the input tensor. Defaults to `1` or no
dilation.
## Examples
iex> t = Nx.tensor([[[0.9450, 0.4684, 1.8146], [1.2663, 0.4354, -0.0781], [-0.4759, 0.3251, 0.8742]]], type: {:f, 32})
iex> Axon.Layers.lp_pool(t, kernel_size: 2, norm: 2)
#Nx.Tensor<
f32[1][3][1]
[
[
[1.0547149181365967],
[1.3390626907348633],
[0.5763426423072815]
]
]
>
"""
@doc type: :pooling
defn lp_pool(input, opts \\ []) do
opts =
keyword!(
opts,
[:kernel_size, strides: nil, padding: :valid, window_dilations: 1, norm: 2]
)
window_dimensions =
transform(
{Nx.rank(input), opts[:kernel_size]},
fn {rank, kernel_size} ->
Axon.Shape.pool_window_size(kernel_size, rank - 2)
end
)
strides =
transform(
{Nx.rank(input), opts[:strides], window_dimensions},
fn
{_, nil, dims} -> Tuple.to_list(dims)
{_, [_ | _] = strides, _} -> [1, 1 | strides]
{rank, strides, _} -> [1, 1 | List.duplicate(strides, rank - 2)]
end
)
padding =
transform(
opts[:padding],
fn
:same ->
:same
:valid ->
:valid
padding ->
[{0, 0}, {0, 0} | padding]
end
)
norm = opts[:norm]
opts =
opts
|> transform(&Keyword.delete(&1, :kernel_size))
|> transform(&Keyword.delete(&1, :norm))
input
|> Nx.power(norm)
|> Nx.window_sum(window_dimensions,
strides: strides,
padding: padding,
window_dilations: opts[:window_dilations]
)
|> Nx.power(Nx.divide(Nx.tensor(1, type: Nx.type(input)), norm))
end
@doc """
Functional implementation of general dimensional adaptive average
pooling.
Adaptive pooling allows you to specify the desired output size
of the transformed input. This will automatically adapt the
window size and strides to obtain the desired output size. It
will then perform average pooling using the calculated window
size and strides.
Adaptive pooling can be useful when working on multiple inputs with
different spatial input shapes. You can guarantee the output of
an adaptive pooling operation is always the same size regardless
of input shape.
## Options
* `:output_size` - spatial output size. Must be a tuple with
size equal to the spatial dimensions in the input tensor.
Required.
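## Examples
A shape-only sketch (illustrative, not verified doctest output). The
spatial dimensions of the output always match `:output_size`; the same
shape behavior applies to `adaptive_max_pool/2` and `adaptive_lp_pool/2`:
    input = Nx.random_uniform({1, 3, 8, 8})
    Axon.Layers.adaptive_avg_pool(input, output_size: {4, 4})
    #=> shape {1, 3, 4, 4}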
"""
@doc type: :pooling
defn adaptive_avg_pool(input, opts \\ []) do
opts = keyword!(opts, [:output_size])
window_strides =
transform(
{Nx.shape(input), Nx.rank(input), opts[:output_size]},
fn {shape, rank, output_size} ->
Axon.Shape.adaptive_pool_window_strides(shape, output_size, rank - 2)
end
)
window_dimensions =
transform(
{Nx.shape(input), Nx.rank(input), window_strides, opts[:output_size]},
fn {shape, rank, strides, output_size} ->
Axon.Shape.adaptive_pool_window_size(shape, strides, output_size, rank - 2)
end
)
input
|> Nx.window_mean(window_dimensions, padding: :valid, strides: window_strides)
end
@doc """
Functional implementation of general dimensional adaptive max
pooling.
Adaptive pooling allows you to specify the desired output size
of the transformed input. This will automatically adapt the
window size and strides to obtain the desired output size. It
will then perform max pooling using the calculated window
size and strides.
Adaptive pooling can be useful when working on multiple inputs with
different spatial input shapes. You can guarantee the output of
an adaptive pooling operation is always the same size regardless
of input shape.
## Options
* `:output_size` - spatial output size. Must be a tuple with
size equal to the spatial dimensions in the input tensor.
Required.
"""
@doc type: :pooling
defn adaptive_max_pool(input, opts \\ []) do
opts = keyword!(opts, [:output_size])
window_strides =
transform(
{Nx.shape(input), Nx.rank(input), opts[:output_size]},
fn {shape, rank, output_size} ->
Axon.Shape.adaptive_pool_window_strides(shape, output_size, rank - 2)
end
)
window_dimensions =
transform(
{Nx.shape(input), Nx.rank(input), window_strides, opts[:output_size]},
fn {shape, rank, strides, output_size} ->
Axon.Shape.adaptive_pool_window_size(shape, strides, output_size, rank - 2)
end
)
input
|> Nx.window_max(window_dimensions, padding: :valid, strides: window_strides)
end
@doc """
Functional implementation of general dimensional adaptive power
average pooling.
Computes:
$$f(X) = \sqrt[p]{\sum_{x \in X} x^{p}}$$
Adaptive pooling allows you to specify the desired output size
of the transformed input. This will automatically adapt the
window size and strides to obtain the desired output size. It
will then perform power average pooling using the calculated window
size and strides.
Adaptive pooling can be useful when working on multiple inputs with
different spatial input shapes. You can guarantee the output of
an adaptive pooling operation is always the same size regardless
of input shape.
## Options
* `:norm` - $p$ from above equation. Defaults to 2.
* `:output_size` - spatial output size. Must be a tuple with
size equal to the spatial dimensions in the input tensor.
Required.
"""
@doc type: :pooling
defn adaptive_lp_pool(input, opts \\ []) do
opts = keyword!(opts, [:output_size, norm: 2])
norm = opts[:norm]
window_strides =
transform(
{Nx.shape(input), Nx.rank(input), opts[:output_size]},
fn {shape, rank, output_size} ->
Axon.Shape.adaptive_pool_window_strides(shape, output_size, rank - 2)
end
)
window_dimensions =
transform(
{Nx.shape(input), Nx.rank(input), window_strides, opts[:output_size]},
fn {shape, rank, strides, output_size} ->
Axon.Shape.adaptive_pool_window_size(shape, strides, output_size, rank - 2)
end
)
input
|> Nx.power(norm)
|> Nx.window_sum(window_dimensions, padding: :valid, strides: window_strides)
|> Nx.power(Nx.divide(Nx.tensor(1, type: Nx.type(input)), norm))
end
## Normalization
@doc ~S"""
Functional implementation of batch normalization.
Normalizes the input by calculating mean and variance of the
input tensor along every dimension but the given `:channel_index`,
and then scaling according to:
$$y = \frac{x - E[x]}{\sqrt{Var[x] + \epsilon}} * \gamma + \beta$$
`gamma` and `beta` are often trainable parameters. This method does
not maintain an EMA of mean and variance.
## Options
* `:epsilon` - numerical stability term. $\epsilon$ in the above
formulation.
* `:channel_index` - channel index used to determine reduction
axes for mean and variance calculation.
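## Examples
A shape-only sketch (illustrative; `gamma` and `bias` carry one element
per channel):
    input = Nx.random_uniform({2, 3, 4, 4})
    gamma = Nx.broadcast(1.0, {3})
    bias = Nx.broadcast(0.0, {3})
    Axon.Layers.batch_norm(input, gamma, bias)
    #=> shape {2, 3, 4, 4}, normalized over every axis but the channel axis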
## References
* [Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift](https://arxiv.org/abs/1502.03167)
"""
@doc type: :normalization
defn batch_norm(input, gamma, bias, opts \\ []) do
opts = keyword!(opts, epsilon: 1.0e-5, channel_index: 1)
axes =
transform({Nx.axes(input), opts[:channel_index]}, fn {axes, channel} ->
Axon.Shape.batch_norm_axes(axes, channel)
end)
channel_index = opts[:channel_index]
num_channels =
transform({input, channel_index}, fn {inp, channel_idx} ->
elem(Nx.shape(inp), channel_idx)
end)
{gamma, bias} =
transform({gamma, bias, Nx.rank(input), num_channels, channel_index}, fn {g, b, rank,
num_channels,
channel_idx} ->
new_shape =
1
|> List.duplicate(rank)
|> List.to_tuple()
|> put_elem(channel_idx, num_channels)
{Nx.reshape(g, new_shape), Nx.reshape(b, new_shape)}
end)
{mean, var} = mean_and_variance(input, axes: axes)
normalize(input, mean, var, gamma, bias, epsilon: opts[:epsilon])
end
@doc ~S"""
Functional implementation of layer normalization.
Normalizes the input by calculating mean and variance of the
input tensor along the given feature dimension `:channel_index`.
$$y = \frac{x - E[x]}{\sqrt{Var[x] + \epsilon}} * \gamma + \beta$$
`gamma` and `beta` are often trainable parameters. This method does
not maintain an EMA of mean and variance.
## Options
* `:epsilon` - numerical stability term. $\epsilon$ in the above
formulation.
* `:channel_index` - channel index used to determine reduction
axes for mean and variance calculation.
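## Examples
A shape-only sketch (illustrative; `gamma` and `bias` carry one element
per feature along the normalized axis):
    input = Nx.random_uniform({2, 5})
    gamma = Nx.broadcast(1.0, {5})
    bias = Nx.broadcast(0.0, {5})
    Axon.Layers.layer_norm(input, gamma, bias)
    #=> shape {2, 5}, normalized along the feature axis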
"""
@doc type: :normalization
defn layer_norm(input, gamma, bias, opts \\ []) do
opts = keyword!(opts, epsilon: 1.0e-5, channel_index: 1)
axes = opts[:channel_index]
channel_index = opts[:channel_index]
num_channels =
transform({input, channel_index}, fn {inp, channel_idx} ->
elem(Nx.shape(inp), channel_idx)
end)
{gamma, bias} =
transform({gamma, bias, Nx.rank(input), num_channels, channel_index}, fn {g, b, rank,
num_channels,
channel_idx} ->
new_shape =
1
|> List.duplicate(rank)
|> List.to_tuple()
|> put_elem(channel_idx, num_channels)
{Nx.reshape(g, new_shape), Nx.reshape(b, new_shape)}
end)
{mean, var} = mean_and_variance(input, axes: [axes])
normalize(input, mean, var, gamma, bias, epsilon: opts[:epsilon])
end
@doc """
Functional implementation of group normalization.
Normalizes the input by reshaping input into groups of given
`:group_size` and then calculating the mean and variance along
every dimension but the input batch dimension.
$$y = \frac{x - E[x]}{\sqrt{Var[x] + \epsilon}} * \gamma + \beta$$
`gamma` and `beta` are often trainable parameters. This method does
not maintain an EMA of mean and variance.
## Options
* `:group_size` - channel group size. Size of each group to split
input channels into.
* `:epsilon` - numerical stability term. $\epsilon$ in the above
formulation.
* `:channel_index` - channel index used to determine reduction
axes and group shape for mean and variance calculation.
## References
* [Group Normalization](https://arxiv.org/abs/1803.08494v3)
"""
@doc type: :normalization
defn group_norm(input, gamma, bias, opts \\ []) do
opts = keyword!(opts, [:group_size, epsilon: 1.0e-5, channel_index: 1])
group_shape =
transform({Nx.shape(input), opts[:group_size], opts[:channel_index]}, fn {shape, groups,
channel} ->
Axon.Shape.group_norm_shape(shape, groups, channel)
end)
channel_index = opts[:channel_index]
num_channels =
transform({input, channel_index}, fn {inp, channel_idx} ->
elem(Nx.shape(inp), channel_idx)
end)
{gamma, bias} =
transform({gamma, bias, Nx.rank(input), num_channels, channel_index}, fn {g, b, rank,
num_channels,
channel_idx} ->
new_shape =
1
|> List.duplicate(rank)
|> List.to_tuple()
|> put_elem(channel_idx, num_channels)
{Nx.reshape(g, new_shape), Nx.reshape(b, new_shape)}
end)
x = Nx.reshape(input, group_shape)
axes = transform(Nx.rank(x), &Axon.Shape.group_norm_axes/1)
{mean, var} = mean_and_variance(x, axes: axes)
normalize(Nx.reshape(x, input), mean, var, gamma, bias, epsilon: opts[:epsilon])
end
@doc """
Functional implementation of instance normalization.
Normalizes the input by calculating mean and variance of the
input tensor along the spatial dimensions of the input.
$$y = \frac{x - E[x]}{\sqrt{Var[x] + \epsilon}} * \gamma + \beta$$
`gamma` and `beta` are often trainable parameters. This method does
not maintain an EMA of mean and variance.
## Options
* `:epsilon` - numerical stability term. $\epsilon$ in the above
formulation.
* `:channel_index` - channel index used to determine reduction
axes for mean and variance calculation.
## References
* [Instance Normalization: The Missing Ingredient for Fast Stylization](https://arxiv.org/abs/1607.08022v3)
"""
@doc type: :normalization
defn instance_norm(input, gamma, bias, opts \\ []) do
opts = keyword!(opts, epsilon: 1.0e-5, channel_index: 1)
axes =
transform({Nx.axes(input), opts[:channel_index]}, fn {axes, channel} ->
Axon.Shape.instance_norm_axes(axes, channel)
end)
channel_index = opts[:channel_index]
num_channels =
transform({input, channel_index}, fn {inp, channel_idx} ->
elem(Nx.shape(inp), channel_idx)
end)
{gamma, bias} =
transform({gamma, bias, Nx.rank(input), num_channels, channel_index}, fn {g, b, rank,
num_channels,
channel_idx} ->
new_shape =
1
|> List.duplicate(rank)
|> List.to_tuple()
|> put_elem(channel_idx, num_channels)
{Nx.reshape(g, new_shape), Nx.reshape(b, new_shape)}
end)
{mean, var} = mean_and_variance(input, axes: axes)
normalize(input, mean, var, gamma, bias, epsilon: opts[:epsilon])
end
## Stochastic
# TODO: Manage the state of these RNGs
@doc ~S"""
Functional implementation of a dropout layer.
Applies a mask to some elements of the input tensor with probability
`rate` and scales the input tensor by a factor of $\frac{1}{1 - rate}$.
Dropout is a form of regularization that helps prevent overfitting
by preventing models from becoming too reliant on certain connections.
Dropout can somewhat be thought of as learning an ensemble of models
with random connections masked.
## Options
* `:rate` - dropout rate. Used to determine probability a connection
will be dropped. Required.
* `:noise_shape` - input noise shape. Shape of `mask` which can be useful
for broadcasting `mask` across feature channels or other dimensions.
Defaults to shape of input tensor.
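## Examples
A behavioral sketch (the mask is random per run, so only the scaling
behavior is shown; plain code rather than a doctest):
    input = Nx.iota({2, 2}, type: {:f, 32})
    Axon.Layers.dropout(input, rate: 0.5)
    # kept elements are scaled by 1 / (1 - rate) = 2.0; dropped ones are 0.0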
## References
* [Dropout: A Simple Way to Prevent Neural Networks from Overfitting](https://jmlr.org/papers/v15/srivastava14a.html)
"""
@doc type: :dropout
defn dropout(input, opts \\ []) do
opts = keyword!(opts, [:rate, noise_shape: Nx.shape(input)])
keep_prob = Nx.tensor(1, type: Nx.type(input)) - Nx.tensor(opts[:rate], type: Nx.type(input))
mask = Nx.less(Nx.random_uniform(opts[:noise_shape], type: Nx.type(input)), keep_prob)
mask =
transform(
{mask, Nx.shape(input)},
fn {mask, input_shape} ->
if Elixir.Kernel.==(Nx.shape(mask), input_shape),
do: mask,
else: Nx.broadcast(mask, input_shape)
end
)
Nx.select(mask, input / keep_prob, Nx.tensor(0, type: Nx.type(input)))
end
@doc """
Functional implementation of an n-dimensional spatial
dropout layer.
Applies a mask to entire feature maps instead of individual
elements. This is done by calculating a mask shape equal to
the spatial dimensions of the input tensor with 1 channel,
and then broadcasting the mask across the feature dimension
of the input tensor.
## Options
* `:rate` - dropout rate. Used to determine probability a connection
will be dropped. Defaults to `0.5`. The noise shape is computed
internally so that entire feature maps are masked together.
## References
* [Efficient Object Localization Using Convolutional Networks](https://arxiv.org/abs/1411.4280)
"""
@doc type: :dropout
defn spatial_dropout(input, opts \\ []) do
opts = keyword!(opts, rate: 0.5)
noise_shape = transform(Nx.shape(input), &Axon.Shape.spatial_dropout_noise_shape/1)
dropout(input, rate: opts[:rate], noise_shape: noise_shape)
end
@doc """
Functional implementation of an alpha dropout layer.
Alpha dropout is a type of dropout that forces the input
to have zero mean and unit standard deviation. Randomly
masks some elements and scales to enforce self-normalization.
## Options
* `:rate` - dropout rate. Used to determine probability a connection
will be dropped. Defaults to `0.5`.
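## Examples
A behavioral sketch (the mask is random per run, so only the shape and
masking behavior are shown; plain code rather than a doctest):
    input = Nx.random_uniform({2, 4})
    Axon.Layers.alpha_dropout(input, rate: 0.3)
    # same shape as the input; masked entries are set to the scaled
    # negative saturation value and the result is affinely rescaled
    # (a * x + b) to approximately preserve zero mean and unit variance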
## References
* [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
"""
@doc type: :dropout
defn alpha_dropout(input, opts \\ []) do
opts = keyword!(opts, rate: 0.5)
rate = opts[:rate]
alpha = Nx.tensor(1.6732632423543772848170429916717, type: Nx.type(input))
scale = Nx.tensor(1.0507009873554804934193349852946, type: Nx.type(input))
alpha_p = -alpha * scale
keep_prob = Nx.tensor(1, type: Nx.type(input)) - rate
mask = Nx.less(Nx.random_uniform(Nx.shape(input), type: Nx.type(input)), keep_prob)
a = Nx.rsqrt(keep_prob * Nx.power(Nx.tensor(1, type: Nx.type(input)) * alpha_p, 2))
b = -a * alpha_p * rate
x = Nx.select(mask, input, alpha_p)
a * x + b
end
@doc """
Functional implementation of a feature alpha dropout layer.
Feature alpha dropout applies dropout in the same manner as
spatial dropout; however, it also enforces self-normalization
by masking inputs with the SELU activation function and scaling
unmasked inputs.
## Options
* `:rate` - dropout rate. Used to determine probability a connection
will be dropped. Defaults to `0.5`. The noise shape is derived from
the input so that entire feature maps are masked together.
"""
@doc type: :dropout
defn feature_alpha_dropout(input, opts \\ []) do
opts = keyword!(opts, rate: 0.5)
noise_shape = transform(Nx.shape(input), &Axon.Shape.spatial_dropout_noise_shape/1)
keep_prob = 1 - opts[:rate]
mask = Nx.less(Nx.random_uniform(noise_shape, type: Nx.type(input)), keep_prob)
mask =
transform(
{mask, Nx.shape(input)},
fn {mask, input_shape} ->
if Elixir.Kernel.==(Nx.shape(mask), input_shape),
do: mask,
else: Nx.broadcast(mask, input_shape)
end
)
Nx.select(mask, input / keep_prob, Nx.negate(Axon.Activations.selu(input)))
end
## Global Pooling
@doc """
Functional implementation of global average pooling which averages across
the spatial dimensions of the input such that the only remaining dimensions
are the batch and feature dimensions.
Assumes data is configured in a channels-first like format.
## Parameter Shapes
* `input` - {batch_size, features, s1, ..., sN}
## Options
* `:keep_axes` - option to keep reduced axes with size 1 for each reduced
dimensions. Defaults to `false`
## Examples
iex> Axon.Layers.global_avg_pool(Nx.iota({3, 2, 3}, type: {:f, 32}))
#Nx.Tensor<
f32[3][2]
[
[1.0, 4.0],
[7.0, 10.0],
[13.0, 16.0]
]
>
iex> Axon.Layers.global_avg_pool(Nx.iota({1, 3, 2, 2}, type: {:f, 32}), keep_axes: true)
#Nx.Tensor<
f32[1][3][1][1]
[
[
[
[1.5]
],
[
[5.5]
],
[
[9.5]
]
]
]
>
"""
defn global_avg_pool(input, opts \\ []) do
opts = keyword!(opts, keep_axes: false)
all_but_batch_and_feature =
transform(Nx.rank(input), fn rank ->
for i <- 2..(rank - 1), do: i
end)
Nx.mean(input, axes: all_but_batch_and_feature, keep_axes: opts[:keep_axes])
end
@doc """
Functional implementation of global max pooling which computes maximums across
the spatial dimensions of the input such that the only remaining dimensions are
the batch and feature dimensions.
Assumes data is configured in a channels-first like format.
## Parameter Shapes
* `input` - {batch_size, features, s1, ..., sN}
## Options
* `:keep_axes` - option to keep reduced axes with size 1 for each reduced
dimensions. Defaults to `false`
## Examples
iex> Axon.Layers.global_max_pool(Nx.iota({3, 2, 3}, type: {:f, 32}))
#Nx.Tensor<
f32[3][2]
[
[2.0, 5.0],
[8.0, 11.0],
[14.0, 17.0]
]
>
iex> Axon.Layers.global_max_pool(Nx.iota({1, 3, 2, 2}, type: {:f, 32}), keep_axes: true)
#Nx.Tensor<
f32[1][3][1][1]
[
[
[
[3.0]
],
[
[7.0]
],
[
[11.0]
]
]
]
>
"""
defn global_max_pool(x, opts \\ []) do
opts = keyword!(opts, keep_axes: false)
all_but_batch_and_feature =
transform(Nx.rank(x), fn rank ->
for i <- 2..(rank - 1), do: i
end)
Nx.reduce_max(x, axes: all_but_batch_and_feature, keep_axes: opts[:keep_axes])
end
@doc """
Functional implementation of global LP pooling which computes the following
function across spatial dimensions of the input:
$$f(X) = \sqrt[p]{\sum_{x \in X} x^{p}}$$
Where $p$ is given by the keyword argument `:norm`. As $p$ approaches
infinity, it becomes equivalent to max pooling.
Assumes data is configured in a channels-first like format.
## Parameter Shapes
* `input` - {batch_size, features, s1, ..., sN}
## Options
* `:keep_axes` - option to keep reduced axes with size 1 for each reduced
dimensions. Defaults to `false`
* `:norm` - $p$ in above function. Defaults to 2
## Examples
iex> Axon.Layers.global_lp_pool(Nx.iota({3, 2, 3}, type: {:f, 32}), norm: 1)
#Nx.Tensor<
f32[3][2]
[
[3.0, 12.0],
[21.0, 30.0],
[39.0, 48.0]
]
>
iex> Axon.Layers.global_lp_pool(Nx.iota({1, 3, 2, 2}, type: {:f, 16}), keep_axes: true)
#Nx.Tensor<
f16[1][3][1][1]
[
[
[
[3.7421875]
],
[
[11.2265625]
],
[
[19.125]
]
]
]
>
"""
defn global_lp_pool(x, opts \\ []) do
opts = keyword!(opts, norm: 2, keep_axes: false)
norm = opts[:norm]
all_but_batch_and_feature =
transform(Nx.rank(x), fn rank ->
for i <- 2..(rank - 1), do: i
end)
x
|> Nx.power(norm)
|> Nx.sum(axes: all_but_batch_and_feature, keep_axes: opts[:keep_axes])
|> Nx.power(Nx.divide(Nx.tensor(1, type: Nx.type(x)), norm))
end
## Sparse
@doc """
Computes embedding by treating weight matrix as a lookup table
for discrete tokens.
`input` is a vector of discrete values, typically representing tokens
(e.g. words, characters, etc.) from a vocabulary. `weights` is a weight
matrix of shape `{vocab_size, embedding_size}` from which the dense
embeddings will be drawn.
## Parameter Shapes
* `input` - `{batch_size, seq_len}`
* `weights` - `{vocab_size, embedding_size}`
## Examples
iex> input = Nx.tensor([[1, 2, 4, 5], [4, 3, 2, 9]])
iex> weights = Nx.tensor([
...> [0.46299999952316284, 0.5562999844551086, 0.18170000612735748],
...> [0.9801999926567078, 0.09780000150203705, 0.5333999991416931],
...> [0.6980000138282776, 0.9240999817848206, 0.23479999601840973],
...> [0.31929999589920044, 0.42250001430511475, 0.7865999937057495],
...> [0.5519000291824341, 0.5662999749183655, 0.20559999346733093],
...> [0.1898999959230423, 0.9311000108718872, 0.8356000185012817],
...> [0.6383000016212463, 0.8794000148773193, 0.5282999873161316],
...> [0.9523000121116638, 0.7597000002861023, 0.08250000327825546],
...> [0.6622999906539917, 0.02329999953508377, 0.8205999732017517],
...> [0.9855999946594238, 0.36419999599456787, 0.5372999906539917]
...> ])
iex> Axon.Layers.embedding(input, weights)
#Nx.Tensor<
f32[2][4][3]
[
[
[0.9801999926567078, 0.09780000150203705, 0.5333999991416931],
[0.6980000138282776, 0.9240999817848206, 0.23479999601840973],
[0.5519000291824341, 0.5662999749183655, 0.20559999346733093],
[0.1898999959230423, 0.9311000108718872, 0.8356000185012817]
],
[
[0.5519000291824341, 0.5662999749183655, 0.20559999346733093],
[0.31929999589920044, 0.42250001430511475, 0.7865999937057495],
[0.6980000138282776, 0.9240999817848206, 0.23479999601840973],
[0.9855999946594238, 0.36419999599456787, 0.5372999906539917]
]
]
>
"""
defn embedding(input, weights) do
Nx.take(weights, input, axis: 0)
end
## Attention
@doc """
Functional implementation of dot-product attention layer.
## Options
* `:axis` - list of axes to compute the attention over. Required.
* `:rate` - dropout rate applied to the attention weights. Defaults to `0.5`.
"""
@doc type: :attention
defn dot_product_attention(query, key, value, bias, opts \\ []) do
opts = keyword!(opts, [:axis, rate: 0.5])
axis = opts[:axis]
rate = opts[:rate]
depth = transform(query, fn q -> elem(Nx.shape(q), Nx.rank(q) - 1) end)
n = Nx.rank(query)
batch_dims =
transform({n, axis}, fn {n, axis} -> Enum.to_list(0..(n - 1)) -- [n - 1 | axis] end)
qk_perm =
transform({batch_dims, axis, n}, fn {batch_dims, axis, n} ->
batch_dims ++ axis ++ [n - 1]
end)
v_perm =
transform({batch_dims, axis, n}, fn {batch_dims, axis, n} ->
batch_dims ++ [n - 1] ++ axis
end)
key = Nx.transpose(key, axes: qk_perm)
query = Nx.transpose(query, axes: qk_perm)
value = Nx.transpose(value, axes: v_perm)
query = query / Nx.sqrt(depth)
attn_weights = Nx.dot(query, [n - 1], batch_dims, key, [n - 1], batch_dims) + bias
norm_dims =
transform({Nx.rank(attn_weights), axis}, fn {n_dims, axis} ->
Enum.to_list((n_dims - length(axis))..(n_dims - 1))
end)
attn_weights =
attn_weights
|> Nx.exp()
|> Nx.sum(axes: norm_dims, keep_axes: true)
|> Nx.log()
|> Nx.negate()
|> Nx.add(attn_weights)
|> Nx.exp()
attn_weights = dropout(attn_weights, rate: rate)
{w_contracting_dims, v_contracting_dims} =
transform({norm_dims, Nx.rank(value), axis}, fn {n, v, a} ->
{n, Enum.to_list((v - length(a))..(v - 1))}
end)
y =
Nx.dot(
attn_weights,
w_contracting_dims,
batch_dims,
value,
v_contracting_dims,
batch_dims
)
perm_inv =
transform(qk_perm, fn perm ->
# compute the inverse permutation: sort destination indices by source axis
perm
|> Enum.with_index()
|> Enum.sort_by(fn {source, _destination} -> source end)
|> Enum.map(fn {_, i} -> i end)
end)
Nx.transpose(y, axes: perm_inv)
end
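# A minimal usage sketch (illustrative; shapes are hypothetical). Attention
# is computed over the axes given in `:axis`, here the sequence axis of
# {batch, seq_len, num_heads, head_dim} inputs:
#
#     q = Nx.random_normal({4, 10, 8, 32})
#     k = Nx.random_normal({4, 10, 8, 32})
#     v = Nx.random_normal({4, 10, 8, 32})
#     bias = Nx.tensor(0.0)
#     Axon.Layers.dot_product_attention(q, k, v, bias, axis: [1], rate: 0.0)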
## Shape
@doc """
Flattens input to shape of `{batch, units}` by folding outer
dimensions.
## Examples
iex> Axon.Layers.flatten(Nx.iota({1, 2, 2}, type: {:f, 32}))
#Nx.Tensor<
f32[1][4]
[
[0.0, 1.0, 2.0, 3.0]
]
>
"""
defn flatten(x) do
new_shape = transform(Nx.shape(x), &Axon.Shape.flatten/1)
Nx.reshape(x, new_shape)
end
end
| 30.638596 | 134 | 0.613567 |
f7a81fde1186681c9d9ff5b71abf5e7bca4470b7 | 2,954 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_intent_message_rbm_card_content.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_intent_message_rbm_card_content.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_intent_message_rbm_card_content.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmCardContent do
@moduledoc """
Rich Business Messaging (RBM) Card content
## Attributes
* `description` (*type:* `String.t`, *default:* `nil`) - Optional. Description of the card (at most 2000 bytes).
At least one of the title, description or media must be set.
* `media` (*type:* `GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmCardContentRbmMedia.t`, *default:* `nil`) - Optional. However at least one of the title, description or media must
be set. Media (image, GIF or a video) to include in the card.
* `suggestions` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmSuggestion.t)`, *default:* `nil`) - Optional. List of suggestions to include in the card.
* `title` (*type:* `String.t`, *default:* `nil`) - Optional. Title of the card (at most 200 bytes).
At least one of the title, description or media must be set.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => String.t(),
:media =>
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmCardContentRbmMedia.t(),
:suggestions =>
list(
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmSuggestion.t()
),
:title => String.t()
}
field(:description)
field(:media,
as:
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmCardContentRbmMedia
)
field(:suggestions,
as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmSuggestion,
type: :list
)
field(:title)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmCardContent do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmCardContent.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageRbmCardContent do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.392405 | 212 | 0.732905 |
f7a82f55290ed51fdca4a26b7685f4d42935354f | 948 | exs | Elixir | apps/chat/config/config.exs | mikemorris/elixir-eks-terraform | e1c8a4e7ba26bd85322dfcedd229b3558d9d5844 | ["MIT"] | 2 | 2019-06-27T11:51:11.000Z | 2020-06-04T16:00:55.000Z | apps/chat/config/config.exs | mikemorris/elixir-eks-terraform | e1c8a4e7ba26bd85322dfcedd229b3558d9d5844 | ["MIT"] | null | null | null | apps/chat/config/config.exs | mikemorris/elixir-eks-terraform | e1c8a4e7ba26bd85322dfcedd229b3558d9d5844 | ["MIT"] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
# Configures the endpoint
config :chat, ChatWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "fKWZy7Qut6HjUKDfCi2Df/0x8kBag4US430ZkD917d86CTw1tawqulKQGge+sdVf",
render_errors: [view: ChatWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Chat.PubSub, adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 33.857143 | 86 | 0.768987 |
f7a8790768e89092f07f51e73078306e9b5f1751 | 558 | exs | Elixir | test/todo/todo_list_test.exs | sprql/planner | 9f42b34dc511bfe1668b7092d0a68924b9dc9501 | ["MIT"] | null | null | null | test/todo/todo_list_test.exs | sprql/planner | 9f42b34dc511bfe1668b7092d0a68924b9dc9501 | ["MIT"] | null | null | null | test/todo/todo_list_test.exs | sprql/planner | 9f42b34dc511bfe1668b7092d0a68924b9dc9501 | ["MIT"] | null | null | null |
defmodule Planner.Todo.ListTest do
use Planner.ModelCase
alias Planner.Todo
@valid_attrs %{description: "some description", name: "some name", position: 42, project_id: "7488a646-e31f-11e4-aace-600308960662", state: "some state"}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = Todo.List.changeset(%Todo.List{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = Todo.List.changeset(%Todo.List{}, @invalid_attrs)
refute changeset.valid?
end
end
| 29.368421 | 155 | 0.731183 |
f7a89a8e861c2101525007b79ffa938542c40536 | 5,972 | ex | Elixir | lib/stripe/subscriptions/credit_note.ex | alanvardy/stripity_stripe | 8cb9a447e8e801a799c244564f41c961246d5563 | ["BSD-3-Clause"] | null | null | null | lib/stripe/subscriptions/credit_note.ex | alanvardy/stripity_stripe | 8cb9a447e8e801a799c244564f41c961246d5563 | ["BSD-3-Clause"] | null | null | null | lib/stripe/subscriptions/credit_note.ex | alanvardy/stripity_stripe | 8cb9a447e8e801a799c244564f41c961246d5563 | ["BSD-3-Clause"] | null | null | null |
defmodule Stripe.CreditNote do
@moduledoc """
Work with Stripe Credit Note objects.
You can:
- Preview a credit note
- Create a credit note
- Retrieve a credit note
- Update a credit note
- Void a credit note
- List credit notes
Stripe API reference: https://stripe.com/docs/api/credit_notes
"""
use Stripe.Entity
import Stripe.Request
@type tax_amount :: %{
amount: integer,
inclusive: boolean,
tax_rate: Stripe.id() | Stripe.TaxRate.t()
}
@type discount :: %{
amount: integer,
discount: String.t()
}
@type t :: %__MODULE__{
id: Stripe.id(),
object: String.t(),
amount: integer,
created: Stripe.timestamp(),
currency: String.t(),
customer: Stripe.id() | Stripe.Customer.t() | nil,
customer_balance_transaction: Stripe.id() | Stripe.CustomerBalanceTransaction.t() | nil,
discount_amount: integer,
discount_amounts: [discount],
invoice: Stripe.id() | Stripe.Invoice.t(),
lines: Stripe.List.t(Stripe.LineItem.t()),
livemode: boolean,
memo: String.t(),
metadata: Stripe.Types.metadata(),
number: String.t(),
out_of_band_amount: integer | nil,
pdf: String.t(),
reason: String.t() | nil,
refund: Stripe.id() | Stripe.Refund.t() | nil,
status: String.t(),
subtotal: integer,
tax_amounts: [tax_amount()],
total: integer,
type: String.t(),
voided_at: Stripe.timestamp()
}
defstruct [
:id,
:object,
:amount,
:created,
:currency,
:customer,
:customer_balance_transaction,
:discount_amount,
:discount_amounts,
:invoice,
:lines,
:livemode,
:memo,
:metadata,
:number,
:out_of_band_amount,
:pdf,
:reason,
:refund,
:status,
:subtotal,
:tax_amounts,
:total,
:type,
:voided_at
]
@plural_endpoint "credit_notes"
@doc """
Preview a credit note.
Stripe.CreditNote.preview(%{
invoice: "in_173uNd4Wq104wst7Gf4dgq1Y",
amount: 500,
})
"""
@spec preview(params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
:amount => number,
:invoice => Stripe.id(),
optional(:credit_amount) => number,
optional(:memo) => String.t(),
optional(:metadata) => Stripe.Types.metadata(),
optional(:reason) => String.t(),
optional(:refund_amount) => number,
optional(:refund) => Stripe.id()
}
| %{}
def preview(params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/preview")
|> put_params(params)
|> put_method(:get)
|> make_request()
end
@doc """
Create a credit note.
Stripe.CreditNote.create(%{
invoice: "in_173uNd4Wq104wst7Gf4dgq1Y",
amount: 500,
})
"""
@spec create(params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
:amount => number,
:invoice => Stripe.id(),
optional(:credit_amount) => number,
optional(:memo) => String.t(),
optional(:metadata) => Stripe.Types.metadata(),
optional(:reason) => String.t(),
optional(:refund_amount) => number,
optional(:refund) => Stripe.id()
}
| %{}
def create(params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_params(params)
|> put_method(:post)
|> make_request()
end
@doc """
Retrieve a Credit Note.
Stripe.CreditNote.retrieve("cn_1EXwJk4Wq104wst7IISdh9ed")
"""
@spec retrieve(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
def retrieve(id, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:get)
|> make_request()
end
@doc """
Update a credit note.
Takes the `id` and a map of changes.
Stripe.CreditNote.update(
"cn_1EXwJk4Wq104wst7IISdh9ed",
%{
metadata: {order_id: "6735"},
}
)
"""
@spec update(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
optional(:memo) => String.t(),
optional(:metadata) => Stripe.Types.metadata()
}
| %{}
def update(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:post)
|> put_params(params)
|> make_request()
end
@doc """
Void a credit note.
Stripe.CreditNote.void("cn_1EXwJk4Wq104wst7IISdh9ed")
"""
@spec void(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
def void(id, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}/void")
|> put_method(:post)
|> make_request()
end
@doc """
List all credit notes.
Stripe.CreditNote.list(limit: 3)
"""
@spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
when params:
%{
optional(:ending_before) => t | Stripe.id(),
optional(:limit) => 1..100,
optional(:invoice) => Stripe.id(),
optional(:starting_after) => t | Stripe.id()
}
| %{}
def list(params \\ %{}, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_method(:get)
|> put_params(params)
|> cast_to_id([:ending_before, :starting_after])
|> make_request()
end
end
| 26.660714 | 98 | 0.533657 |
f7a8af51398e7986b2d1cae85e9f774aa4c03dd7 | 2,098 | exs | Elixir | config/dev.exs | riebeekn/phx-auth-with-pow | 2b555365d6961b9afbff99f25540ca41264eba82 | ["MIT"] | 14 | 2019-02-27T18:49:28.000Z | 2020-12-24T21:39:16.000Z | config/dev.exs | ammy-bajwa/phx-auth-with-pow | 2b555365d6961b9afbff99f25540ca41264eba82 | ["MIT"] | null | null | null | config/dev.exs | ammy-bajwa/phx-auth-with-pow | 2b555365d6961b9afbff99f25540ca41264eba82 | ["MIT"] | 5 | 2019-07-16T17:50:36.000Z | 2020-08-12T22:14:41.000Z |
use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :warehouse, WarehouseWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :warehouse, WarehouseWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/warehouse_web/views/.*(ex)$},
~r{lib/warehouse_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
# Configure your database
config :warehouse, Warehouse.Repo,
username: "postgres",
password: "postgres",
database: "warehouse_dev",
hostname: "localhost",
pool_size: 10
| 27.605263 | 68 | 0.689228 |
f7a8fbb165089200507577a91cb2ac27685eccc9 | 248 | ex | Elixir | lib/verk_web/mount.ex | apodlaski/verk_web | 91c544b1f792f929e06f18e26b964e23a5771a8a | ["MIT"] | null | null | null | lib/verk_web/mount.ex | apodlaski/verk_web | 91c544b1f792f929e06f18e26b964e23a5771a8a | ["MIT"] | null | null | null | lib/verk_web/mount.ex | apodlaski/verk_web | 91c544b1f792f929e06f18e26b964e23a5771a8a | ["MIT"] | null | null | null |
defmodule VerkWeb.Mount do
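# A usage sketch (hypothetical host module): `use VerkWeb.Mount, path: "/verk"`
# inside a Phoenix endpoint expands to a `socket/2` mount at "/verk/socket",
# normalizing a missing leading slash first.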
defmacro __using__(path: path) do
quote bind_quoted: [path: path] do
path = if String.starts_with?(path, "/"), do: path, else: "/" <> path
socket("#{path}/socket", VerkWeb.UserSocket)
end
end
end
| 24.8 | 75 | 0.641129 |
f7a8ff229917d00b4ec73e2e5c56f0d2fb7b1b94 | 262 | exs | Elixir | config/test.exs | angelikatyborska/mazes | cba3b1d6aaaa896f4ca505b477cf03b67523ebf0 | ["MIT"] | 116 | 2020-12-26T20:56:01.000Z | 2022-03-12T15:12:37.000Z | config/test.exs | lohayon/mazes | 98a6276ea7440af938edfb14476a5877fdc295e1 | ["MIT"] | null | null | null | config/test.exs | lohayon/mazes | 98a6276ea7440af938edfb14476a5877fdc295e1 | ["MIT"] | 10 | 2020-12-29T05:11:43.000Z | 2022-01-02T00:57:12.000Z |
use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :mazes, MazesWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 23.818182 | 56 | 0.732824 |
f7a9250a38fe2828ee1712e68a0ae2aa1350323c | 71 | exs | Elixir | test/adify/yaml_test.exs | ericsullivan/adify | 25b842498ccfbf612e42d8b46530aad1ffb1af5a | ["MIT"] | 5 | 2019-07-14T22:24:32.000Z | 2020-11-25T20:36:11.000Z | test/adify/yaml_test.exs | ericsullivan/adify | 25b842498ccfbf612e42d8b46530aad1ffb1af5a | ["MIT"] | 43 | 2018-01-18T15:16:30.000Z | 2021-01-23T22:12:17.000Z | test/adify/yaml_test.exs | ericsullivan/adify | 25b842498ccfbf612e42d8b46530aad1ffb1af5a | ["MIT"] | 2 | 2019-05-28T17:50:27.000Z | 2020-03-23T21:00:02.000Z |
defmodule Adify.YAMLTest do
use ExUnit.Case
doctest Adify.YAML
end
| 14.2 | 27 | 0.788732 |
f7a93009c78572e151ca931552228b3413603869 | 2,159 | exs | Elixir | test/commands/user_test.exs | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | ["Apache-2.0"] | 356 | 2016-03-16T12:37:28.000Z | 2021-12-18T03:22:39.000Z | test/commands/user_test.exs | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | ["Apache-2.0"] | 30 | 2016-03-16T09:19:10.000Z | 2021-01-12T08:10:52.000Z | test/commands/user_test.exs | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | ["Apache-2.0"] | 72 | 2016-03-16T13:32:14.000Z | 2021-03-23T11:27:43.000Z |
defmodule Nectar.Command.UserTest do
use Nectar.ModelCase
alias Nectar.Command
alias Nectar.TestSetup
describe "login" do
setup do
{:ok, user} = TestSetup.User.create_user
{:ok, %{user: user}}
end
test "with valid credentials", %{user: user} do
{status, logged_in_user} =
Command.User.login(Nectar.Repo, %{email: user.email, password: "password"})
assert status == :ok
assert logged_in_user.id == user.id
end
test "with no credentials" do
{status, logged_in_user} =
Command.User.login(Nectar.Repo, %{})
assert status == :error
assert errors_on(logged_in_user) == [email: "can't be blank", password: "can't be blank"]
end
test "with incorrect password", %{user: user} do
{status, logged_in_user} =
Command.User.login(Nectar.Repo, %{email: user.email, password: "passwor"})
assert status == :error
assert errors_on(logged_in_user) == [user: "Invalid credentials"]
end
test "with incorrect email", %{user: user} do
{status, logged_in_user} =
Command.User.login(Nectar.Repo, %{email: user.email <> "abc", password: "password"})
assert status == :error
assert errors_on(logged_in_user) == [user: "Invalid credentials"]
end
end
describe "register_user" do
test "with valid attributes" do
{status, user} = Command.User.register_user(Nectar.Repo, Nectar.TestSetup.User.valid_attrs)
assert status == :ok
assert user.id
refute user.is_admin
end
test "with invalid attributes" do
{status, _user} = Command.User.register_user(Nectar.Repo, Nectar.TestSetup.User.invalid_attrs)
refute status == :ok
end
end
describe "register_admin" do
test "with valid attributes" do
{status, user} = Command.User.register_admin(Nectar.Repo, Nectar.TestSetup.User.valid_attrs)
assert status == :ok
assert user.id
assert user.is_admin
end
test "with invalid attributes" do
{status, _user} = Command.User.register_admin(Nectar.Repo, Nectar.TestSetup.User.invalid_attrs)
refute status == :ok
end
end
end
| 30.842857 | 101 | 0.657249 |
f7a999e56ef1df9705af169f60031f6ee098546a | 1,923 | ex | Elixir | lib/strichliste_elixir_web.ex | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | ["MIT"] | null | null | null | lib/strichliste_elixir_web.ex | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | ["MIT"] | null | null | null | lib/strichliste_elixir_web.ex | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | ["MIT"] | 1 | 2019-05-24T18:18:24.000Z | 2019-05-24T18:18:24.000Z |
defmodule StrichlisteElixirWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use StrichlisteElixirWeb, :controller
use StrichlisteElixirWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: StrichlisteElixirWeb
import Plug.Conn
import StrichlisteElixirWeb.Gettext
alias StrichlisteElixirWeb.Router.Helpers, as: Routes
import Phoenix.LiveView.Controller, only: [live_render: 3]
end
end
def view do
quote do
use Phoenix.View,
root: "lib/strichliste_elixir_web/templates",
namespace: StrichlisteElixirWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import StrichlisteElixirWeb.ErrorHelpers
import StrichlisteElixirWeb.Gettext
alias StrichlisteElixirWeb.Router.Helpers, as: Routes
import Phoenix.LiveView, only: [live_render: 2, live_render: 3]
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import StrichlisteElixirWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 26.342466 | 83 | 0.710868 |
f7a9ad281b0e19da1e9753a75624e08e4df48da8 | 79 | ex | Elixir | web/views/coherence/email_view.ex | Symbolk/social_network | 11df1ba9bc19fd140b630ac2abbd4b13b42def92 | ["MIT"] | 17 | 2017-01-02T10:38:28.000Z | 2021-02-28T22:16:54.000Z | web/views/coherence/email_view.ex | Symbolk/social_network | 11df1ba9bc19fd140b630ac2abbd4b13b42def92 | ["MIT"] | null | null | null | web/views/coherence/email_view.ex | Symbolk/social_network | 11df1ba9bc19fd140b630ac2abbd4b13b42def92 | ["MIT"] | 2 | 2017-01-09T13:02:13.000Z | 2018-06-16T22:01:53.000Z |
defmodule Coherence.EmailView do
use SocialNetwork.Coherence.Web, :view
end
| 15.8 | 40 | 0.810127 |
f7a9b0557c9d85856ed129f94ad154ee44541f59 | 1,294 | ex | Elixir | lib/beeline/event_store_d_b/spear.ex | NFIBrokerage/beeline | 8280a351a8c36634ced9088f90fe8103b47d685b | ["Apache-2.0"] | null | null | null | lib/beeline/event_store_d_b/spear.ex | NFIBrokerage/beeline | 8280a351a8c36634ced9088f90fe8103b47d685b | ["Apache-2.0"] | null | null | null | lib/beeline/event_store_d_b/spear.ex | NFIBrokerage/beeline | 8280a351a8c36634ced9088f90fe8103b47d685b | ["Apache-2.0"] | null | null | null |
import Beeline.Utils, only: [if_spear: 1]
if_spear do
defmodule Beeline.EventStoreDB.Spear do
@moduledoc false
# functions for working with EventStoreDB via Spear
@behaviour Beeline.EventStoreDB
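# `latest_event_number/2` reads a single event backwards from the end of
# the stream; an empty stream yields no event and falls through to -1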
def latest_event_number(conn, stream) do
Spear.stream!(conn, stream,
from: :end,
direction: :backwards,
chunk_size: 1
)
|> Enum.take(1)
|> case do
[event] ->
Spear.Event.revision(event)
# coveralls-ignore-start
[] ->
-1
# coveralls-ignore-stop
end
end
def decode_event(%Spear.Event{} = event) do
atomify(event.body)
end
# coveralls-ignore-start
defp atomify(map)
defp atomify(%{__struct__: _some_module} = struct), do: struct
defp atomify(map) when is_map(map) do
Enum.into(map, %{}, fn {k, v} ->
{ensure_is_atom(k), atomify(v)}
end)
end
defp atomify(list) when is_list(list) do
Enum.map(list, &atomify/1)
end
defp atomify(value), do: value
defp ensure_is_atom(key) when is_binary(key), do: String.to_atom(key)
defp ensure_is_atom(key) when is_atom(key), do: key
# coveralls-ignore-stop
def stream_position(event) do
Spear.Event.revision(event)
end
end
end
| 22.310345 | 73 | 0.613601 |
f7a9c43647f6ab941a60d8cd21ab3a4c1e9065c1 | 2,633 | ex | Elixir | clients/chat/lib/google_api/chat/v1/model/google_apps_card_v1_button.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/chat/lib/google_api/chat/v1/model/google_apps_card_v1_button.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/chat/lib/google_api/chat/v1/model/google_apps_card_v1_button.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Chat.V1.Model.GoogleAppsCardV1Button do
@moduledoc """
A button. Can be a text button or an image button.
## Attributes
* `altText` (*type:* `String.t`, *default:* `nil`) - The alternative text used for accessibility. Has no effect when an icon is set; use `icon.alt_text` instead.
* `color` (*type:* `GoogleApi.Chat.V1.Model.Color.t`, *default:* `nil`) - If set, the button is filled with a solid background.
* `disabled` (*type:* `boolean()`, *default:* `nil`) - If true, the button is displayed in a disabled state and doesn't respond to user actions.
* `icon` (*type:* `GoogleApi.Chat.V1.Model.GoogleAppsCardV1Icon.t`, *default:* `nil`) - The icon image.
* `onClick` (*type:* `GoogleApi.Chat.V1.Model.GoogleAppsCardV1OnClick.t`, *default:* `nil`) - The action to perform when the button is clicked.
* `text` (*type:* `String.t`, *default:* `nil`) - The text of the button.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:altText => String.t() | nil,
:color => GoogleApi.Chat.V1.Model.Color.t() | nil,
:disabled => boolean() | nil,
:icon => GoogleApi.Chat.V1.Model.GoogleAppsCardV1Icon.t() | nil,
:onClick => GoogleApi.Chat.V1.Model.GoogleAppsCardV1OnClick.t() | nil,
:text => String.t() | nil
}
field(:altText)
field(:color, as: GoogleApi.Chat.V1.Model.Color)
field(:disabled)
field(:icon, as: GoogleApi.Chat.V1.Model.GoogleAppsCardV1Icon)
field(:onClick, as: GoogleApi.Chat.V1.Model.GoogleAppsCardV1OnClick)
field(:text)
end
defimpl Poison.Decoder, for: GoogleApi.Chat.V1.Model.GoogleAppsCardV1Button do
def decode(value, options) do
GoogleApi.Chat.V1.Model.GoogleAppsCardV1Button.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Chat.V1.Model.GoogleAppsCardV1Button do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.467742 | 165 | 0.701861 |
f7a9e3e18d7cf2d22b4082b0320dac21d6e5bb6a | 1,819 | ex | Elixir | lib/mailgun_logger_web/endpoint.ex | jackjoe/mailgun_logger | 7d5a1989afdeb215bcd3753671c61bc25ed4e522 | ["MIT"] | 64 | 2020-02-10T20:42:46.000Z | 2021-11-16T10:47:50.000Z | lib/mailgun_logger_web/endpoint.ex | jackjoe/mailgun_logger | 7d5a1989afdeb215bcd3753671c61bc25ed4e522 | ["MIT"] | 16 | 2020-02-10T20:45:57.000Z | 2022-03-04T12:53:34.000Z | lib/mailgun_logger_web/endpoint.ex | jackjoe/mailgun_logger | 7d5a1989afdeb215bcd3753671c61bc25ed4e522 | ["MIT"] | 4 | 2020-04-03T17:13:19.000Z | 2020-07-17T12:56:31.000Z |
defmodule MailgunLoggerWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :mailgun_logger
@moduledoc false
socket("/socket", MailgunLoggerWeb.UserSocket)
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug(
Plug.Static,
at: "/",
from: :mailgun_logger,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket("/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket)
plug(Phoenix.LiveReloader)
plug(Phoenix.CodeReloader)
end
plug(
Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Jason
)
plug(Plug.MethodOverride)
plug(Plug.Head)
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug(
Plug.Session,
store: :cookie,
key: "_mailgun_logger_key",
signing_salt: "xI0ktzaL"
)
plug(MailgunLoggerWeb.Router)
@doc """
Callback invoked for dynamically configuring the endpoint.
It receives the endpoint configuration and checks if
configuration should be loaded from the system environment.
"""
def init(_key, config) do
if config[:load_from_system_env] do
port = System.fetch_env!("PORT") || raise "expected the PORT environment variable to be set"
{:ok, Keyword.put(config, :http, [:inet6, port: port])}
else
{:ok, config}
end
end
def build_conn(), do: %Plug.Conn{private: %{phoenix_endpoint: MailgunLoggerWeb.Endpoint}}
end
| 27.560606 | 98 | 0.699285 |
f7aa03e133ab5fb3f02c7506b65c30bd610df412 | 753 | exs | Elixir | machine_translation/MorpHIN/Learned/Resources/Set1/TrainingInstances/49.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | ["MIT"] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set1/TrainingInstances/49.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | ["MIT"] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set1/TrainingInstances/49.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | ["MIT"] | null | null | null |
**EXAMPLE FILE**
noun cm noun verb quantifier;
noun cm adjective verb quantifier;
verb pn adjective verb quantifier;
noun cm adjective verb quantifier;
noun cm noun verb quantifier;
cm cm adjective verb quantifier;
verb_aux pn adjective verb quantifier;
cm cm noun verb quantifier;
cm noun adjective neg quantifier;
noun cm adjective noun quantifier;
verb_aux conj adverb particle intensifier;
pnoun pnoun adjective verb quantifier;
noun verb noun verb quantifier;
SYM pnoun adjective verb quantifier;
cm pn noun verb quantifier;
cm pn noun particle quantifier;
noun cm quantifier noun intensifier;
SYM noun adjective verb quantifier;
cm noun adjective verb quantifier;
pnoun cm noun verb quantifier;
cm noun noun verb quantifier;
| 31.375 | 43 | 0.795485 |
f7aa2160361d9e21f14f5369d4bca53652707021 | 563 | exs | Elixir | survey/test/pledge_server_test.exs | RamanBut-Husaim/pragstudio-elixir | 21c723c933966798ae944ca2fd72697e9d0f2fa2 | ["MIT"] | null | null | null | survey/test/pledge_server_test.exs | RamanBut-Husaim/pragstudio-elixir | 21c723c933966798ae944ca2fd72697e9d0f2fa2 | ["MIT"] | null | null | null | survey/test/pledge_server_test.exs | RamanBut-Husaim/pragstudio-elixir | 21c723c933966798ae944ca2fd72697e9d0f2fa2 | ["MIT"] | null | null | null |
defmodule PledgeServerTest do
use ExUnit.Case, async: true
alias Survey.PledgeServer
test "caches the 3 most recent pledges and totals their amounts" do
PledgeServer.start_link(:ok)
PledgeServer.create_pledge("frodo", 100)
PledgeServer.create_pledge("sam", 200)
PledgeServer.create_pledge("mary", 300)
PledgeServer.create_pledge("pippin", 400)
expected_pledges = [{"pippin", 400}, {"mary", 300}, {"sam", 200} ]
assert PledgeServer.recent_pledges() == expected_pledges
assert PledgeServer.total_pledged() == 900
end
end | 29.631579 | 70 | 0.721137 |
f7aa41869e9b28602334fc6fda16be3b3c829d08 | 11,067 | ex | Elixir | lib/xgit/repository/test/config_test.ex | scouten/xgit | 0e2f849c83cdf39a9249b319d63ff3682c482c2f | [
"Apache-2.0"
] | 94 | 2019-05-28T05:29:54.000Z | 2022-02-18T20:03:20.000Z | lib/xgit/repository/test/config_test.ex | scouten/xgit | 0e2f849c83cdf39a9249b319d63ff3682c482c2f | [
"Apache-2.0"
] | 156 | 2019-05-26T03:27:24.000Z | 2020-10-08T05:44:26.000Z | lib/xgit/repository/test/config_test.ex | scouten/redo | 0e2f849c83cdf39a9249b319d63ff3682c482c2f | [
"Apache-2.0"
] | 5 | 2019-05-28T16:35:55.000Z | 2021-06-16T14:25:17.000Z | defmodule Xgit.Repository.Test.ConfigTest do
@moduledoc false
# Not normally part of the public API, but available for implementors of
# `Xgit.Repository.Storage` behaviour modules. Tests the callbacks related to
# `Xgit.ConfigEntry` to ensure correct implementation of the core contracts.
# Other tests may be necessary to ensure interop. (For example, the on-disk
# repository test code adds more tests to ensure correct interop with
# command-line git.)
# Users of this module must provide a `setup` callback that provides a
# `repo` member. This repository may be of any type, but should be "empty."
# An empty repo has the same data structures as an on-disk repo created
# via `git init` in a previously-empty directory.
# IMPORTANT: We assume that the repo is initialized with a minimal configuration
# that corresponds to the following:
# [core]
# repositoryformatversion = 0
# filemode = true
# bare = false
# logallrefupdates = true
# The official definition for this is located in on_disk_repo_test_case.ex,
# private function rewrite_config/1.
import Xgit.Util.SharedTestCase
define_shared_tests do
alias Xgit.ConfigEntry
alias Xgit.Repository.Storage
describe "get_config_entries/2" do
test "default case returns expected initial case", %{repo: repo} do
assert [_ | _] = config_entries = Storage.get_config_entries(repo)
assert [
%ConfigEntry{section: "core", subsection: nil, name: "bare", value: "false"},
%ConfigEntry{section: "core", subsection: nil, name: "filemode", value: "true"},
%ConfigEntry{
section: "core",
subsection: nil,
name: "logallrefupdates",
value: "true"
},
%ConfigEntry{
section: "core",
subsection: nil,
name: "repositoryformatversion",
value: "0"
}
] = Enum.sort(config_entries)
end
test "can filter by section", %{repo: repo} do
assert [_ | _] = config_entries = Storage.get_config_entries(repo, section: "core")
assert [
%ConfigEntry{section: "core", subsection: nil, name: "bare", value: "false"},
%ConfigEntry{section: "core", subsection: nil, name: "filemode", value: "true"},
%ConfigEntry{
section: "core",
subsection: nil,
name: "logallrefupdates",
value: "true"
},
%ConfigEntry{
section: "core",
subsection: nil,
name: "repositoryformatversion",
value: "0"
}
] = Enum.sort(config_entries)
end
test "can filter by subsection", %{repo: repo} do
assert [] =
_config_entries =
Storage.get_config_entries(repo, section: "core", subsection: "mumble")
end
test "can filter by section + name", %{repo: repo} do
assert [_ | _] =
config_entries = Storage.get_config_entries(repo, section: "core", name: "bare")
assert [
%ConfigEntry{section: "core", subsection: nil, name: "bare", value: "false"}
] = Enum.sort(config_entries)
end
end
describe "add_config_entry/3" do
test "basic case with default options", %{repo: repo} do
assert :ok =
Storage.add_config_entry(
repo,
%ConfigEntry{
section: "core",
subsection: nil,
name: "filemode",
value: "true"
}
)
config_entries = Storage.get_config_entries(repo)
assert [
%ConfigEntry{section: "core", subsection: nil, name: "bare", value: "false"},
%ConfigEntry{section: "core", subsection: nil, name: "filemode", value: "true"},
%ConfigEntry{
section: "core",
subsection: nil,
name: "logallrefupdates",
value: "true"
},
%ConfigEntry{
section: "core",
subsection: nil,
name: "repositoryformatversion",
value: "0"
}
] = Enum.sort(config_entries)
end
test "add?: true", %{repo: repo} do
# Yes, this example is nonsense.
assert :ok =
Storage.add_config_entry(
repo,
%ConfigEntry{
section: "core",
subsection: nil,
name: "filemode",
value: "false"
},
add?: true
)
config_entries =
Storage.get_config_entries(repo,
section: "core",
subsection: nil,
name: "filemode"
)
# Spec is agnostic as to whether new items get inserted at end of overall list
# or elsewhere; only that the values for this entry must be sorted in the order added.
assert [
%ConfigEntry{section: "core", subsection: nil, name: "filemode", value: "true"},
%ConfigEntry{section: "core", subsection: nil, name: "filemode", value: "false"}
] = config_entries
end
test "replace_all?: true", %{repo: repo} do
# Build upon previous nonsense example. Have multiple values and then replace them all.
assert :ok =
Storage.add_config_entry(
repo,
%ConfigEntry{
section: "core",
subsection: nil,
name: "filemode",
value: "false"
},
add?: true
)
# Not testing output; duplicates previous test.
assert :ok =
Storage.add_config_entry(
repo,
%ConfigEntry{
section: "core",
subsection: nil,
name: "filemode",
value: "42"
},
replace_all?: true
)
config_entries =
Storage.get_config_entries(repo,
section: "core",
subsection: nil,
name: "filemode"
)
# Spec is agnostic as to whether new items get inserted at end of overall list
# or elsewhere; only that the values for this entry must be sorted in the order added.
assert [
%ConfigEntry{section: "core", subsection: nil, name: "filemode", value: "42"}
] = config_entries
end
test "error: replacing multivar", %{repo: repo} do
# Build upon previous nonsense example. Have multiple values and then
# attempt to replace them all but without the replace_all?: true flag.
assert :ok =
Storage.add_config_entry(
repo,
%ConfigEntry{
section: "core",
subsection: nil,
name: "filemode",
value: "false"
},
add?: true
)
# Not testing output; duplicates previous test.
assert {:error, :replacing_multivar} =
Storage.add_config_entry(
repo,
%ConfigEntry{
section: "core",
subsection: nil,
name: "filemode",
value: "42"
}
)
config_entries =
Storage.get_config_entries(repo,
section: "core",
subsection: nil,
name: "filemode"
)
# Spec is agnostic as to whether new items get inserted at end of overall list
# or elsewhere; only that the values for this entry must be sorted in the order added.
assert [
%ConfigEntry{section: "core", subsection: nil, name: "filemode", value: "true"},
%ConfigEntry{section: "core", subsection: nil, name: "filemode", value: "false"}
] = config_entries
end
test "error: invalid entry", %{repo: repo} do
assert_raise ArgumentError,
"Xgit.Repository.Storage.add_config_entry/3: entry is invalid",
fn ->
Storage.add_config_entry(
repo,
%ConfigEntry{
section: "no spaces allowed",
subsection: nil,
name: "filemode",
value: "true"
}
)
end
end
end
describe "remove_config_entries/2" do
test "basic case without options (remove everything)", %{repo: repo} do
assert :ok = Storage.remove_config_entries(repo)
assert [] = Storage.get_config_entries(repo)
end
test "basic case: remove by section", %{repo: repo} do
assert :ok =
Storage.add_config_entry(
repo,
%ConfigEntry{
section: "other",
subsection: nil,
name: "filemode",
value: "false"
}
)
assert :ok = Storage.remove_config_entries(repo, section: "core")
config_entries = Storage.get_config_entries(repo)
assert [
%ConfigEntry{
section: "other",
subsection: nil,
name: "filemode",
value: "false"
}
] = Enum.sort(config_entries)
end
test "basic case: remove specific variable", %{repo: repo} do
assert :ok = Storage.remove_config_entries(repo, section: "core", name: "filemode")
config_entries = Storage.get_config_entries(repo)
assert [
%ConfigEntry{section: "core", subsection: nil, name: "bare", value: "false"},
%ConfigEntry{
section: "core",
subsection: nil,
name: "logallrefupdates",
value: "true"
},
%ConfigEntry{
section: "core",
subsection: nil,
name: "repositoryformatversion",
value: "0"
}
] = Enum.sort(config_entries)
end
end
end
end
| 34.69279 | 97 | 0.486943 |
f7aa5650f4e16ee8a105ead7c9fefafb536453b0 | 2,499 | ex | Elixir | hexdocs__pm__phoenix__up_and_running.html/hello/lib/hello_web/telemetry.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | ["Unlicense"] | null | null | null | hexdocs__pm__phoenix__up_and_running.html/hello/lib/hello_web/telemetry.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | ["Unlicense"] | null | null | null | hexdocs__pm__phoenix__up_and_running.html/hello/lib/hello_web/telemetry.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | ["Unlicense"] | null | null | null |
defmodule HelloWeb.Telemetry do
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:method, :route],
tag_values: &get_and_put_http_method/1,
unit: {:native, :millisecond}
),
summary("phoenix.live_view.mount.stop.duration",
unit: {:native, :millisecond},
tags: [:view, :connected?],
tag_values: &live_view_metric_tag_values/1
),
# Database Metrics
summary("hello.repo.query.total_time", unit: {:native, :millisecond}),
summary("hello.repo.query.decode_time", unit: {:native, :millisecond}),
summary("hello.repo.query.query_time", unit: {:native, :millisecond}),
summary("hello.repo.query.queue_time", unit: {:native, :millisecond}),
summary("hello.repo.query.idle_time", unit: {:native, :millisecond}),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {HelloWeb, :count_users, []}
]
end
defp get_and_put_http_method(%{conn: %{method: method}} = metadata) do
Map.put(metadata, :method, method)
end
defp live_view_metric_tag_values(metadata) do
metadata
|> Map.put(:view, inspect(metadata.socket.view))
|> Map.put(:connected?, get_connection_status(metadata.socket))
end
defp get_connection_status(%{connected?: true}), do: "Connected"
defp get_connection_status(%{connected?: false}), do: "Disconnected"
end
| 33.32 | 86 | 0.67427 |
f7aa84dfe084bb958cd08e8a1a4457b3beb391bb | 2,447 | ex | Elixir | lib/codes/codes_c02.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | ["Apache-2.0"] | null | null | null | lib/codes/codes_c02.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | ["Apache-2.0"] | null | null | null | lib/codes/codes_c02.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | ["Apache-2.0"] | null | null | null |
defmodule IcdCode.ICDCode.Codes_C02 do
alias IcdCode.ICDCode
def _C020 do
%ICDCode{full_code: "C020",
category_code: "C02",
short_code: "0",
full_name: "Malignant neoplasm of dorsal surface of tongue",
short_name: "Malignant neoplasm of dorsal surface of tongue",
category_name: "Malignant neoplasm of dorsal surface of tongue"
}
end
def _C021 do
%ICDCode{full_code: "C021",
category_code: "C02",
short_code: "1",
full_name: "Malignant neoplasm of border of tongue",
short_name: "Malignant neoplasm of border of tongue",
category_name: "Malignant neoplasm of border of tongue"
}
end
def _C022 do
%ICDCode{full_code: "C022",
category_code: "C02",
short_code: "2",
full_name: "Malignant neoplasm of ventral surface of tongue",
short_name: "Malignant neoplasm of ventral surface of tongue",
category_name: "Malignant neoplasm of ventral surface of tongue"
}
end
def _C023 do
%ICDCode{full_code: "C023",
category_code: "C02",
short_code: "3",
full_name: "Malignant neoplasm of anterior two-thirds of tongue, part unspecified",
short_name: "Malignant neoplasm of anterior two-thirds of tongue, part unspecified",
category_name: "Malignant neoplasm of anterior two-thirds of tongue, part unspecified"
}
end
def _C024 do
%ICDCode{full_code: "C024",
category_code: "C02",
short_code: "4",
full_name: "Malignant neoplasm of lingual tonsil",
short_name: "Malignant neoplasm of lingual tonsil",
category_name: "Malignant neoplasm of lingual tonsil"
}
end
def _C028 do
%ICDCode{full_code: "C028",
category_code: "C02",
short_code: "8",
full_name: "Malignant neoplasm of overlapping sites of tongue",
short_name: "Malignant neoplasm of overlapping sites of tongue",
category_name: "Malignant neoplasm of overlapping sites of tongue"
}
end
def _C029 do
%ICDCode{full_code: "C029",
category_code: "C02",
short_code: "9",
full_name: "Malignant neoplasm of tongue, unspecified",
short_name: "Malignant neoplasm of tongue, unspecified",
category_name: "Malignant neoplasm of tongue, unspecified"
}
end
end
| 34.957143 | 96 | 0.634246 |
f7aa88b4499ada8a1b15a90f01bcf8c6f3500247 | 555 | ex | Elixir | lib/logster/string_formatter.ex | juantascon/logster | 187a5e760818a043bded0475d61b72c27552677d | ["MIT"] | null | null | null | lib/logster/string_formatter.ex | juantascon/logster | 187a5e760818a043bded0475d61b72c27552677d | ["MIT"] | null | null | null | lib/logster/string_formatter.ex | juantascon/logster | 187a5e760818a043bded0475d61b72c27552677d | ["MIT"] | null | null | null |
defmodule Logster.StringFormatter do
def format(params) do
params
|> Enum.map(&format_field/1)
|> Enum.intersperse(?\s)
end
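# A usage sketch (illustrative): the formatter returns iodata, e.g.
#
#     Logster.StringFormatter.format(method: "GET", path: "/", status: 200)
#     |> IO.iodata_to_binary()
#     #=> "method=GET path=/ status=200"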
defp format_field({key, value}) do
[to_string(key), "=", format_value(value)]
end
defp format_value(value) when is_binary(value), do: value
defp format_value(value) when is_float(value), do: :erlang.float_to_binary(value, decimals: 3)
defp format_value(value) when is_atom(value) or is_integer(value), do: to_string(value)
defp format_value(value) when is_map(value), do: Jason.encode!(value)
end
| 32.647059 | 96 | 0.722523 |
f7aa907894d1bd0afd86a1283429c852ab5227cf | 4,267 | ex | Elixir | lib/bypass.ex | thiamsantos/bypass | 2a3b499ca44ea44d972c22a20b90669e2c7e2403 | ["MIT"] | null | null | null | lib/bypass.ex | thiamsantos/bypass | 2a3b499ca44ea44d972c22a20b90669e2c7e2403 | ["MIT"] | null | null | null | lib/bypass.ex | thiamsantos/bypass | 2a3b499ca44ea44d972c22a20b90669e2c7e2403 | ["MIT"] | null | null | null |
defmodule Bypass do
@moduledoc """
Bypass provides a quick way to create a custom Plug that can be put in place
instead of an actual HTTP server to return prebaked responses to client
requests.
This module is the main interface to the library.
"""
defstruct pid: nil, port: nil
@typedoc """
Represents a Bypass server process.
"""
@type t :: %__MODULE__{pid: pid, port: non_neg_integer}
import Bypass.Utils
require Logger
@doc """
Starts an Elixir process running a minimal Plug app. The process is a HTTP
handler and listens to requests on a TCP port on localhost.
Use the other functions in this module to declare which requests are handled
and set expectations on the calls.
"""
def open(opts \\ []) do
case DynamicSupervisor.start_child(Bypass.Supervisor, Bypass.Instance.child_spec(opts)) do
{:ok, pid} ->
port = Bypass.Instance.call(pid, :port)
debug_log("Did open connection #{inspect(pid)} on port #{inspect(port)}")
bypass = %Bypass{pid: pid, port: port}
setup_framework_integration(test_framework(), bypass)
bypass
other ->
other
end
end
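# A typical usage sketch in a test (illustrative; the client under test
# would be pointed at "http://localhost:#{bypass.port}"):
#
#     bypass = Bypass.open()
#     Bypass.expect(bypass, "GET", "/ping", fn conn ->
#       Plug.Conn.resp(conn, 200, ~s<{"ok": true}>)
#     end)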
defp setup_framework_integration(:ex_unit, bypass = %{pid: pid}) do
ExUnit.Callbacks.on_exit({Bypass, pid}, fn ->
do_verify_expectations(bypass.pid, ExUnit.AssertionError)
end)
end
defp setup_framework_integration(:espec, _bypass) do
end
@doc """
Can be called to immediately verify if the declared request expectations have
been met.
Returns `:ok` on success and raises an error on failure.
"""
def verify_expectations!(bypass) do
verify_expectations!(test_framework(), bypass)
end
defp verify_expectations!(:ex_unit, _bypass) do
raise "Not available in ExUnit, as it's configured automatically."
end
if Code.ensure_loaded?(ESpec) do
defp verify_expectations!(:espec, bypass) do
do_verify_expectations(bypass.pid, ESpec.AssertionError)
end
end
defp do_verify_expectations(bypass_pid, error_module) do
case Bypass.Instance.call(bypass_pid, :on_exit) do
:ok ->
:ok
:ok_call ->
:ok
{:error, :too_many_requests, {:any, :any}} ->
raise error_module, "Expected only one HTTP request for Bypass"
{:error, :too_many_requests, {method, path}} ->
raise error_module, "Expected only one HTTP request for Bypass at #{method} #{path}"
{:error, :unexpected_request, {:any, :any}} ->
raise error_module, "Bypass got an HTTP request but wasn't expecting one"
{:error, :unexpected_request, {method, path}} ->
raise error_module,
"Bypass got an HTTP request but wasn't expecting one at #{method} #{path}"
{:error, :not_called, {:any, :any}} ->
raise error_module, "No HTTP request arrived at Bypass"
{:error, :not_called, {method, path}} ->
raise error_module,
"No HTTP request arrived at Bypass at #{method} #{path}"
{:exit, {class, reason, stacktrace}} ->
:erlang.raise(class, reason, stacktrace)
end
end
@doc """
Re-opens the TCP socket on the same port. Blocks until the operation is
complete.
"""
@spec up(Bypass.t()) :: :ok | {:error, :already_up}
def up(%Bypass{pid: pid}),
do: Bypass.Instance.call(pid, :up)
@doc """
Closes the TCP socket. Blocks until the operation is complete.
"""
@spec down(Bypass.t()) :: :ok | {:error, :already_down}
def down(%Bypass{pid: pid}),
do: Bypass.Instance.call(pid, :down)
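# Handler declaration functions: `expect/2` installs a catch-all handler
# that must be hit at least once before the test ends, `expect/4` scopes
# the expectation to the given methods and paths, `expect_once/2` and
# `expect_once/4` require exactly one matching request, and `stub/4`
# installs a handler without adding any expectation.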
def expect(%Bypass{pid: pid}, fun),
do: Bypass.Instance.call(pid, {:expect, fun})
def expect(%Bypass{pid: pid}, methods, paths, fun),
do: Bypass.Instance.call(pid, {:expect, methods, paths, fun})
def expect_once(%Bypass{pid: pid}, fun),
do: Bypass.Instance.call(pid, {:expect_once, fun})
def expect_once(%Bypass{pid: pid}, methods, paths, fun),
do: Bypass.Instance.call(pid, {:expect_once, methods, paths, fun})
def stub(%Bypass{pid: pid}, methods, paths, fun),
do: Bypass.Instance.call(pid, {:stub, methods, paths, fun})
def pass(%Bypass{pid: pid}),
do: Bypass.Instance.call(pid, :pass)
defp test_framework do
Application.get_env(:bypass, :test_framework, :ex_unit)
end
end
| 30.478571 | 94 | 0.664636 |
f7aaa5267f1e1ef27138459190c037b87401b63f | 20,555 | ex | Elixir | lib/phoenix_live_view/upload_config.ex | jowrjowr/phoenix_live_view | d52da3b41292f9488e4a5c210aa1110c3f6420b0 | [
"MIT"
] | 4,659 | 2019-03-14T20:22:43.000Z | 2022-03-31T20:13:30.000Z | lib/phoenix_live_view/upload_config.ex | jowrjowr/phoenix_live_view | d52da3b41292f9488e4a5c210aa1110c3f6420b0 | [
"MIT"
] | 1,745 | 2019-03-14T22:04:38.000Z | 2022-03-31T17:26:25.000Z | deps/phoenix_live_view/lib/phoenix_live_view/upload_config.ex | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | [
"MIT"
] | 744 | 2019-03-14T20:48:05.000Z | 2022-03-25T14:35:04.000Z | defmodule Phoenix.LiveView.UploadEntry do
@moduledoc """
The struct representing an upload entry.
"""
alias Phoenix.LiveView.UploadEntry
defstruct progress: 0,
preflighted?: false,
upload_config: nil,
upload_ref: nil,
ref: nil,
uuid: nil,
valid?: false,
done?: false,
cancelled?: false,
client_name: nil,
client_size: nil,
client_type: nil,
client_last_modified: nil
@type t :: %__MODULE__{
progress: integer(),
upload_config: String.t() | :atom,
upload_ref: String.t(),
ref: String.t() | nil,
uuid: String.t() | nil,
valid?: boolean(),
done?: boolean(),
cancelled?: boolean(),
client_name: String.t() | nil,
client_size: integer() | nil,
client_type: String.t() | nil,
client_last_modified: integer() | nil
}
@doc false
def put_progress(%UploadEntry{} = entry, 100) do
%UploadEntry{entry | progress: 100, done?: true}
end
def put_progress(%UploadEntry{} = entry, progress) do
%UploadEntry{entry | progress: progress}
end
end
defmodule Phoenix.LiveView.UploadConfig do
@moduledoc """
The struct representing an upload.
"""
alias Phoenix.LiveView.UploadConfig
alias Phoenix.LiveView.UploadEntry
@default_max_entries 1
@default_max_file_size 8_000_000
@default_chunk_size 64_000
@default_chunk_timeout 10_000
@unregistered :unregistered
@invalid :invalid
@too_many_files :too_many_files
if Version.match?(System.version(), ">= 1.8.0") do
@derive {Inspect,
only: [
:name,
:ref,
:entries,
:max_entries,
:max_file_size,
:accept,
:errors,
:auto_upload?,
:progress_event
]}
end
defstruct name: nil,
cid: :unregistered,
client_key: nil,
max_entries: @default_max_entries,
max_file_size: @default_max_file_size,
chunk_size: @default_chunk_size,
chunk_timeout: @default_chunk_timeout,
entries: [],
entry_refs_to_pids: %{},
entry_refs_to_metas: %{},
accept: [],
acceptable_types: MapSet.new(),
acceptable_exts: MapSet.new(),
external: false,
allowed?: false,
ref: nil,
errors: [],
auto_upload?: false,
progress_event: nil
@type t :: %__MODULE__{
name: atom() | String.t(),
# a nil cid represents a LiveView socket
cid: :unregistered | nil | integer(),
client_key: String.t(),
max_entries: pos_integer(),
max_file_size: pos_integer(),
entries: list(),
entry_refs_to_pids: %{String.t() => pid() | :unregistered | :done},
entry_refs_to_metas: %{String.t() => map()},
accept: list() | :any,
acceptable_types: MapSet.t(),
acceptable_exts: MapSet.t(),
external:
(UploadEntry.t(), Phoenix.LiveView.Socket.t() ->
{:ok | :error, meta :: %{uploader: String.t()}, Phoenix.LiveView.Socket.t()})
| false,
allowed?: boolean,
errors: list(),
ref: String.t(),
auto_upload?: boolean(),
progress_event:
(name :: atom() | String.t(), UploadEntry.t(), Phoenix.LiveView.Socket.t() ->
{:noreply, Phoenix.LiveView.Socket.t()})
| nil
}
@doc false
# we require a random_ref in order to ensure unique calls to `allow_upload`
# invalidate old uploads on the client and expire old tokens for the same
# upload name
def build(name, random_ref, [_ | _] = opts) when is_atom(name) do
{html_accept, acceptable_types, acceptable_exts} =
case Keyword.fetch(opts, :accept) do
{:ok, [_ | _] = accept} ->
{types, exts} = validate_accept_option(accept)
{Enum.join(accept, ","), types, exts}
{:ok, :any} ->
{:any, MapSet.new(), MapSet.new()}
{:ok, other} ->
raise ArgumentError, """
invalid accept filter provided to allow_upload.
A list of the following unique file type specifiers are supported:
* A valid case-insensitive filename extension, starting with a period (".") character.
For example: .jpg, .pdf, or .doc.
* A valid MIME type string, with no extensions.
Alternately, you can provide the atom :any to allow any kind of file. Got:
#{inspect(other)}
"""
:error ->
raise ArgumentError, """
the :accept option is required when allowing uploads.
Provide a list of unique file type specifiers or the atom :any to allow any kind of file.
"""
end
external =
case Keyword.fetch(opts, :external) do
{:ok, func} when is_function(func, 2) ->
func
{:ok, other} ->
raise ArgumentError, """
invalid :external value provided to allow_upload.
Only an anonymous function receiving the upload entry and socket as arguments is supported. Got:
#{inspect(other)}
"""
:error ->
false
end
max_entries =
case Keyword.fetch(opts, :max_entries) do
{:ok, pos_integer} when is_integer(pos_integer) and pos_integer > 0 ->
pos_integer
{:ok, other} ->
raise ArgumentError, """
invalid :max_entries value provided to allow_upload.
Only a positive integer is supported (Defaults to #{@default_max_entries}). Got:
#{inspect(other)}
"""
:error ->
@default_max_entries
end
max_file_size =
case Keyword.fetch(opts, :max_file_size) do
{:ok, pos_integer} when is_integer(pos_integer) and pos_integer > 0 ->
pos_integer
{:ok, other} ->
raise ArgumentError, """
invalid :max_file_size value provided to allow_upload.
Only a positive integer is supported (Defaults to #{@default_max_file_size} bytes). Got:
#{inspect(other)}
"""
:error ->
@default_max_file_size
end
chunk_size =
case Keyword.fetch(opts, :chunk_size) do
{:ok, pos_integer} when is_integer(pos_integer) and pos_integer > 0 ->
pos_integer
{:ok, other} ->
raise ArgumentError, """
invalid :chunk_size value provided to allow_upload.
Only a positive integer is supported (Defaults to #{@default_chunk_size} bytes). Got:
#{inspect(other)}
"""
:error ->
@default_chunk_size
end
chunk_timeout =
case Keyword.fetch(opts, :chunk_timeout) do
{:ok, pos_integer} when is_integer(pos_integer) and pos_integer > 0 ->
pos_integer
{:ok, other} ->
raise ArgumentError, """
invalid :chunk_timeout value provided to allow_upload.
Only a positive integer in milliseconds is supported (Defaults to #{
@default_chunk_timeout
} ms). Got:
#{inspect(other)}
"""
:error ->
@default_chunk_timeout
end
progress_event =
case Keyword.fetch(opts, :progress) do
{:ok, func} when is_function(func, 3) ->
func
{:ok, other} ->
raise ArgumentError, """
invalid :progress value provided to allow_upload.
Only a 3-arity anonymous function is supported. Got:
#{inspect(other)}
"""
:error ->
nil
end
%UploadConfig{
ref: random_ref,
name: name,
max_entries: max_entries,
max_file_size: max_file_size,
entry_refs_to_pids: %{},
entry_refs_to_metas: %{},
accept: html_accept,
acceptable_types: acceptable_types,
acceptable_exts: acceptable_exts,
external: external,
chunk_size: chunk_size,
chunk_timeout: chunk_timeout,
progress_event: progress_event,
auto_upload?: Keyword.get(opts, :auto_upload, false),
allowed?: true
}
end
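  # Illustrative caller for the options validated above (a sketch based on
  # typical `Phoenix.LiveView.allow_upload/3` usage; the socket and the
  # `:avatar` name are assumptions, not part of this module):
  #
  #     allow_upload(socket, :avatar,
  #       accept: ~w(.jpg .jpeg .png),
  #       max_entries: 2,
  #       max_file_size: 8_000_000,
  #       auto_upload: true
  #     )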
@doc false
def entry_pid(%UploadConfig{} = conf, %UploadEntry{} = entry) do
case Map.fetch(conf.entry_refs_to_pids, entry.ref) do
{:ok, pid} when is_pid(pid) -> pid
{:ok, status} when status in [@unregistered, @invalid] -> nil
end
end
@doc false
def get_entry_by_pid(%UploadConfig{} = conf, channel_pid) when is_pid(channel_pid) do
Enum.find_value(conf.entry_refs_to_pids, fn {ref, pid} ->
if channel_pid == pid do
get_entry_by_ref(conf, ref)
end
end)
end
@doc false
def get_entry_by_ref(%UploadConfig{} = conf, ref) do
Enum.find(conf.entries, fn %UploadEntry{} = entry -> entry.ref === ref end)
end
@doc false
def unregister_completed_external_entry(%UploadConfig{} = conf, entry_ref) do
%UploadEntry{} = entry = get_entry_by_ref(conf, entry_ref)
drop_entry(conf, entry)
end
@doc false
def unregister_completed_entry(%UploadConfig{} = conf, entry_ref) do
%UploadEntry{} = entry = get_entry_by_ref(conf, entry_ref)
drop_entry(conf, entry)
end
@doc false
def registered?(%UploadConfig{} = conf) do
Enum.find(conf.entry_refs_to_pids, fn {_ref, maybe_pid} -> is_pid(maybe_pid) end)
end
@doc false
def mark_preflighted(%UploadConfig{} = conf) do
refs_awaiting = refs_awaiting_preflight(conf)
new_conf = %UploadConfig{
conf
| entries: for(entry <- conf.entries, do: %UploadEntry{entry | preflighted?: true})
}
{new_conf, for(ref <- refs_awaiting, do: get_entry_by_ref(new_conf, ref))}
end
defp refs_awaiting_preflight(%UploadConfig{} = conf) do
for entry <- conf.entries, not entry.preflighted?, do: entry.ref
end
@doc false
def register_entry_upload(%UploadConfig{} = conf, channel_pid, entry_ref)
when is_pid(channel_pid) do
case Map.fetch(conf.entry_refs_to_pids, entry_ref) do
{:ok, @unregistered} ->
{:ok,
%UploadConfig{
conf
| entry_refs_to_pids: Map.put(conf.entry_refs_to_pids, entry_ref, channel_pid)
}}
{:ok, existing_pid} when is_pid(existing_pid) ->
{:error, :already_registered}
:error ->
{:error, :disallowed}
end
end
# specifics on the `accept` attribute are illuminated in the spec:
# https://html.spec.whatwg.org/multipage/input.html#attr-input-accept
@accept_wildcards ~w(audio/* image/* video/*)
defp validate_accept_option(accept) do
{types, exts} =
Enum.reduce(accept, {[], []}, fn opt, {types_acc, exts_acc} ->
{type, exts} = accept_option!(opt)
{[type | types_acc], exts ++ exts_acc}
end)
{MapSet.new(types), MapSet.new(exts)}
end
# wildcards for media files
defp accept_option!(key) when key in @accept_wildcards, do: {key, []}
defp accept_option!(<<"." <> extname::binary>> = ext) do
if MIME.has_type?(extname) do
{MIME.type(extname), [ext]}
else
raise ArgumentError, """
invalid accept filter provided to allow_upload.
Expected a file extension with a known MIME type.
MIME types can be extended in your application configuration as follows:
config :mime, :types, %{
"application/vnd.api+json" => ["json-api"]
}
Got:
#{inspect(extname)}
"""
end
end
defp accept_option!(filter) when is_binary(filter) do
if MIME.extensions(filter) != [] do
{filter, []}
else
raise ArgumentError, """
invalid accept filter provided to allow_upload.
Expected a known MIME type without parameters.
MIME types can be extended in your application configuration as follows:
config :mime, :types, %{
"application/vnd.api+json" => ["json-api"]
}
Got:
#{inspect(filter)}
"""
end
end
@doc false
def disallow(%UploadConfig{} = conf), do: %UploadConfig{conf | allowed?: false}
@doc false
def uploaded_entries(%UploadConfig{} = conf) do
Enum.filter(conf.entries, fn %UploadEntry{} = entry -> entry.progress == 100 end)
end
@doc false
def update_entry(%UploadConfig{} = conf, entry_ref, func) do
new_entries =
Enum.map(conf.entries, fn
%UploadEntry{ref: ^entry_ref} = entry -> func.(entry)
%UploadEntry{ref: _ef} = entry -> entry
end)
recalculate_computed_fields(%UploadConfig{conf | entries: new_entries})
end
@doc false
def update_progress(%UploadConfig{} = conf, entry_ref, progress)
when is_integer(progress) and progress >= 0 and progress <= 100 do
update_entry(conf, entry_ref, fn entry -> UploadEntry.put_progress(entry, progress) end)
end
@doc false
def update_entry_meta(%UploadConfig{} = conf, entry_ref, %{} = meta) do
case Map.fetch(meta, :uploader) do
{:ok, _} ->
:noop
:error ->
raise ArgumentError,
"external uploader metadata requires an :uploader key. Got: #{inspect(meta)}"
end
new_metas = Map.put(conf.entry_refs_to_metas, entry_ref, meta)
%UploadConfig{conf | entry_refs_to_metas: new_metas}
end
@doc false
def put_entries(%UploadConfig{} = conf, entries) do
new_entries =
for entry <- entries, !get_entry_by_ref(conf, Map.fetch!(entry, "ref")), do: entry
pruned_conf = maybe_replace_sole_entry(conf, new_entries)
new_conf =
Enum.reduce(new_entries, pruned_conf, fn client_entry, acc ->
case cast_and_validate_entry(acc, client_entry) do
{:ok, new_conf} -> new_conf
{:error, new_conf} -> new_conf
end
end)
if too_many_files?(new_conf) do
{:error, put_error(new_conf, new_conf.ref, @too_many_files)}
else
case new_conf do
%UploadConfig{errors: []} = new_conf ->
{:ok, new_conf}
%UploadConfig{errors: [_ | _]} = new_conf ->
{:error, new_conf}
end
end
end
defp maybe_replace_sole_entry(%UploadConfig{max_entries: 1} = conf, new_entries) do
with [entry] <- conf.entries,
[_new_entry] <- new_entries do
cancel_entry(conf, entry)
else
_ -> conf
end
end
defp maybe_replace_sole_entry(%UploadConfig{} = conf, _new_entries) do
conf
end
defp too_many_files?(%UploadConfig{entries: entries, max_entries: max}) do
length(entries) > max
end
defp cast_and_validate_entry(%UploadConfig{} = conf, %{"ref" => ref} = client_entry) do
:error = Map.fetch(conf.entry_refs_to_pids, ref)
entry = %UploadEntry{
ref: ref,
upload_ref: conf.ref,
upload_config: conf.name,
client_name: Map.fetch!(client_entry, "name"),
client_size: Map.fetch!(client_entry, "size"),
client_type: Map.fetch!(client_entry, "type"),
client_last_modified: Map.get(client_entry, "last_modified")
}
{:ok, entry}
|> validate_max_file_size(conf)
|> validate_accepted(conf)
|> case do
{:ok, entry} ->
{:ok, put_valid_entry(conf, entry)}
{:error, reason} ->
{:error, put_invalid_entry(conf, entry, reason)}
end
end
defp put_valid_entry(conf, entry) do
entry = %UploadEntry{entry | valid?: true, uuid: generate_uuid()}
new_pids = Map.put(conf.entry_refs_to_pids, entry.ref, @unregistered)
new_metas = Map.put(conf.entry_refs_to_metas, entry.ref, %{})
%UploadConfig{
conf
| entries: conf.entries ++ [entry],
entry_refs_to_pids: new_pids,
entry_refs_to_metas: new_metas
}
end
defp put_invalid_entry(conf, entry, reason) do
entry = %UploadEntry{entry | valid?: false}
new_pids = Map.put(conf.entry_refs_to_pids, entry.ref, @invalid)
new_metas = Map.put(conf.entry_refs_to_metas, entry.ref, %{})
new_conf = %UploadConfig{
conf
| entries: conf.entries ++ [entry],
entry_refs_to_pids: new_pids,
entry_refs_to_metas: new_metas
}
put_error(new_conf, entry.ref, reason)
end
defp validate_max_file_size({:ok, %UploadEntry{client_size: size}}, %UploadConfig{
max_file_size: max
})
when size > max or not is_integer(size),
do: {:error, :too_large}
defp validate_max_file_size({:ok, entry}, _conf), do: {:ok, entry}
defp validate_accepted({:ok, %UploadEntry{} = entry}, conf) do
if accepted?(conf, entry) do
{:ok, entry}
else
{:error, :not_accepted}
end
end
defp validate_accepted({:error, _} = error, _conf), do: error
defp accepted?(%UploadConfig{accept: :any}, %UploadEntry{}), do: true
defp accepted?(
%UploadConfig{acceptable_types: acceptable_types} = conf,
%UploadEntry{client_type: client_type} = entry
) do
cond do
# wildcard
String.starts_with?(client_type, "image/") and "image/*" in acceptable_types -> true
String.starts_with?(client_type, "audio/") and "audio/*" in acceptable_types -> true
String.starts_with?(client_type, "video/") and "video/*" in acceptable_types -> true
# strict
client_type in acceptable_types -> true
String.downcase(Path.extname(entry.client_name), :ascii) in conf.acceptable_exts -> true
true -> false
end
end
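  # For example, with `accept: ~w(image/* .pdf)` an entry of client type
  # "image/png" passes via the wildcard branch above, while a client name of
  # "report.PDF" passes via the downcased ".pdf" extension check.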
defp recalculate_computed_fields(%UploadConfig{} = conf) do
recalculate_errors(conf)
end
defp recalculate_errors(%UploadConfig{ref: ref} = conf) do
if too_many_files?(conf) do
conf
else
new_errors =
Enum.filter(conf.errors, fn
{^ref, @too_many_files} -> false
_ -> true
end)
%UploadConfig{conf | errors: new_errors}
end
end
@doc false
def put_error(%UploadConfig{} = conf, _entry_ref, @too_many_files = reason) do
%UploadConfig{conf | errors: Enum.uniq(conf.errors ++ [{conf.ref, reason}])}
end
def put_error(%UploadConfig{} = conf, entry_ref, reason) do
%UploadConfig{conf | errors: conf.errors ++ [{entry_ref, reason}]}
end
@doc false
def cancel_entry(%UploadConfig{} = conf, %UploadEntry{} = entry) do
case entry_pid(conf, entry) do
channel_pid when is_pid(channel_pid) ->
Phoenix.LiveView.UploadChannel.cancel(channel_pid)
update_entry(conf, entry.ref, fn entry -> %UploadEntry{entry | cancelled?: true} end)
_ ->
drop_entry(conf, entry)
end
end
@doc false
def drop_entry(%UploadConfig{} = conf, %UploadEntry{ref: ref}) do
new_entries = for entry <- conf.entries, entry.ref != ref, do: entry
new_errors = Enum.filter(conf.errors, fn {error_ref, _} -> error_ref != ref end)
new_refs = Map.delete(conf.entry_refs_to_pids, ref)
new_metas = Map.delete(conf.entry_refs_to_metas, ref)
new_conf = %UploadConfig{
conf
| entries: new_entries,
errors: new_errors,
entry_refs_to_pids: new_refs,
entry_refs_to_metas: new_metas
}
recalculate_computed_fields(new_conf)
end
@doc false
def register_cid(%UploadConfig{} = conf, cid) do
%UploadConfig{conf | cid: cid}
end
# UUID generation
# Copyright (c) 2013 Plataformatec
# Copyright (c) 2020 Dashbit
# https://github.com/elixir-ecto/ecto/blob/99dff4c4403c258ea939fe9bdfb4e339baf05e13/lib/ecto/uuid.ex
defp generate_uuid do
<<u0::48, _::4, u1::12, _::2, u2::62>> = :crypto.strong_rand_bytes(16)
bin = <<u0::48, 4::4, u1::12, 2::2, u2::62>>
<<a1::4, a2::4, a3::4, a4::4, a5::4, a6::4, a7::4, a8::4, b1::4, b2::4, b3::4, b4::4, c1::4,
c2::4, c3::4, c4::4, d1::4, d2::4, d3::4, d4::4, e1::4, e2::4, e3::4, e4::4, e5::4, e6::4,
e7::4, e8::4, e9::4, e10::4, e11::4, e12::4>> = bin
<<e(a1), e(a2), e(a3), e(a4), e(a5), e(a6), e(a7), e(a8), ?-, e(b1), e(b2), e(b3), e(b4), ?-,
e(c1), e(c2), e(c3), e(c4), ?-, e(d1), e(d2), e(d3), e(d4), ?-, e(e1), e(e2), e(e3), e(e4),
e(e5), e(e6), e(e7), e(e8), e(e9), e(e10), e(e11), e(e12)>>
end
@compile {:inline, e: 1}
defp e(0), do: ?0
defp e(1), do: ?1
defp e(2), do: ?2
defp e(3), do: ?3
defp e(4), do: ?4
defp e(5), do: ?5
defp e(6), do: ?6
defp e(7), do: ?7
defp e(8), do: ?8
defp e(9), do: ?9
defp e(10), do: ?a
defp e(11), do: ?b
defp e(12), do: ?c
defp e(13), do: ?d
defp e(14), do: ?e
defp e(15), do: ?f
end
| 29.238976 | 102 | 0.603211 |
f7aad56368db5fe03a5d47a3bd30d5f7e101dcc8 | 5,940 | ex | Elixir | lib/crontab/date_helper.ex | h4cc/crontab | 05f901bcf3a8eeec5fafb59b2015ebf8176dcbf7 | ["MIT"] | 71 | 2017-01-17T12:43:20.000Z | 2022-03-11T09:31:32.000Z | lib/crontab/date_helper.ex | h4cc/crontab | 05f901bcf3a8eeec5fafb59b2015ebf8176dcbf7 | ["MIT"] | 70 | 2017-01-18T08:49:19.000Z | 2022-03-31T00:34:47.000Z | lib/crontab/date_helper.ex | h4cc/crontab | 05f901bcf3a8eeec5fafb59b2015ebf8176dcbf7 | ["MIT"] | 29 | 2017-01-02T07:24:11.000Z | 2022-03-11T15:57:56.000Z |
defmodule Crontab.DateHelper do
@moduledoc false
@type unit :: :year | :month | :day | :hour | :minute | :second | :microsecond
@units [
{:year, {nil, nil}},
{:month, {1, 12}},
{:day, {1, :end_of_month}},
{:hour, {0, 23}},
{:minute, {0, 59}},
{:second, {0, 59}},
{:microsecond, {{0, 0}, {999_999, 6}}}
]
@doc """
Get Start of a period of a date.
## Examples
iex> Crontab.DateHelper.beginning_of(~N[2016-03-14 01:45:45.123], :year)
~N[2016-01-01 00:00:00]
"""
@spec beginning_of(NaiveDateTime.t(), unit) :: NaiveDateTime.t()
def beginning_of(date, unit) do
_beginning_of(date, proceeding_units(unit))
end
@doc """
Get the end of a period of a date.
## Examples
iex> Crontab.DateHelper.end_of(~N[2016-03-14 01:45:45.123], :year)
~N[2016-12-31 23:59:59.999999]
"""
@spec end_of(NaiveDateTime.t(), unit) :: NaiveDateTime.t()
def end_of(date, unit) do
_end_of(date, proceeding_units(unit))
end
@doc """
Find the last occurrence of weekday in month.
"""
@spec last_weekday(NaiveDateTime.t(), Calendar.day_of_week()) :: Calendar.day()
def last_weekday(date, weekday) do
date
|> end_of(:month)
|> last_weekday(weekday, :end)
end
@doc """
Find the nth weekday of month.
"""
@spec nth_weekday(NaiveDateTime.t(), Calendar.day_of_week(), integer) :: Calendar.day()
def nth_weekday(date, weekday, n) do
date
|> beginning_of(:month)
|> nth_weekday(weekday, n, :start)
end
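  # For example (checked against the 2016 calendar): the second Monday of
  # March 2016 is the 14th, so `nth_weekday(~N[2016-03-01 00:00:00], 1, 2)`
  # returns 14.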
@doc """
Find the last occurrence of weekday in month.
"""
@spec last_weekday_of_month(NaiveDateTime.t()) :: Calendar.day()
def last_weekday_of_month(date) do
last_weekday_of_month(end_of(date, :month), :end)
end
@doc """
Find the next occurrence of weekday relative to date.
"""
@spec next_weekday_to(NaiveDateTime.t()) :: Calendar.day()
def next_weekday_to(date = %NaiveDateTime{year: year, month: month, day: day}) do
weekday = :calendar.day_of_the_week(year, month, day)
next_day = NaiveDateTime.add(date, 86_400, :second)
previous_day = NaiveDateTime.add(date, -86_400, :second)
cond do
weekday == 7 && next_day.month == date.month -> next_day.day
weekday == 7 -> NaiveDateTime.add(date, -86_400 * 2, :second).day
weekday == 6 && previous_day.month == date.month -> previous_day.day
weekday == 6 -> NaiveDateTime.add(date, 86_400 * 2, :second).day
true -> date.day
end
end
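  # Examples (checked against the 2016 calendar): 2016-07-30 is a Saturday,
  # so it snaps back to Friday the 29th; 2016-07-31 is a Sunday whose next
  # day falls in August, so it also snaps back to the 29th.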
@spec inc_year(NaiveDateTime.t()) :: NaiveDateTime.t()
def inc_year(date) do
leap_year? =
date
|> NaiveDateTime.to_date()
|> Date.leap_year?()
if leap_year? do
NaiveDateTime.add(date, 366 * 86_400, :second)
else
NaiveDateTime.add(date, 365 * 86_400, :second)
end
end
@spec dec_year(NaiveDateTime.t()) :: NaiveDateTime.t()
def dec_year(date) do
leap_year? =
date
|> NaiveDateTime.to_date()
|> Date.leap_year?()
if leap_year? do
NaiveDateTime.add(date, -366 * 86_400, :second)
else
NaiveDateTime.add(date, -365 * 86_400, :second)
end
end
@spec inc_month(NaiveDateTime.t()) :: NaiveDateTime.t()
def inc_month(date = %NaiveDateTime{day: day}) do
days =
date
|> NaiveDateTime.to_date()
|> Date.days_in_month()
NaiveDateTime.add(date, (days + 1 - day) * 86_400, :second)
end
@spec dec_month(NaiveDateTime.t()) :: NaiveDateTime.t()
def dec_month(date) do
days =
date
|> NaiveDateTime.to_date()
|> Date.days_in_month()
NaiveDateTime.add(date, days * -86_400, :second)
end
@spec _beginning_of(NaiveDateTime.t(), [{unit, {any, any}}]) :: NaiveDateTime.t()
defp _beginning_of(date, [{unit, {lower, _}} | tail]) do
_beginning_of(Map.put(date, unit, lower), tail)
end
defp _beginning_of(date, []), do: date
@spec _end_of(NaiveDateTime.t(), [{unit, {any, any}}]) :: NaiveDateTime.t()
defp _end_of(date, [{unit, {_, :end_of_month}} | tail]) do
upper =
date
|> NaiveDateTime.to_date()
|> Date.days_in_month()
_end_of(Map.put(date, unit, upper), tail)
end
defp _end_of(date, [{unit, {_, upper}} | tail]) do
_end_of(Map.put(date, unit, upper), tail)
end
defp _end_of(date, []), do: date
@spec proceeding_units(unit) :: [{unit, {any, any}}]
defp proceeding_units(unit) do
[_ | units] =
@units
|> Enum.reduce([], fn {key, value}, acc ->
cond do
Enum.count(acc) > 0 ->
Enum.concat(acc, [{key, value}])
key == unit ->
[{key, value}]
true ->
[]
end
end)
units
end
@spec nth_weekday(NaiveDateTime.t(), Calendar.day_of_week(), non_neg_integer, :start) :: Calendar.day()
defp nth_weekday(date = %NaiveDateTime{}, _, 0, :start),
do: NaiveDateTime.add(date, -86_400, :second).day
defp nth_weekday(date = %NaiveDateTime{year: year, month: month, day: day}, weekday, n, :start) do
if :calendar.day_of_the_week(year, month, day) == weekday do
nth_weekday(NaiveDateTime.add(date, 86_400, :second), weekday, n - 1, :start)
else
nth_weekday(NaiveDateTime.add(date, 86_400, :second), weekday, n, :start)
end
end
@spec last_weekday_of_month(NaiveDateTime.t(), :end) :: Calendar.day()
defp last_weekday_of_month(date = %NaiveDateTime{year: year, month: month, day: day}, :end) do
weekday = :calendar.day_of_the_week(year, month, day)
if weekday > 5 do
last_weekday_of_month(NaiveDateTime.add(date, -86_400, :second), :end)
else
day
end
end
@spec last_weekday(NaiveDateTime.t(), non_neg_integer, :end) :: Calendar.day()
defp last_weekday(date = %NaiveDateTime{year: year, month: month, day: day}, weekday, :end) do
if :calendar.day_of_the_week(year, month, day) == weekday do
day
else
last_weekday(NaiveDateTime.add(date, -86_400, :second), weekday, :end)
end
end
end
| 27.887324 | 100 | 0.625421 |
f7ab1777c41ab2f27efa775c612afaa6b025fd6f | 212 | ex | Elixir | lib/shutdown_flag.ex | cogini/shutdown_flag | 1045223cc764ff5b0efc2b7b89445299ddc4ef90 | ["Apache-2.0"] | 5 | 2018-05-20T01:49:48.000Z | 2020-01-13T23:22:09.000Z | lib/shutdown_flag.ex | cogini/shutdown_flag | 1045223cc764ff5b0efc2b7b89445299ddc4ef90 | ["Apache-2.0"] | null | null | null | lib/shutdown_flag.ex | cogini/shutdown_flag | 1045223cc764ff5b0efc2b7b89445299ddc4ef90 | ["Apache-2.0"] | null | null | null |
defmodule ShutdownFlag do
@moduledoc """
Documentation for ShutdownFlag.
"""
@doc """
Hello world.
## Examples
iex> ShutdownFlag.hello
:world
"""
def hello do
:world
end
end
| 11.157895 | 33 | 0.599057 |
f7ab231500b25f0cdac0aa02bb666be77452154c | 7,695 | exs | Elixir | test/bubble_match/sentence_test.exs | botsquad/bubble-match | c90325fb7ffdcd0b96ae0fd8f8aa0b3ebbe3ea0e | ["MIT"] | 20 | 2020-05-20T14:27:56.000Z | 2022-01-15T18:43:34.000Z | test/bubble_match/sentence_test.exs | botsquad/bubble-match | c90325fb7ffdcd0b96ae0fd8f8aa0b3ebbe3ea0e | ["MIT"] | 3 | 2020-05-22T18:50:34.000Z | 2020-06-23T14:58:04.000Z | test/bubble_match/sentence_test.exs | botsquad/bubble-match | c90325fb7ffdcd0b96ae0fd8f8aa0b3ebbe3ea0e | ["MIT"] | 1 | 2021-05-19T09:06:54.000Z | 2021-05-19T09:06:54.000Z |
defmodule BubbleMatch.SentenceTest do
use ExUnit.Case
alias BubbleMatch.{Entity, Sentence}
@spacy_json """
{"ents":[{"end":27,"label":"PERSON","start":21}],"sents":[{"end":9,"start":0},{"end":27,"start":10}],"text":"Hi there. My name is George","tokens":[{"dep":"ROOT","end":2,"head":0,"id":0,"lemma":"hi","norm":"hi","pos":"INTJ","start":0,"string":"Hi ","tag":"UH"},{"dep":"advmod","end":8,"head":0,"id":1,"lemma":"there","norm":"there","pos":"ADV","start":3,"string":"there","tag":"RB"},{"dep":"punct","end":9,"head":0,"id":2,"lemma":".","norm":".","pos":"PUNCT","start":8,"string":". ","tag":"."},{"dep":"poss","end":12,"head":4,"id":3,"lemma":"-PRON-","norm":"my","pos":"DET","start":10,"string":"My ","tag":"PRP$"},{"dep":"nsubj","end":17,"head":5,"id":4,"lemma":"name","norm":"name","pos":"NOUN","start":13,"string":"name ","tag":"NN"},{"dep":"ROOT","end":20,"head":5,"id":5,"lemma":"be","norm":"is","pos":"AUX","start":18,"string":"is ","tag":"VBZ"},{"dep":"attr","end":27,"head":5,"id":6,"lemma":"George","norm":"george","pos":"PROPN","start":21,"string":"George","tag":"NNP"}]}
"""
|> Jason.decode!()
test "from_spacy" do
[hithere, mynameis] = Sentence.sentences_from_spacy(@spacy_json)
assert [_, [_, _, _]] = hithere.tokenizations
assert [with_ents, raw_tokens] = mynameis.tokenizations
assert ~w(my name is george) == Enum.map(raw_tokens, & &1.value["norm"])
assert ~w(spacy spacy spacy entity)a == Enum.map(with_ents, & &1.type)
assert [_, _, _, %{value: %Entity{value: "George"}}] = with_ents
end
test "match from spacy" do
all = [hithere, mynameis] = Sentence.sentences_from_spacy(@spacy_json)
assert {:match, _} = BubbleMatch.Matcher.match("%NOUN", mynameis)
assert {:match, _} = BubbleMatch.Matcher.match("my name is", mynameis)
assert :nomatch = BubbleMatch.Matcher.match("my name is", hithere)
assert {:match, _} = BubbleMatch.Matcher.match("[Start] my name is", all)
assert {:match, _} = BubbleMatch.Matcher.match("hi there \".\" [End]", all)
assert {:match, m} = BubbleMatch.Matcher.match("[person]", all)
assert [%{value: %{kind: "person", value: "George"}}] = m["person"]
end
@hello_world_json """
{"text": "Hello, w\u00f3rld", "ents": [], "sents": [{"start": 0, "end": 12}], "tokens": [{"id": 0, "start": 0, "end": 5, "pos": "INTJ", "tag": "UH", "dep": "ROOT", "head": 0, "string": "Hello", "lemma": "hello", "norm": "hello"}, {"id": 1, "start": 5, "end": 6, "pos": "PUNCT", "tag": ",", "dep": "punct", "head": 2, "string": ", ", "lemma": ",", "norm": ","}, {"id": 2, "start": 7, "end": 12, "pos": "NOUN", "tag": "NN", "dep": "npadvmod", "head": 0, "string": "w\u00f3rld", "lemma": "w\u00f3rld", "norm": "w\u00f3rld"}]}
"""
|> Jason.decode!()
test "spacy ignore punctuation, strip accents" do
[sent] = Sentence.sentences_from_spacy(@hello_world_json)
assert {:match, _} = BubbleMatch.Matcher.match("hello world", sent)
end
@duckling_json """
[{"body":"the day after tomorrow","start":15,"value":{"values":[{"value":"2020-04-30T00:00:00.000+02:00","grain":"day","type":"value"}],"value":"2020-04-30T00:00:00.000+02:00","grain":"day","type":"value"},"end":37,"dim":"time","latent":false},{"body":"10 miles","start":39,"value":{"value":10,"type":"value","unit":"mile"},"end":47,"dim":"distance","latent":false}]
"""
|> Jason.decode!()
test "add duckling entities" do
sentence =
Sentence.naive_tokenize("My birthday is the day after tomorrow, 10 miles away")
|> Sentence.add_duckling_entities(@duckling_json)
assert [with_ents, with_ents_punct | _] = sentence.tokenizations
assert [
%{value: "my"},
%{value: "birthday"},
%{value: "is"},
%{
type: :entity,
raw: "the day after tomorrow",
value: %Entity{kind: "time", value: "2020-04" <> _, extra: %{"grain" => "day"}}
},
%{
type: :entity,
raw: "10 miles",
value: %Entity{kind: "distance", value: 10, extra: %{"unit" => "mile"}}
},
_away
] = with_ents
assert [
%{value: "my"},
%{value: "birthday"},
%{value: "is"},
%{
type: :entity,
value: %Entity{kind: "time", value: "2020-04" <> _, extra: %{"grain" => "day"}}
},
%{value: ","},
%{
type: :entity,
value: %Entity{kind: "distance", value: 10, extra: %{"unit" => "mile"}}
},
_away
] = with_ents_punct
end
test "encoding" do
[hithere, _] = Sentence.sentences_from_spacy(@spacy_json)
assert {:ok, encoded} = Jason.encode(hithere)
assert "{\"__struct__\":" <> _ = encoded
end
test "access; to_string" do
[hithere, _] = Sentence.sentences_from_spacy(@spacy_json)
assert "Hi there." == hithere["text"]
assert "Hi there." == hithere[:text]
assert "Hi there." == to_string(hithere)
end
@time_duckling """
[{"body":"9 p.m.","start":0,"value":{"values":[{"value":"2020-06-10T21:00:00.000+02:00","grain":"hour","type":"value"},{"value":"2020-06-11T21:00:00.000+02:00","grain":"hour","type":"value"},{"value":"2020-06-12T21:00:00.000+02:00","grain":"hour","type":"value"}],"value":"2020-06-10T21:00:00.000+02:00","grain":"hour","type":"value"},"end":6,"dim":"time","latent":false}]
"""
|> Jason.decode!()
@time_spacy """
{"text":"9 p.m.","ents":[],"sents":[{"start":0,"end":1},{"start":2,"end":6}],"tokens":[{"id":0,"start":0,"end":1,"pos":"NUM","tag":"CD","dep":"ROOT","head":0,"string":"9 ","lemma":"9","norm":"9"},{"id":1,"start":2,"end":6,"pos":"NOUN","tag":"NN","dep":"ROOT","head":1,"string":"p.m.","lemma":"p.m.","norm":"p.m."}]}
"""
|> Jason.decode!()
test "overlapping duckling entities" do
[a, b] = Sentence.sentences_from_spacy(@time_spacy)
assert [_] = a.tokenizations
a = a |> Sentence.add_duckling_entities(@time_duckling)
assert [with_ents, _raw_tokens] = a.tokenizations
assert List.first(with_ents).value.kind == "time"
assert [_] = b.tokenizations
b = b |> Sentence.add_duckling_entities(@time_duckling)
assert [with_ents, _raw_tokens] = b.tokenizations
assert List.first(with_ents).value.kind == "time"
end
@spacy_json """
{"detected_language": null, "detected_language_prob": 0.12450417876243591, "ents": [], "nlp_language": "en", "sents": [{"end": 8, "start": 0}], "text": "Thanks 👍", "tokens": [{"dep": "compound", "end": 6, "head": 1, "id": 0, "lemma": "thanks", "norm": "thanks", "pos": "INTJ", "start": 0, "string": "Thanks ", "tag": "UH"}, {"dep": "ROOT", "end": 8, "head": 1, "id": 1, "lemma": "👍", "norm": "👍", "pos": "PROPN", "start": 7, "string": "👍", "tag": "NNP"}]}
"""
|> Jason.decode!()
test "Emoji can be matched" do
[s = %{tokenizations: [_tok, with_punct]}] = Sentence.sentences_from_spacy(@spacy_json)
assert [_, %{value: %{"pos" => "EMOJI"}}] = with_punct
assert {:match, _} = BubbleMatch.Matcher.match("%EMOJI", s)
assert {:match, _} = BubbleMatch.Matcher.match("%EMOJI [End]", s)
assert {:match, _} = BubbleMatch.Matcher.match("[Start] thanks [End]", s)
assert {:match, _} = BubbleMatch.Matcher.match("[Start] thanks %EMOJI [End]", s)
assert {:match, _} = BubbleMatch.Matcher.match("'👍'", s)
end
end
| 50.625 | 994 | 0.544639 |
f7ab32d5f99c1f59edc6019f70efa223dd6b0f14 | 700 | exs | Elixir | config/test.exs | JeffyMesquita/elixirHeat | 3ec3c59021e90058f00c2eb288a5e6c286e96342 | ["MIT"] | null | null | null | config/test.exs | JeffyMesquita/elixirHeat | 3ec3c59021e90058f00c2eb288a5e6c286e96342 | ["MIT"] | null | null | null | config/test.exs | JeffyMesquita/elixirHeat | 3ec3c59021e90058f00c2eb288a5e6c286e96342 | ["MIT"] | null | null | null |
use Mix.Config
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :elixirHeat, ElixirHeat.Repo,
username: "postgres",
password: "Je4217789|4217789|",
database: "elixirheat_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
port: 9999,
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :elixirHeat, ElixirHeatWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 29.166667 | 69 | 0.751429 |
f7ab3a70a2591a37df09339f5ee10fa16931ba65 | 1,029 | exs | Elixir | mix.exs | akoutmos/ex_todo | f1aaf2ccb4ecdefb3fb48a36ef2cc76a0d9045fa | ["MIT"] | 14 | 2019-06-18T04:56:18.000Z | 2021-03-22T07:59:43.000Z | mix.exs | akoutmos/ex_todo | f1aaf2ccb4ecdefb3fb48a36ef2cc76a0d9045fa | ["MIT"] | null | null | null | mix.exs | akoutmos/ex_todo | f1aaf2ccb4ecdefb3fb48a36ef2cc76a0d9045fa | ["MIT"] | null | null | null |
defmodule ExTodo.MixProject do
use Mix.Project
def project do
[
app: :ex_todo,
version: "0.1.0",
elixir: "~> 1.7",
name: "ExTodo",
source_url: "https://github.com/akoutmos/ex_todo",
homepage_url: "https://hex.pm/packages/ex_todo",
description: "A simple utility to find codetags within a project",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
docs: [
main: "readme",
extras: ["README.md"]
],
package: package(),
deps: deps()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp package() do
[
name: "ex_todo",
files: ~w(lib mix.exs README.md LICENSE CHANGELOG.md),
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/akoutmos/ex_todo"}
]
end
defp elixirc_paths(:test), do: ["lib", "test/sample_files"]
defp elixirc_paths(_), do: ["lib"]
defp deps do
[
{:ex_doc, ">= 0.0.0"}
]
end
end
| 21.4375 | 72 | 0.564626 |
f7ab44328cfdc35fe90a599211dc214a2074e2d1 | 2,078 | ex | Elixir | lib/mix/demo/ci.ex | bitpal/bitpal_demo | 989df08f60dadc5d4e340fef91890cf8bb3106ad | ["MIT"] | null | null | null | lib/mix/demo/ci.ex | bitpal/bitpal_demo | 989df08f60dadc5d4e340fef91890cf8bb3106ad | ["MIT"] | 1 | 2021-06-04T13:53:01.000Z | 2021-06-04T13:53:01.000Z | lib/mix/demo/ci.ex | bitpal/bitpal_demo | 989df08f60dadc5d4e340fef91890cf8bb3106ad | ["MIT"] | null | null | null |
defmodule Mix.Tasks.Demo.Ci do
@moduledoc """
CI, mostly as generated from `mix ci.init`
"""
use Mix.Task
import IO.ANSI
alias Job.Pipeline
@impl Mix.Task
def run(_args) do
{:ok, _} = Application.ensure_all_started(:ci)
setup_telemetry()
Job.run(
Pipeline.sequence([
mix("compile --warnings-as-errors"),
Pipeline.parallel([
mix("test"),
mix("format --check-formatted"),
mix("credo --all --strict")
])
]),
timeout: :timer.minutes(10),
telemetry_id: [:ci]
)
|> report_errors()
end
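  # The pipeline above compiles first, then fans out tests, format checking
  # and Credo in parallel. It is invoked through the task name derived from
  # this module, i.e. `mix demo.ci`.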
defp mix(arg, opts \\ []),
do: cmd("mix #{arg}", Config.Reader.merge([env: [mix_env: "test"]], opts))
defp cmd(cmd, opts) do
handler = &IO.write(message(&1, cmd))
cmd_opts = [handler: handler, telemetry_id: [:cmd]] ++ Keyword.merge([pty: true], opts)
OsCmd.action(cmd, cmd_opts)
end
defp message(:starting, cmd), do: [blue(), "starting #{cmd}\n", reset()]
defp message({:output, output}, _cmd), do: output
defp message({:stopped, _status}, _cmd), do: ""
defp setup_telemetry,
do: :telemetry.attach_many("handler", [[:ci, :stop], [:cmd, :stop]], &report_duration/4, nil)
defp report_duration(event, %{duration: duration}, meta, _config),
do: info("#{event_name(event, meta)} took #{format_duration(duration)} seconds")
defp event_name([:ci, :stop], _meta), do: "CI checks"
defp event_name([:cmd, :stop], meta), do: meta.cmd
defp format_duration(duration) do
duration = div(System.convert_time_unit(duration, :native, :millisecond), 100)
if rem(duration, 10) == 0, do: div(duration, 10), else: Float.round(duration / 10, 1)
end
defp report_errors({:ok, _}), do: info("All the checks have passed 🎉")
defp report_errors({:error, errors}),
do: [errors] |> List.flatten() |> Enum.map(&error/1) |> Enum.join("\n") |> Mix.raise()
defp error(%OsCmd.Error{message: message}), do: message
defp error(other), do: inspect(other)
defp info(message), do: Mix.shell().info([bright(), blue(), message, reset()])
end
| 31.014925 | 97 | 0.622714 |
f7ab44328cfdc35fe90a599211dc214a2074e2d1 | 438 | exs | Elixir | apps/core/priv/repo/migrations/20190123162535_declaration_request_drop_unused_indexes.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | ["Apache-2.0"] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/core/priv/repo/migrations/20190123162535_declaration_request_drop_unused_indexes.exs | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | ["Apache-2.0"] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/core/priv/repo/migrations/20190123162535_declaration_request_drop_unused_indexes.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | ["Apache-2.0"] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z |
defmodule Core.Repo.Migrations.DeclarationRequestDropUnusedIndexes do
use Ecto.Migration
@disable_ddl_transaction true
def change do
execute("DROP INDEX IF EXISTS cabinet_declaration_req_index;")
execute("DROP INDEX IF EXISTS create_declatation_req_index;")
execute("DROP INDEX IF EXISTS data_legal_entity_id_inserted_at_index;")
execute("DROP INDEX IF EXISTS data_legal_entity_id_inserted_at_index_1;")
end
end
| 33.692308 | 77 | 0.810502 |
f7ab6da50fd940b5691dbb2cb35bb5f5a8fe9442 | 6,250 | ex | Elixir | deps/mariaex/lib/mariaex/query.ex | rifkyaziz/csvparser | 2049f51a7ca5ace7b357027f09beda64caacef45 | ["MIT"] | 1 | 2017-11-27T06:00:32.000Z | 2017-11-27T06:00:32.000Z | deps/mariaex/lib/mariaex/query.ex | rifkyaziz/csvparser | 2049f51a7ca5ace7b357027f09beda64caacef45 | ["MIT"] | null | null | null | deps/mariaex/lib/mariaex/query.ex | rifkyaziz/csvparser | 2049f51a7ca5ace7b357027f09beda64caacef45 | ["MIT"] | null | null | null |
defmodule Mariaex.Query do
@moduledoc """
Query struct returned from a successfully prepared query. Its fields are:
* `name` - The name of the prepared statement;
* `statement` - The prepared statement;
* `num_params` - The number of parameters;
* `ref` - Reference that uniquely identifies when the query was prepared;
"""
defstruct name: "",
reserved?: false,
binary_as: nil,
type: nil,
statement: "",
num_params: nil,
ref: nil
end
defimpl DBConnection.Query, for: Mariaex.Query do
@moduledoc """
Implementation of `DBConnection.Query` protocol.
"""
use Bitwise
import Mariaex.Coder.Utils
alias Mariaex.Messages
alias Mariaex.Column
@doc """
Parse a query.
This function is called to parse a query term before it is prepared.
"""
def parse(%{name: name, statement: statement, ref: nil} = query, _) do
%{query | name: IO.iodata_to_binary(name), statement: IO.iodata_to_binary(statement)}
end
@doc """
Describe a query.
This function is called to describe a query after it is prepared.
"""
def describe(query, _res) do
query
end
@doc """
Encode parameters using a query.
This function is called to encode a query before it is executed.
"""
def encode(%Mariaex.Query{type: nil} = query, _params, _opts) do
raise ArgumentError, "query #{inspect query} has not been prepared"
end
def encode(%Mariaex.Query{num_params: num_params} = query, params, _opts)
when length(params) != num_params do
raise ArgumentError, "parameters must be of length #{num_params} for query #{inspect query}"
end
def encode(%Mariaex.Query{type: :binary, binary_as: binary_as}, params, _opts) do
parameters_to_binary(params, binary_as)
end
def encode(%Mariaex.Query{type: :text}, [], _opts) do
[]
end
defp parameters_to_binary([], _binary_as), do: <<>>
defp parameters_to_binary(params, binary_as) do
set = {0, 0, <<>>, <<>>}
{nullint, len, typesbin, valuesbin} = Enum.reduce(params, set, fn(p, acc) -> encode_params(p, acc, binary_as) end)
nullbin_size = div(len + 7, 8)
<< nullint :: size(nullbin_size)-little-unit(8), 1 :: 8, typesbin :: binary, valuesbin :: binary >>
end
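  # The layout above follows the MySQL binary protocol for COM_STMT_EXECUTE:
  # a little-endian null bitmap with one bit per parameter (padded to whole
  # bytes), the new-params-bound flag (1), then the type block and value block.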
defp encode_params(param, {nullint, idx, typesbin, valuesbin}, binary_as) do
{nullvalue, type, value} = encode_param(param, binary_as)
types_part = case type do
:field_type_longlong ->
# Set the unsigned byte if value > 2^63 (bigint's max signed value).
if param > 9_223_372_036_854_775_807 do
<< typesbin :: binary, 0x8008 :: 16-little >>
else
<< typesbin :: binary, 0x08 :: 16-little >>
end
_ ->
<< typesbin :: binary, Messages.__type__(:id, type) :: 16-little >>
end
{
nullint ||| (nullvalue <<< idx),
idx + 1,
types_part,
<< valuesbin :: binary, value :: binary >>
}
end
defp encode_param(nil, _binary_as),
do: {1, :field_type_null, ""}
defp encode_param(bin, binary_as) when is_binary(bin),
do: {0, binary_as, << to_length_encoded_integer(byte_size(bin)) :: binary, bin :: binary >>}
defp encode_param(int, _binary_as) when is_integer(int),
do: {0, :field_type_longlong, << int :: 64-little >>}
defp encode_param(float, _binary_as) when is_float(float),
do: {0, :field_type_double, << float :: 64-little-float >>}
defp encode_param(true, _binary_as),
do: {0, :field_type_tiny, << 01 >>}
defp encode_param(false, _binary_as),
do: {0, :field_type_tiny, << 00 >>}
defp encode_param(%Decimal{} = value, _binary_as) do
bin = Decimal.to_string(value, :normal)
{0, :field_type_newdecimal, << to_length_encoded_integer(byte_size(bin)) :: binary, bin :: binary >>}
end
defp encode_param({year, month, day}, _binary_as),
do: {0, :field_type_date, << 4::8-little, year::16-little, month::8-little, day::8-little>>}
defp encode_param({hour, min, sec, 0}, _binary_as),
do: {0, :field_type_time, << 8 :: 8-little, 0 :: 8-little, 0 :: 32-little, hour :: 8-little, min :: 8-little, sec :: 8-little >>}
defp encode_param({hour, min, sec, msec}, _binary_as),
do: {0, :field_type_time, << 12 :: 8-little, 0 :: 8-little, 0 :: 32-little, hour :: 8-little, min :: 8-little, sec :: 8-little, msec :: 32-little>>}
defp encode_param({{year, month, day}, {hour, min, sec}}, _binary_as),
do: {0, :field_type_datetime, << 7::8-little, year::16-little, month::8-little, day::8-little, hour::8-little, min::8-little, sec::8-little>>}
defp encode_param({{year, month, day}, {hour, min, sec, 0}}, _binary_as),
do: {0, :field_type_datetime, << 7::8-little, year::16-little, month::8-little, day::8-little, hour::8-little, min::8-little, sec::8-little>>}
defp encode_param({{year, month, day}, {hour, min, sec, msec}}, _binary_as),
do: {0, :field_type_datetime, <<11::8-little, year::16-little, month::8-little, day::8-little, hour::8-little, min::8-little, sec::8-little, msec::32-little>>}
defp encode_param(other, _binary_as),
do: raise ArgumentError, "query has invalid parameter #{inspect other}"
def decode(_, {res, nil}, _) do
%Mariaex.Result{res | rows: nil}
end
def decode(_, {res, columns}, opts) do
%Mariaex.Result{rows: rows} = res
decoded = do_decode(rows, opts)
include_table_name = opts[:include_table_name]
columns = for %Column{} = column <- columns, do: column_name(column, include_table_name)
%Mariaex.Result{res | rows: decoded, columns: columns, num_rows: length(decoded)}
end
## helpers
defp column_name(%Column{name: name, table: table}, true), do: "#{table}.#{name}"
defp column_name(%Column{name: name}, _), do: name
defp do_decode(rows, opts) do
case Keyword.get(opts, :decode_mapper) do
nil ->
Enum.reverse(rows)
mapper when is_function(mapper, 1) ->
do_decode(rows, mapper, [])
end
end
defp do_decode([row | rows], mapper, acc) do
do_decode(rows, mapper, [mapper.(row) | acc])
end
defp do_decode([], _, acc) do
acc
end
end
defimpl String.Chars, for: Mariaex.Query do
def to_string(%Mariaex.Query{statement: statement}) do
IO.iodata_to_binary(statement)
end
end
| 37.42515 | 163 | 0.64912 |
f7ab7a307f9f277559a60e7f47322bbc3c89fb3f | 334 | exs | Elixir | apps/artemis/priv/repo/migrations/20190201222007_create_roles.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | ["MIT"] | 10 | 2019-07-05T19:59:20.000Z | 2021-05-23T07:36:11.000Z | apps/artemis/priv/repo/migrations/20190201222007_create_roles.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | ["MIT"] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis/priv/repo/migrations/20190201222007_create_roles.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | ["MIT"] | 4 | 2019-07-05T20:04:08.000Z | 2021-05-13T16:28:33.000Z |
defmodule Artemis.Repo.Migrations.CreateRoles do
use Ecto.Migration
def change do
create table(:roles) do
add :description, :text
add :name, :string
add :slug, :string
timestamps(type: :utc_datetime)
end
create unique_index(:roles, [:slug])
create unique_index(:roles, [:name])
end
end
| 20.875 | 48 | 0.661677 |
f7abd7f8f2d33f5f73a0f0edcae54d97e2f60c76 | 1,961 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1_list_processors_response.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1_list_processors_response.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1_list_processors_response.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1ListProcessorsResponse do
@moduledoc """
Response message for list processors.
## Attributes
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Points to the next processor, otherwise empty.
* `processors` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1Processor.t)`, *default:* `nil`) - The list of processors.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:nextPageToken => String.t() | nil,
:processors =>
list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1Processor.t()) | nil
}
field(:nextPageToken)
field(:processors,
as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1Processor,
type: :list
)
end
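  # Example shape of a decoded response (field values are illustrative only):
  #
  #     %GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1ListProcessorsResponse{
  #       nextPageToken: "abc123",
  #       processors: []
  #     }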
defimpl Poison.Decoder,
for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1ListProcessorsResponse do
def decode(value, options) do
GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1ListProcessorsResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1ListProcessorsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.683333 | 145 | 0.740439 |
f7abdda0f8dcbbd1b380f6125abd9c802b5bdfe0 | 491 | ex | Elixir | lib/ppc/subscription/subscriber_request.ex | somatoko/ppc | 464379c3e56132519a7fc53717071c07d06aca85 | ["Apache-2.0"] | null | null | null | lib/ppc/subscription/subscriber_request.ex | somatoko/ppc | 464379c3e56132519a7fc53717071c07d06aca85 | ["Apache-2.0"] | null | null | null | lib/ppc/subscription/subscriber_request.ex | somatoko/ppc | 464379c3e56132519a7fc53717071c07d06aca85 | ["Apache-2.0"] | null | null | null |
defmodule Ppc.Subscription.SubscriberRequest do
alias Ppc.Subscription.{PayerName, PhoneWithType, ShippingDetail}
defstruct [
:name,
:email_address,
:payer_id,
:phone,
:shipping_address,
:payment_source
]
@type t :: %__MODULE__{
name: PayerName.t(),
email_address: String.t(),
payer_id: String.t(),
phone: PhoneWithType.t(),
shipping_address: ShippingDetail.t(),
payment_source: any
}
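  # Example shape (field values are illustrative only; the `PayerName` keys
  # are assumptions based on PayPal's subscriber schema, not verified here):
  #
  #     %Ppc.Subscription.SubscriberRequest{
  #       email_address: "jo@example.com",
  #       name: %PayerName{given_name: "Jo", surname: "Doe"}
  #     }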
end
| 22.318182 | 67 | 0.615071 |
f7ac1220914cf74e6cdfc9d0eba8df811f1778bb | 71 | exs | Elixir | apps/publishing/test/test_helper.exs | pinpox/branchpage | e478ed9085c06cc3c5680b0ca4dc20eff2e74653 | ["MIT"] | 49 | 2021-06-06T05:40:30.000Z | 2021-08-23T04:50:46.000Z | apps/publishing/test/test_helper.exs | felipelincoln/mvp | 6f3ca7dfafe884af40883f84f3eb825bb061c974 | ["MIT"] | 40 | 2021-01-09T16:50:50.000Z | 2021-10-01T16:27:35.000Z | apps/publishing/test/test_helper.exs | felipelincoln/mvp | 6f3ca7dfafe884af40883f84f3eb825bb061c974 | ["MIT"] | 5 | 2021-02-20T12:58:39.000Z | 2022-02-01T02:23:23.000Z |
ExUnit.start()
Mox.defmock(Publishing.Tesla.Mock, for: Tesla.Adapter)
| 17.75 | 54 | 0.774648 |
f7ac9dc629cc53aeb89e9b115d9bebaa1e549935 | 325 | ex | Elixir | examples/echo.ex | ewildgoose/thousand_island | 2686f86f897a5bd66b9271c42322b13052c3314e | ["MIT"] | 139 | 2019-11-29T02:11:40.000Z | 2022-03-24T00:17:52.000Z | examples/echo.ex | ewildgoose/thousand_island | 2686f86f897a5bd66b9271c42322b13052c3314e | ["MIT"] | 11 | 2020-01-23T15:43:58.000Z | 2022-02-15T10:16:37.000Z | examples/echo.ex | ewildgoose/thousand_island | 2686f86f897a5bd66b9271c42322b13052c3314e | ["MIT"] | 8 | 2021-10-09T16:36:35.000Z | 2022-01-31T20:39:10.000Z |
defmodule Echo do
@moduledoc """
A sample Handler implementation of the Echo protocol
https://en.wikipedia.org/wiki/Echo_Protocol
"""
use ThousandIsland.Handler
@impl ThousandIsland.Handler
def handle_data(data, socket, state) do
ThousandIsland.Socket.send(socket, data)
{:continue, state}
end
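  # To serve this handler (a sketch; the port number is an arbitrary choice):
  #
  #     {:ok, _pid} = ThousandIsland.start_link(port: 1234, handler_module: Echo)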
end
| 20.3125 | 54 | 0.732308 |
f7aca1b131140f6804a365be132bd078da97e4dc | 2,172 | ex | Elixir | lib/util/test/data_builder.ex | NickMcG/SNEEX | 901215dea41fa21314a4f4db46b51648158f1544 | ["MIT"] | 1 | 2019-11-16T00:33:02.000Z | 2019-11-16T00:33:02.000Z | lib/util/test/data_builder.ex | NickMcG/SNEEX | 901215dea41fa21314a4f4db46b51648158f1544 | ["MIT"] | 1 | 2019-08-11T23:02:15.000Z | 2019-08-11T23:02:15.000Z | lib/util/test/data_builder.ex | NickMcG/SNEEX | 901215dea41fa21314a4f4db46b51648158f1544 | ["MIT"] | null | null | null |
defmodule Util.Test.DataBuilder do
@moduledoc """
This is a module that makes it easier to generate data for tests.
This is not really intended for main consumption
"""
def build_block_of_00s(length) do
append_data_to_block(<<>>, length, 0x00)
end
def build_block_of_ffs(length) do
append_data_to_block(<<>>, length, 0xFF)
end
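  # `append_data_to_block/3` below appends `count` copies of `data`,
  # fast-pathing 16-byte chunks so large fills don't recurse once per byte.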
defp append_data_to_block(block, 0, _data) do
block
end
defp append_data_to_block(block, count, data) when count >= 16 do
ffs =
<<data, data, data, data, data, data, data, data, data, data, data, data, data, data, data,
data>>
append_data_to_block(block <> ffs, count - 16, data)
end
defp append_data_to_block(block, count, data) do
append_data_to_block(block <> <<data>>, count - 1, data)
end
def build_final_fantasy_2_header do
<<
0x46,
0x49,
0x4E,
0x41,
0x4C,
0x20,
0x46,
0x41,
0x4E,
0x54,
0x41,
0x53,
0x59,
0x20,
0x49,
0x49,
0x20,
0x20,
0x20,
0x20,
0x20,
0x20,
0x02,
0x0A,
0x03,
0x01,
0xC3,
0x00,
0x0F,
0x7A,
0xF0,
0x85,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0x00,
0x02,
0xFF,
0xFF,
0x04,
0x02,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0xFF,
0x00,
0x80,
0xFF,
0xFF
>>
end
def build_data_for_bank_without_header do
build_block_of_ffs(Util.Bank.bank_size())
end
def build_data_for_bank_with_header_on_page_7 do
p = build_block_of_ffs(Util.Page.page_size())
p7 = build_block_of_ffs(Util.Page.page_size() - 64) <> build_final_fantasy_2_header()
p <> p <> p <> p <> p <> p <> p <> p7 <> p <> p <> p <> p <> p <> p <> p <> p
end
def build_data_for_bank_with_header_on_page_f do
dummy_data = build_block_of_ffs(Util.Bank.bank_size() - 64)
dummy_data <> build_final_fantasy_2_header()
end
end
| 18.724138 | 97 | 0.560773 |
f7acc4f76713548a4c0bf8b413878b8982fce61e | 74 | ex | Elixir | code examples/example-11-16.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | ["Apache-2.0"] | 8 | 2016-08-14T12:35:16.000Z | 2021-01-26T04:05:31.000Z | code examples/example-11-16.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | ["Apache-2.0"] | null | null | null | code examples/example-11-16.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | ["Apache-2.0"] | 5 | 2016-08-18T22:12:19.000Z | 2020-02-17T18:52:41.000Z |
defmodule DARMetaData.Repo do
use Ecto.Repo, otp_app: :dar_metadata
end
| 18.5 | 39 | 0.797297 |
f7ace8603bc0b841bfd8c9f2e7c496012d0a4eb4 | 1,731 | exs | Elixir | test/blue_heron/address_test.exs | kevinansfield/blue_heron | 8339e6747e135030f7d1e67801391f03f2558e0d | ["Apache-2.0"] | 45 | 2020-10-17T13:34:15.000Z | 2022-03-08T09:40:43.000Z | test/blue_heron/address_test.exs | kevinansfield/blue_heron | 8339e6747e135030f7d1e67801391f03f2558e0d | ["Apache-2.0"] | 20 | 2020-10-15T15:05:54.000Z | 2022-03-27T15:54:36.000Z | test/blue_heron/address_test.exs | kevinansfield/blue_heron | 8339e6747e135030f7d1e67801391f03f2558e0d | ["Apache-2.0"] | 11 | 2020-10-23T17:18:57.000Z | 2022-03-15T20:01:49.000Z |
defmodule BlueHeron.AddressTest do
use ExUnit.Case
doctest BlueHeron.Address
alias BlueHeron.Address
test "from integer" do
address = Address.parse(0xA4C138A0498B)
assert address.integer == 0xA4C138A0498B
assert address.binary == <<0xA4, 0xC1, 0x38, 0xA0, 0x49, 0x8B>>
assert address.string == "A4:C1:38:A0:49:8B"
end
test "from string" do
address = Address.parse("A4:C1:38:A0:49:8B")
assert address.integer == 0xA4C138A0498B
assert address.binary == <<0xA4, 0xC1, 0x38, 0xA0, 0x49, 0x8B>>
assert address.string == "A4:C1:38:A0:49:8B"
end
test "from binary" do
address = Address.parse(<<0xA4, 0xC1, 0x38, 0xA0, 0x49, 0x8B>>)
assert address.integer == 0xA4C138A0498B
assert address.binary == <<0xA4, 0xC1, 0x38, 0xA0, 0x49, 0x8B>>
assert address.string == "A4:C1:38:A0:49:8B"
end
test "to_string" do
address_from_integer = Address.parse(0xA4C138A0498B)
address_from_string = Address.parse("A4:C1:38:A0:49:8B")
address_from_binary = Address.parse(<<0xA4, 0xC1, 0x38, 0xA0, 0x49, 0x8B>>)
assert to_string(address_from_integer) == "A4:C1:38:A0:49:8B"
assert to_string(address_from_string) == "A4:C1:38:A0:49:8B"
assert to_string(address_from_binary) == "A4:C1:38:A0:49:8B"
end
test "inspect" do
address_from_integer = Address.parse(0xA4C138A0498B)
address_from_string = Address.parse("A4:C1:38:A0:49:8B")
address_from_binary = Address.parse(<<0xA4, 0xC1, 0x38, 0xA0, 0x49, 0x8B>>)
    assert inspect(address_from_integer) == "Address<A4:C1:38:A0:49:8B>"
    assert inspect(address_from_string) == "Address<A4:C1:38:A0:49:8B>"
    assert inspect(address_from_binary) == "Address<A4:C1:38:A0:49:8B>"
end
end
| 36.829787 | 79 | 0.695552 |
f7ace9ffbe2e4e07bfc9c6b87669408cecc61ef9 | 402 | ex | Elixir | test/support/fixtures/please_fixtures.ex | christian-fei/honeydew | af06f5778de164fd50979ae20e59b6aeb3092485 | ["MIT"] | 13 | 2022-02-13T18:43:20.000Z | 2022-03-19T11:53:48.000Z | test/support/fixtures/please_fixtures.ex | christian-fei/honeydew | af06f5778de164fd50979ae20e59b6aeb3092485 | ["MIT"] | 1 | 2022-02-23T13:57:19.000Z | 2022-02-23T13:57:19.000Z | test/support/fixtures/please_fixtures.ex | christian-fei/honeydew | af06f5778de164fd50979ae20e59b6aeb3092485 | ["MIT"] | 3 | 2022-02-13T19:25:19.000Z | 2022-02-22T17:56:52.000Z |
defmodule Honeydew.PleaseFixtures do
@moduledoc """
This module defines test helpers for creating
entities via the `Honeydew.Please` context.
"""
@doc """
Generate a list.
"""
def list_fixture(attrs \\ %{}) do
{:ok, list} =
attrs
|> Enum.into(%{
name: "some name",
notes: "some notes"
})
|> Honeydew.Please.create_list()
list
end
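  # Typical use in a test (hedged; the returned struct shape depends on
  # `Honeydew.Please.create_list/1`):
  #
  #     list = list_fixture(%{name: "groceries"})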
end
| 18.272727 | 47 | 0.58209 |
f7acf7a20fba60c9f92cfd82d21107ad35a4ab2d | 1,579 | exs | Elixir | 06/part2.ets.exs | seantanly/elixir-advent_of_code | 1e39ac46bc01f5c8cffd2d2f79f9af0b71767291 | ["MIT"] | 3 | 2016-01-18T01:14:45.000Z | 2017-05-11T09:14:49.000Z | 06/part2.ets.exs | seantanly/elixir-advent_of_code | 1e39ac46bc01f5c8cffd2d2f79f9af0b71767291 | ["MIT"] | null | null | null | 06/part2.ets.exs | seantanly/elixir-advent_of_code | 1e39ac46bc01f5c8cffd2d2f79f9af0b71767291 | ["MIT"] | null | null | null |
# Using ETS instead of Map reduces the time taken from 45 to 24 seconds.
defmodule LightArray do
def str_to_pos(str), do: str |> String.split(",", trim: true) |> Enum.map(&String.to_integer/1) |> List.to_tuple
def exec_cmd(cmd_str, grid) do
[_, cmd, start_pos, end_pos] = Regex.run(~r/([\w ]+) (\d+,\d+) through (\d+,\d+)/, cmd_str)
{x1, y1} = str_to_pos(start_pos)
{x2, y2} = str_to_pos(end_pos)
Enum.each(x1..x2, fn x ->
Enum.each(y1..y2, fn y ->
pos = {x, y}
case cmd do
"turn on" ->
new_obj = case :ets.lookup(grid, pos) do
[] -> {pos, 1}
[{^pos, val}] -> {pos, val + 1}
end
:ets.insert(grid, new_obj)
"toggle" ->
new_obj = case :ets.lookup(grid, pos) do
[] -> {pos, 2}
[{^pos, val}] -> {pos, val + 2}
end
:ets.insert(grid, new_obj)
"turn off" ->
new_obj = case :ets.lookup(grid, pos) do
[] -> {pos, 0}
[{^pos, val}] -> {pos, Enum.max([0, val - 1])}
end
:ets.insert(grid, new_obj)
_ -> raise "Unknown cmd: #{inspect cmd_str}"
end
end)
end)
grid
end
end
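# Note: each lookup/insert pair above could be collapsed into a single
# :ets.update_counter/4 call, e.g. :ets.update_counter(grid, pos, {2, 1}, {pos, 0})
# for "turn on", or the update op {2, -1, 0, 0} to floor "turn off" at zero.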
grid = :ets.new(:grid, [:set, :named_table])
result = Path.join(__DIR__, "input.txt")
|> File.read!
|> String.split("\n", trim: true)
|> Enum.reduce(grid, fn cmd_str, grid -> LightArray.exec_cmd(cmd_str, grid) end)
|> :ets.tab2list
|> Enum.reduce(0, fn {_k, v}, acc -> acc + v end)
|> IO.inspect
^result = 15343601
| 29.240741 | 114 | 0.50665 |
f7ad1aa23cf51b0bcf869f5e2a28db201ae88b61 | 7,199 | ex | Elixir | lib/aws/generated/transcribe_streaming.ex | pecigonzalo/aws-elixir | b52181ebfb9e62349dc8e8067b7fbcd4f7a18c68 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/transcribe_streaming.ex | pecigonzalo/aws-elixir | b52181ebfb9e62349dc8e8067b7fbcd4f7a18c68 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/transcribe_streaming.ex | pecigonzalo/aws-elixir | b52181ebfb9e62349dc8e8067b7fbcd4f7a18c68 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.TranscribeStreaming do
@moduledoc """
Operations and objects for transcribing streaming speech to text.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-10-26",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "transcribestreaming",
global?: false,
protocol: "rest-json",
service_id: "Transcribe Streaming",
signature_version: "v4",
signing_name: "transcribe",
target_prefix: nil
}
end
@doc """
Starts a bidirectional HTTP/2 stream where audio is streamed to Amazon
Transcribe Medical and the transcription results are streamed to your
application.
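
## Example

A hypothetical invocation (header values are illustrative; `client` is an
`%AWS.Client{}` configured elsewhere):

    input = %{
      "LanguageCode" => "en-US",
      "MediaEncoding" => "pcm",
      "MediaSampleRateHertz" => 16_000,
      "Specialty" => "PRIMARYCARE",
      "Type" => "CONVERSATION"
    }

    AWS.TranscribeStreaming.start_medical_stream_transcription(client, input)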
"""
def start_medical_stream_transcription(%Client{} = client, input, options \\ []) do
url_path = "/medical-stream-transcription"
{headers, input} =
[
{"ContentIdentificationType", "x-amzn-transcribe-content-identification-type"},
{"EnableChannelIdentification", "x-amzn-transcribe-enable-channel-identification"},
{"LanguageCode", "x-amzn-transcribe-language-code"},
{"MediaEncoding", "x-amzn-transcribe-media-encoding"},
{"MediaSampleRateHertz", "x-amzn-transcribe-sample-rate"},
{"NumberOfChannels", "x-amzn-transcribe-number-of-channels"},
{"SessionId", "x-amzn-transcribe-session-id"},
{"ShowSpeakerLabel", "x-amzn-transcribe-show-speaker-label"},
{"Specialty", "x-amzn-transcribe-specialty"},
{"Type", "x-amzn-transcribe-type"},
{"VocabularyName", "x-amzn-transcribe-vocabulary-name"}
]
|> Request.build_params(input)
query_params = []
options =
Keyword.put(
options,
:response_header_parameters,
[
{"x-amzn-transcribe-content-identification-type", "ContentIdentificationType"},
{"x-amzn-transcribe-enable-channel-identification", "EnableChannelIdentification"},
{"x-amzn-transcribe-language-code", "LanguageCode"},
{"x-amzn-transcribe-media-encoding", "MediaEncoding"},
{"x-amzn-transcribe-sample-rate", "MediaSampleRateHertz"},
{"x-amzn-transcribe-number-of-channels", "NumberOfChannels"},
{"x-amzn-request-id", "RequestId"},
{"x-amzn-transcribe-session-id", "SessionId"},
{"x-amzn-transcribe-show-speaker-label", "ShowSpeakerLabel"},
{"x-amzn-transcribe-specialty", "Specialty"},
{"x-amzn-transcribe-type", "Type"},
{"x-amzn-transcribe-vocabulary-name", "VocabularyName"}
]
)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Starts a bidirectional HTTP/2 stream where audio is streamed to Amazon
Transcribe and the transcription results are streamed to your application.
The following are encoded as HTTP/2 headers:
* x-amzn-transcribe-language-code
* x-amzn-transcribe-media-encoding
* x-amzn-transcribe-sample-rate
* x-amzn-transcribe-session-id
See the [SDK for Go API Reference](https://docs.aws.amazon.com/sdk-for-go/api/service/transcribestreamingservice/#TranscribeStreamingService.StartStreamTranscription)
for more detail.
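
## Example

A hypothetical invocation (values are illustrative; `client` is an
`%AWS.Client{}` configured elsewhere):

    input = %{
      "LanguageCode" => "en-US",
      "MediaEncoding" => "pcm",
      "MediaSampleRateHertz" => 16_000
    }

    AWS.TranscribeStreaming.start_stream_transcription(client, input)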
"""
def start_stream_transcription(%Client{} = client, input, options \\ []) do
url_path = "/stream-transcription"
{headers, input} =
[
{"ContentIdentificationType", "x-amzn-transcribe-content-identification-type"},
{"ContentRedactionType", "x-amzn-transcribe-content-redaction-type"},
{"EnableChannelIdentification", "x-amzn-transcribe-enable-channel-identification"},
{"EnablePartialResultsStabilization",
"x-amzn-transcribe-enable-partial-results-stabilization"},
{"IdentifyLanguage", "x-amzn-transcribe-identify-language"},
{"LanguageCode", "x-amzn-transcribe-language-code"},
{"LanguageModelName", "x-amzn-transcribe-language-model-name"},
{"LanguageOptions", "x-amzn-transcribe-language-options"},
{"MediaEncoding", "x-amzn-transcribe-media-encoding"},
{"MediaSampleRateHertz", "x-amzn-transcribe-sample-rate"},
{"NumberOfChannels", "x-amzn-transcribe-number-of-channels"},
{"PartialResultsStability", "x-amzn-transcribe-partial-results-stability"},
{"PiiEntityTypes", "x-amzn-transcribe-pii-entity-types"},
{"PreferredLanguage", "x-amzn-transcribe-preferred-language"},
{"SessionId", "x-amzn-transcribe-session-id"},
{"ShowSpeakerLabel", "x-amzn-transcribe-show-speaker-label"},
{"VocabularyFilterMethod", "x-amzn-transcribe-vocabulary-filter-method"},
{"VocabularyFilterName", "x-amzn-transcribe-vocabulary-filter-name"},
{"VocabularyFilterNames", "x-amzn-transcribe-vocabulary-filter-names"},
{"VocabularyName", "x-amzn-transcribe-vocabulary-name"},
{"VocabularyNames", "x-amzn-transcribe-vocabulary-names"}
]
|> Request.build_params(input)
query_params = []
options =
Keyword.put(
options,
:response_header_parameters,
[
{"x-amzn-transcribe-content-identification-type", "ContentIdentificationType"},
{"x-amzn-transcribe-content-redaction-type", "ContentRedactionType"},
{"x-amzn-transcribe-enable-channel-identification", "EnableChannelIdentification"},
{"x-amzn-transcribe-enable-partial-results-stabilization",
"EnablePartialResultsStabilization"},
{"x-amzn-transcribe-identify-language", "IdentifyLanguage"},
{"x-amzn-transcribe-language-code", "LanguageCode"},
{"x-amzn-transcribe-language-model-name", "LanguageModelName"},
{"x-amzn-transcribe-language-options", "LanguageOptions"},
{"x-amzn-transcribe-media-encoding", "MediaEncoding"},
{"x-amzn-transcribe-sample-rate", "MediaSampleRateHertz"},
{"x-amzn-transcribe-number-of-channels", "NumberOfChannels"},
{"x-amzn-transcribe-partial-results-stability", "PartialResultsStability"},
{"x-amzn-transcribe-pii-entity-types", "PiiEntityTypes"},
{"x-amzn-transcribe-preferred-language", "PreferredLanguage"},
{"x-amzn-request-id", "RequestId"},
{"x-amzn-transcribe-session-id", "SessionId"},
{"x-amzn-transcribe-show-speaker-label", "ShowSpeakerLabel"},
{"x-amzn-transcribe-vocabulary-filter-method", "VocabularyFilterMethod"},
{"x-amzn-transcribe-vocabulary-filter-name", "VocabularyFilterName"},
{"x-amzn-transcribe-vocabulary-filter-names", "VocabularyFilterNames"},
{"x-amzn-transcribe-vocabulary-name", "VocabularyName"},
{"x-amzn-transcribe-vocabulary-names", "VocabularyNames"}
]
)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
| 39.994444 | 169 | 0.657869 |
f7ad2b42f0eaa70c1e1974ce9fb063ccf8826276 | 748 | exs | Elixir | ch07-04.exs | gabrielelana/programming-elixir | 475319123d21b03c3bfcc02a23178ab9db67a6b3 | [
"MIT"
] | 9 | 2016-01-22T17:28:27.000Z | 2020-06-07T01:38:44.000Z | ch07-04.exs | gabrielelana/programming-elixir | 475319123d21b03c3bfcc02a23178ab9db67a6b3 | [
"MIT"
] | null | null | null | ch07-04.exs | gabrielelana/programming-elixir | 475319123d21b03c3bfcc02a23178ab9db67a6b3 | [
"MIT"
] | 1 | 2019-04-18T10:08:38.000Z | 2019-04-18T10:08:38.000Z | # An Elixir single-quoted string is actually a list of individual character
# codes. Write a caesar(list, n) function that adds n to each list element,
# wrapping if the addition results in a character greater than z.
defmodule MyList do
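# Wraps within the lowercase alphabet, e.g. with shift 13 the character
# ?r (114) becomes ?a + rem(?r - ?a + 13, 26) == ?e.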
def caesar(l, shift) do
reduce(l, '', fn(c, encoded) -> encoded ++ [?a + rem(c - ?a + shift, ?z - ?a + 1)] end)
end
def reduce([], _), do: raise "Empty list cannot be reduced without an initial value"
def reduce([h|t], f), do: reduce(t, h, f)
def reduce([], value, _), do: value
def reduce([h|t], value, f), do: reduce(t, f.(h, value), f)
end
ExUnit.start
defmodule Ch07.Test do
use ExUnit.Case
test "MyList.caesar/2" do
assert 'elixir' === MyList.caesar('ryvkve', 13)
end
end
| 27.703704 | 91 | 0.657754 |
f7ad47dce1b3fa949c9ac31f99ce59c56f844380 | 703 | ex | Elixir | lib/membrane/core/element/callback_context.ex | vKxni/membrane_core | d14a67304b63706d6df520fa306dd2fda147c07c | [
"Apache-2.0"
] | null | null | null | lib/membrane/core/element/callback_context.ex | vKxni/membrane_core | d14a67304b63706d6df520fa306dd2fda147c07c | [
"Apache-2.0"
] | null | null | null | lib/membrane/core/element/callback_context.ex | vKxni/membrane_core | d14a67304b63706d6df520fa306dd2fda147c07c | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Core.Element.CallbackContext do
@moduledoc false
use Membrane.Core.CallbackContext,
pads: %{Membrane.Pad.ref_t() => Membrane.Element.PadData.t()},
playback_state: Membrane.PlaybackState.t(),
clock: Membrane.Clock.t() | nil,
parent_clock: Membrane.Clock.t() | nil,
name: Membrane.Element.name_t()
@impl true
def extract_default_fields(state, args) do
quote do
[
playback_state: unquote(state).playback.state,
pads: unquote(state).pads.data,
clock: unquote(state).synchronization.clock,
parent_clock: unquote(state).synchronization.parent_clock,
name: unquote(state).name
]
end ++ args
end
end
| 29.291667 | 66 | 0.681366 |
f7ad532a71b3a8a039d5dccc68e2aa80eceaedda | 412 | ex | Elixir | lib/auth_zuma/middleware/bearer_middleware.ex | victorolinasc/auth-zuma | a107130440ed79731b8e28e84ed282111f0aa1f1 | [
"Apache-2.0"
] | 3 | 2018-07-16T04:24:01.000Z | 2018-07-17T14:36:02.000Z | lib/auth_zuma/middleware/bearer_middleware.ex | victorolinasc/auth-zuma | a107130440ed79731b8e28e84ed282111f0aa1f1 | [
"Apache-2.0"
] | null | null | null | lib/auth_zuma/middleware/bearer_middleware.ex | victorolinasc/auth-zuma | a107130440ed79731b8e28e84ed282111f0aa1f1 | [
"Apache-2.0"
] | null | null | null | defmodule AuthZuma.Middleware.Bearer do
@moduledoc """
Uses the `authentication_module` request option (read from `env.opts`) to
fetch an access token and inject it into the request as a `Bearer`
authorization header.
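
## Example

A hypothetical request (the `MyApp.Authenticator` module is illustrative
and must expose an `authenticate/0` that returns `{:ok, token}`):

    Tesla.get(client, "/resource",
      opts: [authentication_module: MyApp.Authenticator]
    )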
"""
@behaviour Tesla.Middleware
@impl true
def call(env, next, _options) do
{:ok, token} = env.opts[:authentication_module].authenticate()
%{env | headers: [{"Authorization", "Bearer #{token}"} | env.headers]}
|> Tesla.run(next)
end
end
| 24.235294 | 74 | 0.68932 |
f7ad61dae382db9b14d7b364ab27971a40381caa | 3,569 | ex | Elixir | clients/firebase_dynamic_links/lib/google_api/firebase_dynamic_links/v1/model/get_ios_post_install_attribution_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/firebase_dynamic_links/lib/google_api/firebase_dynamic_links/v1/model/get_ios_post_install_attribution_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/firebase_dynamic_links/lib/google_api/firebase_dynamic_links/v1/model/get_ios_post_install_attribution_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.FirebaseDynamicLinks.V1.Model.GetIosPostInstallAttributionRequest do
@moduledoc """
Request for iSDK to execute strong match flow for post-install attribution. This is meant for iOS requests only. Requests from other platforms will not be honored.
## Attributes
- appInstallationTime (String.t): App installation epoch time (https://en.wikipedia.org/wiki/Unix_time). This is a client signal for a more accurate weak match. Defaults to: `null`.
- bundleId (String.t): APP bundle ID. Defaults to: `null`.
- device (DeviceInfo): Device information. Defaults to: `null`.
- iosVersion (String.t): iOS version, e.g. 9.3.5. Consider adding \"build\". Defaults to: `null`.
- retrievalMethod (String.t): App post install attribution retrieval information. Disambiguates mechanism (iSDK or developer invoked) to retrieve payload from clicked link. Defaults to: `null`.
- Enum - one of [UNKNOWN_PAYLOAD_RETRIEVAL_METHOD, IMPLICIT_WEAK_MATCH, EXPLICIT_WEAK_MATCH, EXPLICIT_STRONG_AFTER_WEAK_MATCH]
- sdkVersion (String.t): Google SDK version. Version takes the form \"$major.$minor.$patch\" Defaults to: `null`.
- uniqueMatchLinkToCheck (String.t): Possible unique matched link that the server needs to check before performing a fingerprint match. If the passed link is short, the server needs to expand the link. If the link is long, the server needs to validate the link. Defaults to: `null`.
- visualStyle (String.t): Strong match page information. Disambiguates between default UI and custom page to present when strong match succeeds/fails to find cookie. Defaults to: `null`.
- Enum - one of [UNKNOWN_VISUAL_STYLE, DEFAULT_STYLE, CUSTOM_STYLE]
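
## Example

A hypothetical request struct (field values are illustrative and drawn
from the enums above):

    %GoogleApi.FirebaseDynamicLinks.V1.Model.GetIosPostInstallAttributionRequest{
      bundleId: "com.example.app",
      iosVersion: "9.3.5",
      retrievalMethod: "IMPLICIT_WEAK_MATCH",
      visualStyle: "DEFAULT_STYLE"
    }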
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:appInstallationTime => any(),
:bundleId => any(),
:device => GoogleApi.FirebaseDynamicLinks.V1.Model.DeviceInfo.t(),
:iosVersion => any(),
:retrievalMethod => any(),
:sdkVersion => any(),
:uniqueMatchLinkToCheck => any(),
:visualStyle => any()
}
field(:appInstallationTime)
field(:bundleId)
field(:device, as: GoogleApi.FirebaseDynamicLinks.V1.Model.DeviceInfo)
field(:iosVersion)
field(:retrievalMethod)
field(:sdkVersion)
field(:uniqueMatchLinkToCheck)
field(:visualStyle)
end
defimpl Poison.Decoder,
for: GoogleApi.FirebaseDynamicLinks.V1.Model.GetIosPostInstallAttributionRequest do
def decode(value, options) do
GoogleApi.FirebaseDynamicLinks.V1.Model.GetIosPostInstallAttributionRequest.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.FirebaseDynamicLinks.V1.Model.GetIosPostInstallAttributionRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 46.960526 | 257 | 0.744466 |
f7ad6904f2eda23c1f4139e21f3ef4c3e5ea539f | 117 | exs | Elixir | .formatter.exs | tyrchen/nimble_parsers | 726c895294a966cbaf1a752a9da21a7e51a3505e | [
"MIT"
] | 2 | 2019-07-14T08:06:24.000Z | 2019-07-14T08:08:54.000Z | .formatter.exs | tyrchen/common_parser | 726c895294a966cbaf1a752a9da21a7e51a3505e | [
"MIT"
] | 16 | 2019-02-10T06:25:44.000Z | 2019-10-27T09:35:00.000Z | .formatter.exs | tyrchen/common_parser | 726c895294a966cbaf1a752a9da21a7e51a3505e | [
"MIT"
] | null | null | null | # Used by "mix format"
[
inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"],
import_deps: [:nimble_parsec]
]
| 19.5 | 57 | 0.589744 |
f7ad7cd08d5301d1aafd35c05bfc1af0cea58856 | 2,154 | exs | Elixir | zipper/zipper_test.exs | nickpellant/xelixir | 6e27dc7083ef3e423a1615f5151910672e6397a8 | [
"MIT"
] | null | null | null | zipper/zipper_test.exs | nickpellant/xelixir | 6e27dc7083ef3e423a1615f5151910672e6397a8 | [
"MIT"
] | null | null | null | zipper/zipper_test.exs | nickpellant/xelixir | 6e27dc7083ef3e423a1615f5151910672e6397a8 | [
"MIT"
] | null | null | null | if System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("example.exs")
else
Code.load_file("zipper.exs")
end
ExUnit.start
ExUnit.configure exclude: :pending, trace: true
defmodule ZipperTest do
alias BinTree, as: BT
import Zipper
# A custom Inspect instance purely for the tests; it makes error messages
# much more readable.
#
# BT[value: 3, left: BT[value: 5, right: BT[value: 6]]] becomes (3:(5::(6::)):)
defimpl Inspect, for: BT do
import Inspect.Algebra
def inspect(%BinTree{value: v, left: l, right: r}, opts) do
concat ["(", to_doc(v, opts),
":", (if l, do: to_doc(l, opts), else: ""),
":", (if r, do: to_doc(r, opts), else: ""),
")"]
end
end
use ExUnit.Case, async: false
defp bt(value, left, right), do: %BT{value: value, left: left, right: right}
defp leaf(value), do: %BT{value: value}
defp t1, do: bt(1, bt(2, nil, leaf(3)), leaf(4))
defp t2, do: bt(1, bt(5, nil, leaf(3)), leaf(4))
defp t3, do: bt(1, bt(2, leaf(5), leaf(3)), leaf(4))
defp t4, do: bt(1, leaf(2), leaf(4))
defp t5, do: bt(1, bt(2, nil, leaf(3)),
bt(6, leaf(7), leaf(8)))
# @tag :pending
test "data is retained" do
assert (t1 |> from_tree |> to_tree) == t1
end
@tag :pending
test "left, right and value" do
assert (t1 |> from_tree |> left |> right |> value) == 3
end
@tag :pending
test "dead end" do
assert (t1 |> from_tree |> left |> left) == nil
end
@tag :pending
test "tree from deep focus" do
assert (t1 |> from_tree |> left |> right |> to_tree) == t1
end
@tag :pending
test "set_value" do
assert (t1 |> from_tree |> left |> set_value(5) |> to_tree) == t2
end
@tag :pending
test "set_left with leaf" do
assert (t1 |> from_tree |> left |> set_left(leaf(5)) |> to_tree) == t3
end
@tag :pending
test "set_right with nil" do
assert (t1 |> from_tree |> left |> set_right(nil) |> to_tree) == t4
end
@tag :pending
test "set_right with subtree" do
assert (t1 |> from_tree |> set_right(bt(6, leaf(7), leaf(8))) |> to_tree) == t5
end
end
| 26.592593 | 83 | 0.584494 |
f7ad9088abd2a56c5c1d9b1112764885b610c7ef | 1,764 | ex | Elixir | lib/re_web/router.ex | diemesleno/backend | a55f9c846cc826b5269f3fd6ce19223f0c6a1682 | [
"MIT"
] | 1 | 2020-01-23T04:24:58.000Z | 2020-01-23T04:24:58.000Z | lib/re_web/router.ex | diemesleno/backend | a55f9c846cc826b5269f3fd6ce19223f0c6a1682 | [
"MIT"
] | null | null | null | lib/re_web/router.ex | diemesleno/backend | a55f9c846cc826b5269f3fd6ce19223f0c6a1682 | [
"MIT"
] | 1 | 2019-12-31T16:11:21.000Z | 2019-12-31T16:11:21.000Z | defmodule ReWeb.Router do
use ReWeb, :router
pipeline :public_api do
plug(:accepts, ["json"])
end
pipeline :private_api do
plug(:accepts, ["json"])
plug(ReWeb.GuardianPipeline)
end
scope "/", ReWeb do
pipe_through(:public_api)
resources("/neighborhoods", NeighborhoodController, only: [:index])
resources "/listings", ListingController, only: [:index, :show] do
resources("/interests", InterestController, only: [:create])
resources("/related", RelatedController, only: [:index])
end
resources("/featured_listings", FeaturedController, only: [:index])
end
scope "/users", ReWeb do
pipe_through(:public_api)
put("/confirm", UserController, :confirm)
post("/login", UserController, :login)
post("/register", UserController, :register)
post("/reset_password", UserController, :reset_password)
post("/redefine_password", UserController, :redefine_password)
end
scope "/", ReWeb do
pipe_through(:private_api)
resources "/listings", ListingController, except: [:new] do
resources("/images", ImageController, only: [:index, :create, :delete])
put("/images_orders", ImageController, :order)
end
end
scope "/users", ReWeb do
pipe_through(:private_api)
post("/edit_password", UserController, :edit_password)
put("/change_email", UserController, :change_email)
end
if Mix.env() == :dev do
pipeline :browser do
plug(:accepts, ["html"])
plug(:fetch_session)
plug(:fetch_flash)
plug(:protect_from_forgery)
plug(:put_secure_browser_headers)
end
scope "/dev" do
pipe_through(:browser)
forward("/mailbox", Plug.Swoosh.MailboxPreview, base_path: "/dev/mailbox")
end
end
end
| 25.565217 | 80 | 0.671202 |
f7adbbe30f3dac08b9f3ab0989885cb2a6524c2b | 1,517 | exs | Elixir | mix.exs | lukkor/exlog | f2812d25e7712c41db5a9bf5bf3977255916c8e1 | [
"MIT"
] | null | null | null | mix.exs | lukkor/exlog | f2812d25e7712c41db5a9bf5bf3977255916c8e1 | [
"MIT"
] | null | null | null | mix.exs | lukkor/exlog | f2812d25e7712c41db5a9bf5bf3977255916c8e1 | [
"MIT"
] | null | null | null | defmodule Exlog.Mixfile do
use Mix.Project
def project do
[app: :exlog,
version: "0.0.1",
elixir: "~> 1.4",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps()]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[mod: {Exlog.Application, []},
extra_applications: [:logger, :runtime_tools]]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[{:phoenix, "~> 1.3.0-rc"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_ecto, "~> 3.2"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.6"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"}]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"]]
end
end
| 28.622642 | 78 | 0.607119 |
f7adbc8314541f5e047eae09d629e77e9367af17 | 223 | ex | Elixir | lib/ohio_elixir_web/views/view_helpers/date.ex | davemenninger/ohio_elixir | 9472b71fa906e30a2a5fdc013256a5e80caedc2f | [
"MIT"
] | 7 | 2021-01-22T00:20:04.000Z | 2022-03-30T22:07:32.000Z | lib/ohio_elixir_web/views/view_helpers/date.ex | davemenninger/ohio_elixir | 9472b71fa906e30a2a5fdc013256a5e80caedc2f | [
"MIT"
] | 11 | 2021-10-05T03:59:28.000Z | 2022-03-20T21:54:44.000Z | lib/ohio_elixir_web/views/view_helpers/date.ex | davemenninger/ohio_elixir | 9472b71fa906e30a2a5fdc013256a5e80caedc2f | [
"MIT"
] | 3 | 2021-06-10T02:48:54.000Z | 2021-10-09T03:43:06.000Z | defmodule OhioElixirWeb.ViewHelpers.Date do
@moduledoc false
def format_date_time(datetime) do
datetime
|> DateTime.shift_zone!("America/New_York")
|> Calendar.strftime("%Y-%m-%d %I:%M:%S %p EST")
end
end
| 24.777778 | 52 | 0.695067 |
f7adbeb7494eca970492f4f77b85f5b41ad99b86 | 3,999 | ex | Elixir | lib/line_bot/message/template.ex | adamu/line_bot | dbc2a91b15e6670af563ff78889ffb258bf00edf | [
"Apache-2.0"
] | 6 | 2019-11-04T23:54:42.000Z | 2021-09-14T07:14:00.000Z | lib/line_bot/message/template.ex | adamu/line_bot | dbc2a91b15e6670af563ff78889ffb258bf00edf | [
"Apache-2.0"
] | 4 | 2019-11-08T03:49:33.000Z | 2019-11-08T03:55:37.000Z | lib/line_bot/message/template.ex | adamu/line_bot | dbc2a91b15e6670af563ff78889ffb258bf00edf | [
"Apache-2.0"
] | 1 | 2020-05-14T08:22:36.000Z | 2020-05-14T08:22:36.000Z | defmodule LineBot.Message.Template do
use LineBot.Message
@moduledoc """
Represents a [Template message](https://developers.line.biz/en/reference/messaging-api/#template-messages).
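
## Example

A minimal sketch of a confirm template message; the `yes_action` and
`no_action` variables are assumed to be `LineBot.Message.Action` structs
built elsewhere:

    %LineBot.Message.Template{
      altText: "Is this correct?",
      template: %LineBot.Message.Template.Confirm{
        text: "Is this correct?",
        actions: [yes_action, no_action]
      }
    }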
"""
@type t :: %__MODULE__{
altText: String.t(),
template:
LineBot.Message.Template.Buttons.t()
| LineBot.Message.Template.Confirm.t()
| LineBot.Message.Template.Carousel.t()
| LineBot.Message.Template.ImageCarousel.t(),
type: :template,
quickReply: LineBot.Message.QuickReply.t() | nil
}
@enforce_keys [:altText, :template]
defstruct [:altText, :template, :quickReply, type: :template]
end
defmodule LineBot.Message.Template.Buttons do
use LineBot.Message
@moduledoc """
Represents a [Buttons template](https://developers.line.biz/en/reference/messaging-api/#buttons).
"""
@type t :: %__MODULE__{
thumbnailImageUrl: String.t() | nil,
imageAspectRatio: :rectangle | :square | nil,
imageSize: :cover | :contain | nil,
imageBackgroundColor: String.t() | nil,
title: String.t() | nil,
text: String.t(),
defaultAction: LineBot.Message.Action.t() | nil,
actions: [LineBot.Message.Action.t()],
type: :buttons
}
@enforce_keys [:text, :actions]
defstruct [
:thumbnailImageUrl,
:imageAspectRatio,
:imageSize,
:imageBackgroundColor,
:title,
:text,
:defaultAction,
:actions,
type: :buttons
]
end
defmodule LineBot.Message.Template.Confirm do
@derive Jason.Encoder
@moduledoc """
Represents a [Confirm template](https://developers.line.biz/en/reference/messaging-api/#confirm).
"""
@type t :: %__MODULE__{
text: String.t(),
actions: [LineBot.Message.Action.t()],
type: :confirm
}
@enforce_keys [:text, :actions]
defstruct [:text, :actions, type: :confirm]
end
defmodule LineBot.Message.Template.Carousel do
use LineBot.Message
@moduledoc """
Represents a [Carousel template](https://developers.line.biz/en/reference/messaging-api/#carousel).
"""
@type t :: %__MODULE__{
columns: [LineBot.Message.Template.Carousel.Column.t()],
imageAspectRatio: :rectangle | :square | nil,
imageSize: :cover | :contain | nil,
type: :carousel
}
@enforce_keys [:columns]
defstruct [:columns, :imageAspectRatio, :imageSize, type: :carousel]
end
defmodule LineBot.Message.Template.Carousel.Column do
use LineBot.Message
@moduledoc """
Represents a [Column object for carousel](https://developers.line.biz/en/reference/messaging-api/#column-object-for-carousel).
"""
@type t :: %__MODULE__{
thumbnailImageUrl: String.t() | nil,
imageBackgroundColor: String.t() | nil,
title: String.t() | nil,
text: String.t(),
defaultAction: LineBot.Message.Action.t() | nil,
actions: [LineBot.Message.Action.t()]
}
@enforce_keys [:text]
defstruct [:thumbnailImageUrl, :imageBackgroundColor, :title, :text, :defaultAction, :actions]
end
defmodule LineBot.Message.Template.ImageCarousel do
@derive Jason.Encoder
@moduledoc """
Represents an [Image carousel template](https://developers.line.biz/en/reference/messaging-api/#image-carousel).
"""
@type t :: %__MODULE__{
columns: [LineBot.Message.Template.ImageCarousel.Column.t()],
type: :image_carousel
}
@enforce_keys [:columns]
defstruct [:columns, type: :image_carousel]
end
defmodule LineBot.Message.Template.ImageCarousel.Column do
@derive Jason.Encoder
@moduledoc """
Represents a [Column object for image carousel](https://developers.line.biz/en/reference/messaging-api/#column-object-for-image-carousel).
"""
@type t :: %__MODULE__{
imageUrl: String.t(),
action: LineBot.Message.Action.t()
}
@enforce_keys [:imageUrl, :action]
defstruct [:imageUrl, :action]
end
| 31.992 | 140 | 0.650413 |
f7ae0d8c739c34577442c844704523a59b2743aa | 3,466 | ex | Elixir | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p2beta1__word_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p2beta1__word_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p2beta1__word_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_WordInfo do
@moduledoc """
Word-specific information for recognized words. Word information is only
included in the response when certain request parameters are set, such
as `enable_word_time_offsets`.
## Attributes
* `confidence` (*type:* `number()`, *default:* `nil`) - Output only. The confidence estimate between 0.0 and 1.0. A higher number
indicates an estimated greater likelihood that the recognized words are
correct. This field is set only for the top alternative.
This field is not guaranteed to be accurate and users should not rely on it
to be always provided.
The default of 0.0 is a sentinel value indicating `confidence` was not set.
* `endTime` (*type:* `String.t`, *default:* `nil`) - Time offset relative to the beginning of the audio, and
corresponding to the end of the spoken word. This field is only set if
`enable_word_time_offsets=true` and only in the top hypothesis. This is an
experimental feature and the accuracy of the time offset can vary.
* `speakerTag` (*type:* `integer()`, *default:* `nil`) - Output only. A distinct integer value is assigned for every speaker within
the audio. This field specifies which one of those speakers was detected to
have spoken this word. Value ranges from 1 up to diarization_speaker_count,
and is only set if speaker diarization is enabled.
* `startTime` (*type:* `String.t`, *default:* `nil`) - Time offset relative to the beginning of the audio, and
corresponding to the start of the spoken word. This field is only set if
`enable_word_time_offsets=true` and only in the top hypothesis. This is an
experimental feature and the accuracy of the time offset can vary.
* `word` (*type:* `String.t`, *default:* `nil`) - The word corresponding to this set of information.
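
## Example

A hypothetical decoded value (numbers are illustrative):

    %GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_WordInfo{
      word: "hello",
      startTime: "1.500s",
      endTime: "1.900s",
      confidence: 0.93,
      speakerTag: 1
    }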
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:confidence => number(),
:endTime => String.t(),
:speakerTag => integer(),
:startTime => String.t(),
:word => String.t()
}
field(:confidence)
field(:endTime)
field(:speakerTag)
field(:startTime)
field(:word)
end
defimpl Poison.Decoder,
for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_WordInfo do
def decode(value, options) do
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_WordInfo.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_WordInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 43.325 | 135 | 0.725043 |
f7ae287d930dad9f7168f7e74903098d6d080040 | 44,430 | exs | Elixir | test/geometry_test.exs | hrzndhrn/geometry | bffdac0a9554f7f5fd05caceee0fa8f3c96d1c60 | [
"MIT"
] | null | null | null | test/geometry_test.exs | hrzndhrn/geometry | bffdac0a9554f7f5fd05caceee0fa8f3c96d1c60 | [
"MIT"
] | 2 | 2020-10-25T10:06:07.000Z | 2020-10-26T18:15:20.000Z | test/geometry_test.exs | hrzndhrn/geometry | bffdac0a9554f7f5fd05caceee0fa8f3c96d1c60 | [
"MIT"
] | null | null | null | defmodule GeometryTest do
use ExUnit.Case, async: true
import Prove
alias Geometry.{
Feature,
FeatureCollection,
GeometryCollection,
GeometryCollectionM,
GeometryCollectionZ,
GeometryCollectionZM,
Hex,
LineString,
LineStringM,
LineStringZ,
LineStringZM,
MultiLineString,
MultiLineStringM,
MultiLineStringZ,
MultiLineStringZM,
MultiPoint,
MultiPointM,
MultiPointZ,
MultiPointZM,
MultiPolygon,
MultiPolygonM,
MultiPolygonZ,
MultiPolygonZM,
Point,
PointM,
PointZ,
PointZM,
Polygon,
PolygonM,
PolygonZ,
PolygonZM
}
doctest Geometry, import: true
describe "from_wkt/1:" do
prove Geometry.from_wkt("Point (4 5)") == {:ok, %Point{coordinate: [4, 5]}}
prove Geometry.from_wkt("Point (4 5) \n") == {:ok, %Point{coordinate: [4, 5]}}
prove Geometry.from_wkt("Point Z (4 5 9)") == {:ok, %PointZ{coordinate: [4, 5, 9]}}
prove Geometry.from_wkt("Point M (4 6 7)") == {:ok, %PointM{coordinate: [4, 6, 7]}}
prove Geometry.from_wkt("Point ZM (5 4 1 3)") == {:ok, %PointZM{coordinate: [5, 4, 1, 3]}}
prove Geometry.from_wkt("SRID=44;Point (4 5)") == {:ok, {%Point{coordinate: [4, 5]}, 44}}
end
describe "from_wkt/1" do
test "returns an error tuple for an invalid coordinate" do
assert Geometry.from_wkt("Point (x 5)") ==
{:error, "expected Point data", "(x 5)", {1, 0}, 6}
end
end
describe "from_wkt!/2" do
test "returns an exception for an invalid coordinate in Point" do
message = ~s[expected Point data at 2:2, got: "(7 X)\\n"]
assert_raise Geometry.Error, message, fn ->
Geometry.from_wkt!("""
Point
(7 X)
""")
end
end
test "returns an exception for an invalid coordinate in LineString" do
message = ~s(expected LineString data at 1:10, got: "(x 1, 2 2...")
assert_raise Geometry.Error, message, fn ->
Geometry.from_wkt!("LineString(x 1, 2 2, 3 3, 4 4, 5 5, 6 6, 7 7)")
end
end
end
describe "from_wkt!/2:" do
prove Geometry.from_wkt!("Point (4 5)") == %Point{coordinate: [4, 5]}
prove Geometry.from_wkt!("Point Z (4 5 9)") == %PointZ{coordinate: [4, 5, 9]}
prove Geometry.from_wkt!("Point M (4 6 7)") == %PointM{coordinate: [4, 6, 7]}
prove Geometry.from_wkt!("Point ZM (5 4 1 3)") == %PointZM{coordinate: [5, 4, 1, 3]}
prove Geometry.from_wkt!("SRID=44;Point (4 5)") == {%Point{coordinate: [4, 5]}, 44}
end
describe "to_wkt/2:" do
prove Geometry.to_wkt(Point.new(1, 2)) == "Point (1 2)"
prove Geometry.to_wkt(Point.new(1, 2), srid: 42) == "SRID=42;Point (1 2)"
end
describe "empty?/1:" do
prove Geometry.empty?(Point.new()) == true
prove Geometry.empty?(PointM.new()) == true
prove Geometry.empty?(PointZ.new()) == true
prove Geometry.empty?(PointZM.new()) == true
prove Geometry.empty?(LineString.new()) == true
prove Geometry.empty?(LineStringM.new()) == true
prove Geometry.empty?(LineStringZ.new()) == true
prove Geometry.empty?(LineStringZM.new()) == true
prove Geometry.empty?(Polygon.new()) == true
prove Geometry.empty?(PolygonM.new()) == true
prove Geometry.empty?(PolygonZ.new()) == true
prove Geometry.empty?(PolygonZM.new()) == true
prove Geometry.empty?(MultiPoint.new()) == true
prove Geometry.empty?(MultiPointM.new()) == true
prove Geometry.empty?(MultiPointZ.new()) == true
prove Geometry.empty?(MultiPointZM.new()) == true
prove Geometry.empty?(MultiLineString.new()) == true
prove Geometry.empty?(MultiLineStringM.new()) == true
prove Geometry.empty?(MultiLineStringZ.new()) == true
prove Geometry.empty?(MultiLineStringZM.new()) == true
prove Geometry.empty?(MultiPolygon.new()) == true
prove Geometry.empty?(MultiPolygonM.new()) == true
prove Geometry.empty?(MultiPolygonZ.new()) == true
prove Geometry.empty?(MultiPolygonZM.new()) == true
prove Geometry.empty?(GeometryCollection.new()) == true
prove Geometry.empty?(GeometryCollectionM.new()) == true
prove Geometry.empty?(GeometryCollectionZ.new()) == true
prove Geometry.empty?(GeometryCollectionZM.new()) == true
end
describe "from_geo_json/2" do
test "returns Point" do
geo_json =
Jason.decode!("""
{"type": "Point", "coordinates": [1, 2]}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json) == {:ok, %Point{coordinate: [1, 2]}}
end
test "returns PointZ" do
geo_json =
Jason.decode!("""
{"type": "Point", "coordinates": [1, 2, 3]}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :z) == {:ok, %PointZ{coordinate: [1, 2, 3]}}
end
test "returns PointM" do
geo_json =
Jason.decode!("""
{"type": "Point", "coordinates": [1, 2, 3]}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :m) == {:ok, %PointM{coordinate: [1, 2, 3]}}
end
test "returns PointZM" do
geo_json =
Jason.decode!("""
{"type": "Point", "coordinates": [1, 2, 3, 4]}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :zm) ==
{:ok, %PointZM{coordinate: [1, 2, 3, 4]}}
end
test "returns LineString" do
geo_json =
Jason.decode!("""
{"type": "LineString", "coordinates": [[1, 2], [3, 4]]}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json) ==
{:ok, %LineString{points: [[1, 2], [3, 4]]}}
end
test "returns LineStringM" do
geo_json =
Jason.decode!("""
{"type": "LineString", "coordinates": [[1, 2, 3], [3, 4, 5]]}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :m) ==
{:ok, %LineStringM{points: [[1, 2, 3], [3, 4, 5]]}}
end
test "returns LineStringZ" do
geo_json =
Jason.decode!("""
{"type": "LineString", "coordinates": [[1, 2, 3], [3, 4, 5]]}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :z) ==
{:ok, %LineStringZ{points: [[1, 2, 3], [3, 4, 5]]}}
end
test "returns LineStringZM" do
geo_json =
Jason.decode!("""
{"type": "LineString", "coordinates": [[1, 2, 3, 4], [3, 4, 5, 6]]}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :zm) ==
{:ok,
%LineStringZM{
points: [[1, 2, 3, 4], [3, 4, 5, 6]]
}}
end
test "returns Polygon" do
geo_json =
Jason.decode!("""
{
"type": "Polygon",
"coordinates": [
[[35, 10],
[45, 45],
[15, 40],
[10, 20],
[35, 10]],
[[20, 30],
[35, 35],
[30, 20],
[20, 30]]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json) ==
{:ok,
%Polygon{
rings: [
[
[35, 10],
[45, 45],
[15, 40],
[10, 20],
[35, 10]
],
[
[20, 30],
[35, 35],
[30, 20],
[20, 30]
]
]
}}
end
test "returns PolygonM" do
geo_json =
Jason.decode!("""
{
"type": "Polygon",
"coordinates": [
[[35, 10, 1],
[45, 45, 2],
[15, 40, 1],
[10, 20, 2],
[35, 10, 1]],
[[20, 30, 1],
[35, 35, 2],
[30, 20, 3],
[20, 30, 1]]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :m) ==
{:ok,
%PolygonM{
rings: [
[
[35, 10, 1],
[45, 45, 2],
[15, 40, 1],
[10, 20, 2],
[35, 10, 1]
],
[
[20, 30, 1],
[35, 35, 2],
[30, 20, 3],
[20, 30, 1]
]
]
}}
end
test "returns PolygonZ" do
geo_json =
Jason.decode!("""
{
"type": "Polygon",
"coordinates": [
[[35, 10, 1],
[45, 45, 2],
[15, 40, 1],
[10, 20, 2],
[35, 10, 1]],
[[20, 30, 1],
[35, 35, 2],
[30, 20, 3],
[20, 30, 1]]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :z) ==
{:ok,
%PolygonZ{
rings: [
[
[35, 10, 1],
[45, 45, 2],
[15, 40, 1],
[10, 20, 2],
[35, 10, 1]
],
[
[20, 30, 1],
[35, 35, 2],
[30, 20, 3],
[20, 30, 1]
]
]
}}
end
test "returns PolygonZM" do
geo_json =
Jason.decode!("""
{
"type": "Polygon",
"coordinates": [
[[35, 10, 1, 2],
[45, 45, 2, 2],
[15, 40, 1, 3],
[10, 20, 2, 4],
[35, 10, 1, 2]],
[[20, 30, 1, 4],
[35, 35, 2, 3],
[30, 20, 3, 2],
[20, 30, 1, 4]]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :zm) ==
{:ok,
%PolygonZM{
rings: [
[
[35, 10, 1, 2],
[45, 45, 2, 2],
[15, 40, 1, 3],
[10, 20, 2, 4],
[35, 10, 1, 2]
],
[
[20, 30, 1, 4],
[35, 35, 2, 3],
[30, 20, 3, 2],
[20, 30, 1, 4]
]
]
}}
end
test "returns MultiPoint" do
geo_json =
Jason.decode!("""
{
"type": "MultiPoint",
"coordinates": [
[1.1, 1.2],
[20.1, 20.2]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json) ==
{:ok,
%MultiPoint{
points:
MapSet.new([
[1.1, 1.2],
[20.1, 20.2]
])
}}
end
test "returns MultiPointM" do
geo_json =
Jason.decode!("""
{
"type": "MultiPoint",
"coordinates": [
[1.1, 1.2, 1.3],
[20.1, 20.2, 20.3]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :m) ==
{:ok,
%MultiPointM{
points:
MapSet.new([
[1.1, 1.2, 1.3],
[20.1, 20.2, 20.3]
])
}}
end
test "returns MultiPointZ" do
geo_json =
Jason.decode!("""
{
"type": "MultiPoint",
"coordinates": [
[1.1, 1.2, 1.3],
[20.1, 20.2, 20.3]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :z) ==
{:ok,
%MultiPointZ{
points:
MapSet.new([
[1.1, 1.2, 1.3],
[20.1, 20.2, 20.3]
])
}}
end
test "returns MultiPointZM" do
geo_json =
Jason.decode!("""
{
"type": "MultiPoint",
"coordinates": [
[1.1, 1.2, 1.3, 1.4],
[20.1, 20.2, 20.3, 20.4]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :zm) ==
{:ok,
%MultiPointZM{
points:
MapSet.new([
[1.1, 1.2, 1.3, 1.4],
[20.1, 20.2, 20.3, 20.4]
])
}}
end
test "returns MultiLineStringZM" do
geo_json =
Jason.decode!("""
{
"type": "MultiLineString",
"coordinates": [
[[40, 30, 10, 20], [30, 30, 25, 30]]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :zm) ==
{:ok,
%MultiLineStringZM{
line_strings:
MapSet.new([
[
[40, 30, 10, 20],
[30, 30, 25, 30]
]
])
}}
end
test "returns MultiLineStringZ" do
geo_json =
Jason.decode!("""
{
"type": "MultiLineString",
"coordinates": [
[[40, 30, 10], [30, 30, 25]]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :z) ==
{:ok,
%MultiLineStringZ{
line_strings:
MapSet.new([
[
[40, 30, 10],
[30, 30, 25]
]
])
}}
end
test "returns MultiLineStringM" do
geo_json =
Jason.decode!("""
{
"type": "MultiLineString",
"coordinates": [
[[40, 30, 10], [30, 30, 25]]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :m) ==
{:ok,
%MultiLineStringM{
line_strings:
MapSet.new([
[
[40, 30, 10],
[30, 30, 25]
]
])
}}
end
test "returns MultiLineString" do
geo_json =
Jason.decode!("""
{
"type": "MultiLineString",
"coordinates": [
[[40, 30], [30, 30]]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json) ==
{:ok,
%MultiLineString{
line_strings:
MapSet.new([
[
[40, 30],
[30, 30]
]
])
}}
end
test "returns MultiPolygon" do
geo_json =
Jason.decode!("""
{
"type": "MultiPolygon",
"coordinates": [
[
[[1, 1], [9, 1], [9, 8], [1, 1]],
[[6, 2], [7, 2], [7, 3], [6, 2]]
], [
[[6, 2], [8, 2], [8, 4], [6, 2]]
]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json) ==
{:ok,
%MultiPolygon{
polygons:
MapSet.new([
[
[[6, 2], [8, 2], [8, 4], [6, 2]]
],
[
[[1, 1], [9, 1], [9, 8], [1, 1]],
[[6, 2], [7, 2], [7, 3], [6, 2]]
]
])
}}
end
test "returns MultiPolygonM" do
geo_json =
Jason.decode!("""
{
"type": "MultiPolygon",
"coordinates": [
[
[[6, 2, 3], [8, 2, 4], [8, 4, 5], [6, 2, 3]]
], [
[[1, 1, 3], [9, 1, 4], [9, 8, 5], [1, 1, 3]],
[[6, 2, 4], [7, 2, 6], [7, 3, 3], [6, 2, 4]]
]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :m) ==
{:ok,
%MultiPolygonM{
polygons:
MapSet.new([
[
[
[1, 1, 3],
[9, 1, 4],
[9, 8, 5],
[1, 1, 3]
],
[
[6, 2, 4],
[7, 2, 6],
[7, 3, 3],
[6, 2, 4]
]
],
[
[
[6, 2, 3],
[8, 2, 4],
[8, 4, 5],
[6, 2, 3]
]
]
])
}}
end
test "returns MultiPolygonZ" do
geo_json =
Jason.decode!("""
{
"type": "MultiPolygon",
"coordinates": [
[
[[6, 2, 3], [8, 2, 4], [8, 4, 5], [6, 2, 3]]
], [
[[1, 1, 3], [9, 1, 4], [9, 8, 5], [1, 1, 3]],
[[6, 2, 4], [7, 2, 6], [7, 3, 3], [6, 2, 4]]
]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :z) ==
{:ok,
%MultiPolygonZ{
polygons:
MapSet.new([
[
[
[1, 1, 3],
[9, 1, 4],
[9, 8, 5],
[1, 1, 3]
],
[
[6, 2, 4],
[7, 2, 6],
[7, 3, 3],
[6, 2, 4]
]
],
[
[
[6, 2, 3],
[8, 2, 4],
[8, 4, 5],
[6, 2, 3]
]
]
])
}}
end
test "returns MultiPolygonZM" do
geo_json =
Jason.decode!("""
{
"type": "MultiPolygon",
"coordinates": [
[
[[6, 2, 3, 4], [8, 2, 4, 5], [8, 4, 5, 6], [6, 2, 3, 4]]
], [
[[1, 1, 3, 4], [9, 1, 4, 5], [9, 8, 5, 6], [1, 1, 3, 4]],
[[6, 2, 4, 3], [7, 2, 6, 7], [7, 3, 3, 4], [6, 2, 4, 3]]
]
]
}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json(geo_json, type: :zm) ==
{:ok,
%Geometry.MultiPolygonZM{
polygons:
MapSet.new([
[
[[1, 1, 3, 4], [9, 1, 4, 5], [9, 8, 5, 6], [1, 1, 3, 4]],
[[6, 2, 4, 3], [7, 2, 6, 7], [7, 3, 3, 4], [6, 2, 4, 3]]
],
[[[6, 2, 3, 4], [8, 2, 4, 5], [8, 4, 5, 6], [6, 2, 3, 4]]]
])
}}
end
test "returns GeometryCollectionZM" do
geo_json =
Jason.decode!("""
{
"type": "GeometryCollection",
"geometries": [
{"type": "Point", "coordinates": [1.1, 2.2, 3.3, 4.4]}
]
}
""")
assert Geometry.from_geo_json(geo_json, type: :zm) ==
{:ok,
%GeometryCollectionZM{
geometries: MapSet.new([%PointZM{coordinate: [1.1, 2.2, 3.3, 4.4]}])
}}
end
test "returns Feature" do
geo_json =
Jason.decode!("""
{
"type": "Feature",
"geometry": {"type": "Point", "coordinates": [1, 2, 3]},
"properties": {"facility": "Hotel"}
}
""")
assert Geometry.from_geo_json(geo_json, type: :z) ==
{:ok,
%Feature{
geometry: %PointZ{coordinate: [1, 2, 3]},
properties: %{"facility" => "Hotel"}
}}
end
test "returns FeatureCollection" do
geo_json =
Jason.decode!("""
{
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": {"type": "Point", "coordinates": [1, 2, 3]},
"properties": {"facility": "Hotel"}
}, {
"type": "Feature",
"geometry": {"type": "Point", "coordinates": [4, 3, 2]},
"properties": {"facility": "School"}
}
]
}
""")
assert Geometry.from_geo_json(geo_json, type: :z) ==
{
:ok,
%FeatureCollection{
features:
MapSet.new([
%Feature{
geometry: %PointZ{coordinate: [1, 2, 3]},
properties: %{"facility" => "Hotel"}
},
%Feature{
geometry: %PointZ{coordinate: [4, 3, 2]},
properties: %{"facility" => "School"}
}
])
}
}
end
test "returns an error for invalid data" do
geo_json =
Jason.decode!("""
{
"type": "GeometryCollection",
"geometries": [
{"type": "Point", "coordinates": ["evil", 2.2, 3.3, 4.4]}
]
}
""")
assert Geometry.from_geo_json(geo_json, type: :zm) == {:error, :invalid_data}
end
test "returns an error for an unknown type" do
json =
Jason.decode!("""
{"type": "Spunk", "coordinates": [1, 2, 3, 4]}
""")
assert Geometry.from_geo_json(json, type: :m) == {:error, :unknown_type}
end
test "returns an error for an unknown :type" do
json =
Jason.decode!("""
{"type": "Point", "coordinates": [1, 2]}
""")
assert Geometry.from_geo_json(json, type: :a) == {:error, :unknown_type}
end
end
describe "from_geo_json!/1" do
test "returns Point" do
geo_json =
Jason.decode!("""
{"type": "Point", "coordinates": [1, 2]}
""")
assert GeoJsonValidator.valid?(geo_json)
assert Geometry.from_geo_json!(geo_json) == %Point{coordinate: [1, 2]}
end
test "returns an error for an unknown type" do
json =
Jason.decode!("""
{"type": "Spunk", "coordinates": [1, 2, 3, 4]}
""")
message = "unknown type"
assert_raise Geometry.Error, message, fn ->
Geometry.from_geo_json!(json, type: :m)
end
end
end
describe "from_wkb/2" do
test "returns Point" do
wkb = "0000000001BFF3333333333333400B333333333333"
assert Geometry.from_wkb(wkb, :hex) == {:ok, %Point{coordinate: [-1.2, 3.4]}}
end
test "returns Point from WKB with trailing whitespace" do
wkb = "0000000001BFF3333333333333400B333333333333 \n"
assert Geometry.from_wkb(wkb, :hex) == {:ok, %Point{coordinate: [-1.2, 3.4]}}
end
test "returns Point with SRID" do
wkb = "00200000010000014DBFF3333333333333400B333333333333"
assert Geometry.from_wkb(wkb, :hex) == {:ok, {%Point{coordinate: [-1.2, 3.4]}, 333}}
end
test "returns empty LineStringZM" do
wkb = "00C000000200000000"
assert Geometry.from_wkb(wkb, :hex) == {:ok, %LineStringZM{}}
end
test "returns LineStringZM (xdr)" do
wkb = """
00\
C0000002\
00000002\
3FF0000000000000400000000000000040080000000000004010000000000000\
40140000000000004018000000000000401C0000000000004020000000000000\
"""
assert Geometry.from_wkb(wkb, :hex) == {
:ok,
%LineStringZM{
points: [
[1.0, 2.0, 3.0, 4.0],
[5.0, 6.0, 7.0, 8.0]
]
}
}
end
test "returns LineStringZM (ndr)" do
wkb = """
01\
020000C0\
02000000\
000000000000F03F000000000000004000000000000008400000000000001040\
000000000000144000000000000018400000000000001C400000000000002040\
"""
assert Geometry.from_wkb(wkb, :hex) == {
:ok,
%LineStringZM{
points: [
[1.0, 2.0, 3.0, 4.0],
[5.0, 6.0, 7.0, 8.0]
]
}
}
end
test "returns LineStringZ (xdr)" do
wkb = """
00\
80000002\
00000002\
3FF000000000000040000000000000004008000000000000\
40140000000000004018000000000000401C000000000000\
"""
assert Geometry.from_wkb(wkb, :hex) == {
:ok,
%LineStringZ{
points: [
[1.0, 2.0, 3.0],
[5.0, 6.0, 7.0]
]
}
}
end
test "returns LineStringM (xdr)" do
wkb = """
00\
40000002\
00000002\
3FF000000000000040000000000000004008000000000000\
40140000000000004018000000000000401C000000000000\
"""
assert Geometry.from_wkb(wkb, :hex) == {
:ok,
%LineStringM{
points: [
[1.0, 2.0, 3.0],
[5.0, 6.0, 7.0]
]
}
}
end
test "returns LineStringM (ndr) with SRID" do
wkb = """
01\
02000020\
1F020000\
02000000\
000000000000F03F0000000000000040\
00000000000014400000000000001840\
"""
assert Geometry.from_wkb(wkb, :hex) == {
:ok,
{
%LineString{
points: [[1.0, 2.0], [5.0, 6.0]]
},
543
}
}
end
test "returns PolygonZM (xdr)" do
wkb = """
00\
C0000003\
00000001\
00000005\
403E00000000000040240000000000004034000000000000402E000000000000\
4044000000000000404400000000000040240000000000004034000000000000\
403400000000000040440000000000004039000000000000402E000000000000\
40240000000000004034000000000000402E0000000000004039000000000000\
403E00000000000040240000000000004034000000000000402E000000000000\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:ok,
%PolygonZM{
rings: [
[
[30.0, 10.0, 20.0, 15.0],
[40.0, 40.0, 10.0, 20.0],
[20.0, 40.0, 25.0, 15.0],
[10.0, 20.0, 15.0, 25.0],
[30.0, 10.0, 20.0, 15.0]
]
]
}}
end
test "returns PolygonZM with hole and SRID (ndr) " do
wkb = """
01\
030000E0\
4D010000\
02000000\
05000000\
000000000080414000000000000024400000000000002E400000000000003940\
0000000000804640000000000080464000000000000024400000000000003440\
0000000000002E40000000000000444000000000000034400000000000002440\
000000000000244000000000000034400000000000002E400000000000003940\
000000000080414000000000000024400000000000002E400000000000003940\
04000000\
00000000000034400000000000003E400000000000002E400000000000002440\
0000000000804140000000000080414000000000000024400000000000004940\
0000000000003E40000000000000344000000000000039400000000000804140\
00000000000034400000000000003E400000000000002E400000000000002440\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:ok,
{
%PolygonZM{
rings: [
[
[35.0, 10.0, 15.0, 25.0],
[45.0, 45.0, 10.0, 20.0],
[15.0, 40.0, 20.0, 10.0],
[10.0, 20.0, 15.0, 25.0],
[35.0, 10.0, 15.0, 25.0]
],
[
[20.0, 30.0, 15.0, 10.0],
[35.0, 35.0, 10.0, 50.0],
[30.0, 20.0, 25.0, 35.0],
[20.0, 30.0, 15.0, 10.0]
]
]
},
333
}}
end
test "returns a MultiPointZM (xdr)" do
wkb = """
00\
c0000004\
00000003\
00\
C0000001\
403e0000000000004024000000000000402e0000000000004024000000000000\
00\
c0000001\
404400000000000040440000000000004034000000000000403E000000000000\
00\
C0000001\
40340000000000004044000000000000402E0000000000004034000000000000\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:ok,
%MultiPointZM{
points:
MapSet.new([
[30.0, 10.0, 15.0, 10.0],
[20.0, 40.0, 15.0, 20.0],
[40.0, 40.0, 20.0, 30.0]
])
}}
end
test "returns a MultiPolygonZM (ndr)" do
wkb = """
01\
060000C0\
02000000\
01\
030000C0\
02000000\
04000000\
000000000000F03F000000000000F03F00000000000008400000000000001040\
0000000000002240000000000000F03F00000000000010400000000000001440\
0000000000002240000000000000204000000000000014400000000000001840\
000000000000F03F000000000000F03F00000000000008400000000000001040\
04000000\
0000000000001840000000000000004000000000000010400000000000000840\
0000000000001C40000000000000004000000000000018400000000000001C40\
0000000000001C40000000000000084000000000000008400000000000001040\
0000000000001840000000000000004000000000000010400000000000000840\
01\
030000C0\
01000000\
04000000\
0000000000001840000000000000004000000000000008400000000000001040\
0000000000002040000000000000004000000000000010400000000000001440\
0000000000002040000000000000104000000000000014400000000000001840\
0000000000001840000000000000004000000000000008400000000000001040\
"""
multi_polygon = %MultiPolygonZM{
polygons:
MapSet.new([
[
[
[1.0, 1.0, 3.0, 4.0],
[9.0, 1.0, 4.0, 5.0],
[9.0, 8.0, 5.0, 6.0],
[1.0, 1.0, 3.0, 4.0]
],
[
[6.0, 2.0, 4.0, 3.0],
[7.0, 2.0, 6.0, 7.0],
[7.0, 3.0, 3.0, 4.0],
[6.0, 2.0, 4.0, 3.0]
]
],
[
[
[6.0, 2.0, 3.0, 4.0],
[8.0, 2.0, 4.0, 5.0],
[8.0, 4.0, 5.0, 6.0],
[6.0, 2.0, 3.0, 4.0]
]
]
])
}
assert Geometry.from_wkb(wkb, :hex) == {:ok, multi_polygon}
end
test "returns a MultiLineStringZM (xdr)" do
wkb = """
00\
C0000005\
00000002\
00\
C0000002\
00000003\
402400000000000040240000000000004034000000000000403E000000000000\
4034000000000000403400000000000040440000000000004049000000000000\
4024000000000000404400000000000040240000000000004034000000000000\
00\
C0000002\
00000002\
40440000000000004044000000000000403E0000000000004034000000000000\
403E000000000000403E00000000000040440000000000004049000000000000\
"""
multi_string = %MultiLineStringZM{
line_strings:
MapSet.new([
[
[40.0, 40.0, 30.0, 20.0],
[30.0, 30.0, 40.0, 50.0]
],
[
[10.0, 10.0, 20.0, 30.0],
[20.0, 20.0, 40.0, 50.0],
[10.0, 40.0, 10.0, 20.0]
]
])
}
assert Geometry.from_wkb(wkb, :hex) == {:ok, multi_string}
end
test "returns an error tuple for an incomplete MultiPointZM (xdr)" do
wkb = """
00\
C0000004\
00000003\
00\
C000\
"""
assert Geometry.from_wkb(wkb, :hex) == {:error, "expected geometry code", "C000", 20}
end
test "returns an error tuple for an invalid geometry in MultiPointZM (xdr)" do
wkb = """
00\
C0000004\
00000003\
00\
C0000002\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(unexpected code "C0000002" for sub-geometry), "", 20}
assert Geometry.from_wkb(Hex.to_binary(wkb)) ==
{:error, "unexpected code 3221225474 for sub-geometry", "", 10}
end
test "returns an error tuple for an unexpected endian" do
wkb = """
00\
C0000004\
00000003\
01\
C0000001\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, "expected endian flag \"00\", got \"01\"", "C0000001", 18}
assert Geometry.from_wkb(Hex.to_binary(wkb)) ==
{:error, "expected endian :xdr", <<1, 192, 0, 0, 1>>, 9}
end
test "returns an error tuple for a changing endian (ndr)" do
wkb = """
01\
040000C0\
03000000\
00\
C0000001\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected endian flag "01", got "00"), "C0000001", 18}
end
test "returns an error tuple for an invalid endian in sub-category (ndr)" do
wkb = """
01\
040000C0\
03000000\
66\
C0000001\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected endian flag "01", got "66"), "C0000001", 18}
end
test "returns an error tuple for an invalid sub-category code (ndr)" do
wkb = """
01\
040000C0\
03000000\
01\
C00XXX01\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(invalid sub-geomtry code: "C00XXX01"), "", 20}
end
test "returns an error tuple for an unknown sub-category code (ndr)" do
wkb = """
01\
040000C0\
03000000\
01\
C00AAA01\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(unknown sub-geomtry code: "C00AAA01"), "", 20}
assert Geometry.from_wkb(Hex.to_binary(wkb)) ==
{:error, "unknown sub-geomtry code: 27921088", "", 10}
end
test "returns an error tuple for an unknown endian flag" do
wkb = "1100"
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected endian flag "00" or "01", got "11"), "00", 0}
assert Geometry.from_wkb(Hex.to_binary(wkb)) ==
{:error, "expected endian flag", <<0x11, 0x0>>, 0}
end
test "returns an error tuple for an unknown geometry code" do
wkb = "00123456780000"
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(unknown geomtry code: "12345678"), "0000", 2}
assert Geometry.from_wkb(Hex.to_binary(wkb)) ==
{:error, "unknown geomtry code: 305419896", <<0, 0>>, 1}
end
test "returns an error tuple for an invalid geometry code" do
wkb = "00X2345678rest"
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(invalid geomtry code: "X2345678"), "rest", 2}
end
test "returns an error tuple for invalid coordinate" do
wkb = "0000000001invalid.."
assert Geometry.from_wkb(wkb, :hex) == {:error, "invalid coordinate", "invalid..", 10}
end
test "returns an error tuple for invalid y-coordinate in Point" do
wkb = "0000000001BFF3333333333333400B333333333XYZ"
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected float, got "400B333333333XYZ"), "", 26}
end
test "returns an error tuple for invalid x-coordinate in Point" do
wkb = "0000000001BFF333 333333333400B333333333ABC"
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected float, got "BFF333 333333333"), "", 10}
end
test "returns an error tuple for invalid x-coordinate in PointZ" do
wkb = """
01\
01000080\
3333333333*3F3BF\
3333333333330B40\
0000000000001440\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected float, got "3333333333*3F3BF"), "", 10}
end
test "returns an error tuple for invalid y-coordinate in PointZ" do
wkb = """
01\
01000080\
333333333333F3BF\
333*333333330B40\
0000000000001440\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected float, got "333*333333330B40"), "", 26}
end
test "returns an error tuple for invalid z-coordinate in PointZ" do
wkb = """
01\
01000080\
333333333333F3BF\
3333333333330B40\
000000.000001440\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected float, got "000000.000001440"), "", 42}
end
test "returns an error tuple for invalid x-coordinate in PointM" do
wkb = """
01\
01000040\
3333333333*3F3BF\
3333333333330B40\
0000000000001440\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected float, got "3333333333*3F3BF"), "", 10}
end
test "returns an error tuple for invalid y-coordinate in PointM" do
wkb = """
01\
01000040\
333333333333F3BF\
333*333333330B40\
0000000000001440\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected float, got "333*333333330B40"), "", 26}
end
test "returns an error tuple for invalid m-coordinate in PointM" do
wkb = """
01\
01000040\
333333333333F3BF\
3333333333330B40\
000000.000001440\
"""
assert Geometry.from_wkb(wkb, :hex) ==
{:error, ~s(expected float, got "000000.000001440"), "", 42}
end
test "returns an error tuple for a missing SRID" do
wkb = "01010000A05C"
assert Geometry.from_wkb(wkb, :hex) == {:error, ~s(expected SRID, got "5C"), "5C", 10}
end
test "returns an error tuple for an invalid SRID" do
wkb = "01010000A05C.50000333"
assert Geometry.from_wkb(wkb, :hex) == {:error, ~s(invalid SRID "5C.50000"), "333", 10}
end
test "returns an error tuple for too much data" do
wkb = "0000000001BFF3332333333333400B333333333ABC0000"
assert Geometry.from_wkb(wkb, :hex) == {:error, "expected EOS", "0000", 42}
end
test "returns an error tuple for incomplete length in LineStringZM" do
wkb = """
01\
020000C0\
0200\
"""
assert {:error, ~s(expected length, got "0200"), "0200", 10} = Geometry.from_wkb(wkb, :hex)
end
test "returns an error tuple for invalid length in LineStringZM" do
wkb = """
01\
020000C0\
0200.000\
9A9999999999F1BF\
9A999999999901C0\
6666666666660AC0\
9A999999999911C0\
0000000000001640\
6666666666661A40\
CDCCCCCCCCCC1E40\
9A99999999992140\
"""
assert {:error, ~s(invalid length "0200.000"), _rest, 10} = Geometry.from_wkb(wkb, :hex)
end
test "returns an error tuple for invalid x-coordinate in LineStringZM" do
wkb = """
01\
020000C0\
02000000\
9A99999999.9F1BF\
9A999999999901C0\
6666666666660AC0\
9A999999999911C0\
0000000000001640\
6666666666661A40\
CDCCCCCCCCCC1E40\
9A99999999992140\
"""
assert {:error, ~s(expected float, got "9A99999999.9F1BF"), _rest, 18} =
Geometry.from_wkb(wkb, :hex)
end
test "returns an error tuple for invalid y-coordinate in LineStringZM" do
wkb = """
01\
020000C0\
02000000\
9A9999999999F1BF\
9A99999999990.C0\
6666666666660AC0\
9A999999999911C0\
0000000000001640\
6666666666661A40\
CDCCCCCCCCCC1E40\
9A99999999992140\
"""
assert {:error, ~s(expected float, got "9A99999999990.C0"), _rest, 34} =
Geometry.from_wkb(wkb, :hex)
end
test "returns an error tuple for invalid z-coordinate in LineStringZM" do
wkb = """
01\
020000C0\
02000000\
9A9999999999F1BF\
9A999999999901C0\
6666666666660AC0\
9A999999999911C0\
0000000000001640\
6666666666661A40\
CDCCCCCCCCCC.E40\
9A99999999992140\
"""
assert {:error, ~s(expected float, got "CDCCCCCCCCCC.E40"), _rest, 114} =
Geometry.from_wkb(wkb, :hex)
end
test "returns an error tuple for invalid m-coordinate in LineStringZM" do
wkb = """
01\
020000C0\
02000000\
9A9999999999F1BF\
9A999999999901C0\
6666666666660AC0\
9A999999999911C0\
0000000000001640\
6666666666661A40\
CDCCCCCCCCCC1E40\
9A9999999999.140\
"""
assert {:error, ~s(expected float, got "9A9999999999.140"), _rest, 130} =
Geometry.from_wkb(wkb, :hex)
end
test "returns an error tuple for an incomplete geometry code" do
wkb = """
01\
0200\
"""
assert Geometry.from_wkb(wkb, :hex) == {:error, "expected geometry code", "0200", 2}
end
test "returns an error tuple for an empty string" do
assert Geometry.from_wkb("", :hex) == {:error, ~s(expected endian flag "00" or "01"), "", 0}
end
test "returns an error tuple for an empty bit-string" do
assert Geometry.from_wkb(<<>>) == {:error, "expected endian flag", "", 0}
end
end
describe "from_wkb!/2" do
test "returns Point" do
wkb = "0000000001BFF3333333333333400B333333333333"
assert Geometry.from_wkb!(wkb, :hex) == %Point{coordinate: [-1.2, 3.4]}
end
test "returns Point with SRID" do
wkb = "00200000010000014DBFF3333333333333400B333333333333"
assert Geometry.from_wkb!(wkb, :hex) == {%Point{coordinate: [-1.2, 3.4]}, 333}
end
test "returns an exception for invalid m-coordinate in LineStringZM" do
wkb = """
01\
020000C0\
02000000\
9A9999999999F1BF\
9A999999999901C0\
6666666666660AC0\
9A999999999911C0\
0000000000001640\
6666666666661A40\
CDCCCCCCCCCC1E40\
9A9999999999.140\
"""
message = ~s(expected float, got "9A9999999999.140", at position 130)
assert_raise Geometry.Error, message, fn ->
Geometry.from_wkb!(wkb, :hex)
end
end
end
end
| 27.596273 | 98 | 0.460792 |
f7ae7eebfeb63e638a9ea54ae2faecedba8c25fb | 325 | ex | Elixir | lib/rill/message_store/ecto/postgres/session.ex | Carburetor/rill | 6ba477373cff28f56fbead9a316166d994da67d3 | [
"MIT"
] | null | null | null | lib/rill/message_store/ecto/postgres/session.ex | Carburetor/rill | 6ba477373cff28f56fbead9a316166d994da67d3 | [
"MIT"
] | null | null | null | lib/rill/message_store/ecto/postgres/session.ex | Carburetor/rill | 6ba477373cff28f56fbead9a316166d994da67d3 | [
"MIT"
] | null | null | null | defmodule Rill.MessageStore.Ecto.Postgres.Session do
alias Rill.MessageStore.Ecto.Postgres, as: MessageStore
alias Rill.MessageStore.Ecto.Postgres.Database
alias Rill.Session
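# Usage sketch (`MyApp.Repo` is a hypothetical Ecto repo module):
#
#     session = Rill.MessageStore.Ecto.Postgres.Session.new(MyApp.Repo)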
def new(repo) when is_atom(repo) do
session = Session.new(MessageStore, Database)
Session.put_config(session, :repo, repo)
end
end
| 29.545455 | 57 | 0.775385 |
f7ae906f2908eb6f41ff48289b48c7c069501079 | 1,123 | exs | Elixir | mix.exs | ananthakumaran/redix_sentinel | 7d17d07287a324024816ccaf4222e455e73c569f | [
"MIT"
] | 5 | 2017-09-06T09:40:39.000Z | 2019-12-12T15:47:16.000Z | mix.exs | ananthakumaran/redix_sentinel | 7d17d07287a324024816ccaf4222e455e73c569f | [
"MIT"
] | 4 | 2017-09-07T15:33:51.000Z | 2018-10-28T09:47:09.000Z | mix.exs | ananthakumaran/redix_sentinel | 7d17d07287a324024816ccaf4222e455e73c569f | [
"MIT"
] | 2 | 2018-05-14T05:31:36.000Z | 2018-10-23T15:37:28.000Z | defmodule RedixSentinel.Mixfile do
use Mix.Project
@version "0.6.1"
def project do
[
app: :redix_sentinel,
version: @version,
elixir: "~> 1.2",
description: "Redix with sentinel support",
package: package(),
docs: docs(),
dialyzer: [
plt_add_deps: :transitive,
flags: [:unmatched_returns, :race_conditions, :error_handling, :underspecs]
],
deps: deps()
]
end
def application do
[extra_applications: [:logger]]
end
defp deps do
[
{:redix, "~> 0.6.1"},
{:connection, "~> 1.0.3"},
{:ex_doc, "~> 0.16", only: :dev},
{:mix_test_watch, "~> 0.2", only: :dev},
{:toxiproxy, "~> 0.3", only: :test}
]
end
defp package do
%{
licenses: ["MIT"],
links: %{"Github" => "https://github.com/ananthakumaran/redix_sentinel"},
maintainers: ["[email protected]"]
}
end
defp docs do
[
source_url: "https://github.com/ananthakumaran/redix_sentinel",
source_ref: "v#{@version}",
main: RedixSentinel,
extras: ["README.md"]
]
end
end
| 21.188679 | 83 | 0.560107 |
f7aeaad475a1127c281b136eb47432109c7e60e9 | 30,823 | ex | Elixir | apps/language_server/lib/language_server/server.ex | SerenityIK/elixir-ls | 9569197be87809c241360a6ef1f5a9fffd25ab74 | [
"Apache-2.0"
] | null | null | null | apps/language_server/lib/language_server/server.ex | SerenityIK/elixir-ls | 9569197be87809c241360a6ef1f5a9fffd25ab74 | [
"Apache-2.0"
] | null | null | null | apps/language_server/lib/language_server/server.ex | SerenityIK/elixir-ls | 9569197be87809c241360a6ef1f5a9fffd25ab74 | [
"Apache-2.0"
] | null | null | null | defmodule ElixirLS.LanguageServer.Server do
@moduledoc """
Language Server Protocol server
This server tracks open files, attempts to rebuild the project when a file changes, and handles
requests from the IDE (for things like autocompletion, hover, etc.)
Notifications from the IDE are handled synchronously, whereas requests can be handled synchronously
or asynchronously.
When possible, handling the request asynchronously has several advantages. The asynchronous
request handling cannot modify the server state. That way, if the process handling the request
crashes, we can report that error to the client and continue knowing that the state is
uncorrupted. Also, asynchronous requests can be cancelled by the client if they're taking too long
or the user no longer cares about the result.
"""
use GenServer
alias ElixirLS.LanguageServer.{SourceFile, Build, Protocol, JsonRpc, Dialyzer}
alias ElixirLS.LanguageServer.Providers.{
Completion,
Hover,
Definition,
Implementation,
References,
Formatting,
SignatureHelp,
DocumentSymbols,
WorkspaceSymbols,
OnTypeFormatting,
CodeLens,
ExecuteCommand
}
alias ElixirLS.Utils.Launch
use Protocol
defstruct [
:server_instance_id,
:build_ref,
:dialyzer_sup,
:client_capabilities,
:root_uri,
:project_dir,
:settings,
build_diagnostics: [],
dialyzer_diagnostics: [],
needs_build?: false,
load_all_modules?: false,
build_running?: false,
analysis_ready?: false,
received_shutdown?: false,
requests: %{},
# Tracks source files that are currently open in the editor
source_files: %{},
awaiting_contracts: [],
supports_dynamic: false
]
defmodule InvalidParamError do
defexception [:uri, :message]
@impl true
def exception(uri) do
msg = "invalid URI: #{inspect(uri)}"
%InvalidParamError{message: msg, uri: uri}
end
end
@watched_extensions [".ex", ".exs", ".erl", ".hrl", ".yrl", ".xrl", ".eex", ".leex"]
## Client API
def start_link(name \\ nil) do
GenServer.start_link(__MODULE__, :ok, name: name)
end
def receive_packet(server \\ __MODULE__, packet) do
GenServer.cast(server, {:receive_packet, packet})
end
def build_finished(server \\ __MODULE__, result) do
GenServer.cast(server, {:build_finished, result})
end
def dialyzer_finished(server \\ __MODULE__, diagnostics, build_ref) do
GenServer.cast(server, {:dialyzer_finished, diagnostics, build_ref})
end
def rebuild(server \\ __MODULE__) do
GenServer.cast(server, :rebuild)
end
def suggest_contracts(server \\ __MODULE__, uri) do
GenServer.call(server, {:suggest_contracts, uri}, :infinity)
end
defguardp is_initialized(server_instance_id) when not is_nil(server_instance_id)
## Server Callbacks
@impl GenServer
def init(:ok) do
{:ok, %__MODULE__{}}
end
@impl GenServer
def handle_call({:request_finished, id, result}, _from, state) do
case result do
{:error, type, msg} -> JsonRpc.respond_with_error(id, type, msg)
{:ok, result} -> JsonRpc.respond(id, result)
end
state = %{state | requests: Map.delete(state.requests, id)}
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:suggest_contracts, uri}, from, state) do
case state do
%{analysis_ready?: true, source_files: %{^uri => %{dirty?: false}}} ->
{:reply, Dialyzer.suggest_contracts([SourceFile.path_from_uri(uri)]), state}
_ ->
awaiting_contracts = reject_awaiting_contracts(state.awaiting_contracts, uri)
{:noreply, %{state | awaiting_contracts: [{from, uri} | awaiting_contracts]}}
end
end
@impl GenServer
def handle_cast({:build_finished, {status, diagnostics}}, state)
when status in [:ok, :noop, :error] and is_list(diagnostics) do
{:noreply, handle_build_result(status, diagnostics, state)}
end
@impl GenServer
def handle_cast({:dialyzer_finished, diagnostics, build_ref}, state) do
{:noreply, handle_dialyzer_result(diagnostics, build_ref, state)}
end
@impl GenServer
def handle_cast({:receive_packet, request(id, _, _) = packet}, state) do
{:noreply, handle_request_packet(id, packet, state)}
end
@impl GenServer
def handle_cast({:receive_packet, request(id, method)}, state) do
{:noreply, handle_request_packet(id, request(id, method, nil), state)}
end
@impl GenServer
def handle_cast(
{:receive_packet, notification(_) = packet},
state = %{received_shutdown?: false, server_instance_id: server_instance_id}
)
when is_initialized(server_instance_id) do
{:noreply, handle_notification(packet, state)}
end
@impl GenServer
def handle_cast({:receive_packet, notification(_) = packet}, state) do
case packet do
notification("exit") ->
{:noreply, handle_notification(packet, state)}
_ ->
{:noreply, state}
end
end
@impl GenServer
def handle_cast(:rebuild, state) do
{:noreply, trigger_build(state)}
end
@impl GenServer
def handle_info(:default_config, state) do
state =
case state do
%{settings: nil} ->
JsonRpc.show_message(
:info,
"Did not receive workspace/didChangeConfiguration notification after 5 seconds. " <>
"Using default settings."
)
set_settings(state, %{})
_ ->
state
end
{:noreply, state}
end
@impl GenServer
def handle_info({:DOWN, ref, _, _pid, reason}, %{build_ref: ref, build_running?: true} = state) do
state = %{state | build_running?: false}
state =
case reason do
:normal -> state
_ -> handle_build_result(:error, [Build.exception_to_diagnostic(reason)], state)
end
if reason == :normal do
WorkspaceSymbols.notify_build_complete()
end
state = if state.needs_build?, do: trigger_build(state), else: state
{:noreply, state}
end
@impl GenServer
def handle_info({:DOWN, _ref, :process, pid, reason}, %{requests: requests} = state) do
state =
case Enum.find(requests, &match?({_, ^pid}, &1)) do
{id, _} ->
error_msg = Exception.format_exit(reason)
JsonRpc.respond_with_error(id, :server_error, error_msg)
%{state | requests: Map.delete(requests, id)}
nil ->
state
end
{:noreply, state}
end
## Helpers
defp handle_notification(notification("initialized"), state) do
# If we don't receive workspace/didChangeConfiguration for 5 seconds, use default settings
Process.send_after(self(), :default_config, 5000)
if state.supports_dynamic do
watchers = for ext <- @watched_extensions, do: %{"globPattern" => "**/*." <> ext}
register_capability_result =
JsonRpc.register_capability_request("workspace/didChangeWatchedFiles", %{
"watchers" => watchers
})
case register_capability_result do
{:ok, nil} ->
:ok
other ->
JsonRpc.log_message(
:error,
"client/registerCapability returned: #{inspect(other)}"
)
end
end
state
end
defp handle_notification(cancel_request(id), %{requests: requests} = state) do
case requests do
%{^id => pid} ->
Process.exit(pid, :cancelled)
JsonRpc.respond_with_error(id, :request_cancelled, "Request cancelled")
%{state | requests: Map.delete(requests, id)}
_ ->
JsonRpc.log_message(
:warning,
"Received $/cancelRequest for unknown request id: #{inspect(id)}"
)
state
end
end
# We don't start performing builds until we receive settings from the client in case they've set
# the `projectDir` or `mixEnv` settings. If the settings don't match the format expected, leave
# settings unchanged or set default settings if this is the first request.
defp handle_notification(did_change_configuration(changed_settings), state) do
prev_settings = state.settings || %{}
new_settings =
case changed_settings do
%{"elixirLS" => changed_settings} when is_map(changed_settings) ->
Map.merge(prev_settings, changed_settings)
_ ->
prev_settings
end
set_settings(state, new_settings)
end
defp handle_notification(notification("exit"), state) do
code = if state.received_shutdown?, do: 0, else: 1
unless Application.get_env(:language_server, :test_mode) do
System.halt(code)
else
Process.exit(self(), {:exit_code, code})
end
state
end
defp handle_notification(did_open(uri, _language_id, version, text), state) do
if Map.has_key?(state.source_files, uri) do
# An open notification must not be sent more than once without a corresponding
# close notification having been sent first
JsonRpc.log_message(
:warning,
"Received textDocument/didOpen for file that is already open. Received uri: #{
inspect(uri)
}"
)
state
else
source_file = %SourceFile{text: text, version: version}
Build.publish_file_diagnostics(
uri,
state.build_diagnostics ++ state.dialyzer_diagnostics,
source_file
)
put_in(state.source_files[uri], source_file)
end
end
defp handle_notification(did_close(uri), state) do
if not Map.has_key?(state.source_files, uri) do
# A close notification requires a previous open notification to be sent
JsonRpc.log_message(
:warning,
"Received textDocument/didClose for file that is not open. Received uri: #{inspect(uri)}"
)
state
else
awaiting_contracts = reject_awaiting_contracts(state.awaiting_contracts, uri)
%{
state
| source_files: Map.delete(state.source_files, uri),
awaiting_contracts: awaiting_contracts
}
end
end
defp handle_notification(did_change(uri, version, content_changes), state) do
if not Map.has_key?(state.source_files, uri) do
# The source file was not marked as open either due to a bug in the
# client or a restart of the server. So just ignore the message and do
# not update the state
JsonRpc.log_message(
:warning,
"Received textDocument/didChange for file that is not open. Received uri: #{inspect(uri)}"
)
state
else
update_in(state.source_files[uri], fn source_file ->
%SourceFile{source_file | version: version, dirty?: true}
|> SourceFile.apply_content_changes(content_changes)
end)
end
end
defp handle_notification(did_save(uri), state) do
if not Map.has_key?(state.source_files, uri) do
JsonRpc.log_message(
:warning,
"Received textDocument/didSave for file that is not open. Received uri: #{inspect(uri)}"
)
state
else
WorkspaceSymbols.notify_uris_modified([uri])
state = update_in(state.source_files[uri], &%{&1 | dirty?: false})
trigger_build(state)
end
end
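# Watcher events: a rebuild is needed when a watched file was created or
# deleted, is not open in the editor, or is open with a dirty buffer; open
# files whose on-disk contents now match the buffer get their dirty flag
# cleared.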
defp handle_notification(did_change_watched_files(changes), state) do
needs_build =
Enum.any?(changes, fn %{"uri" => uri, "type" => type} ->
Path.extname(uri) in @watched_extensions and
(type in [1, 3] or not Map.has_key?(state.source_files, uri) or
state.source_files[uri].dirty?)
end)
source_files =
changes
|> Enum.reduce(state.source_files, fn
%{"type" => 3}, acc ->
# deleted file still open in editor, keep dirty flag
acc
%{"uri" => uri}, acc ->
# file created/updated - set dirty flag to false if file contents are equal
case acc[uri] do
%SourceFile{text: source_file_text, dirty?: true} = source_file ->
case File.read(SourceFile.path_from_uri(uri)) do
{:ok, ^source_file_text} ->
Map.put(acc, uri, %SourceFile{source_file | dirty?: false})
{:ok, _} ->
acc
{:error, reason} ->
JsonRpc.log_message(:warning, "Unable to read #{uri}: #{inspect(reason)}")
# keep dirty if read fails
acc
end
_ ->
acc
end
end)
state = %{state | source_files: source_files}
changes
|> Enum.map(& &1["uri"])
|> Enum.uniq()
|> WorkspaceSymbols.notify_uris_modified()
if needs_build, do: trigger_build(state), else: state
end
defp handle_notification(%{"method" => "$/" <> _}, state) do
# not supported "$/" notifications may be safely ignored
state
end
defp handle_notification(packet, state) do
JsonRpc.log_message(:warning, "Received unmatched notification: #{inspect(packet)}")
state
end
defp handle_request_packet(id, packet, state = %{server_instance_id: server_instance_id})
when not is_initialized(server_instance_id) do
case packet do
initialize_req(_id, _root_uri, _client_capabilities) ->
{:ok, result, state} = handle_request(packet, state)
JsonRpc.respond(id, result)
state
_ ->
JsonRpc.respond_with_error(id, :server_not_initialized)
state
end
end
defp handle_request_packet(id, packet, state = %{received_shutdown?: false}) do
case handle_request(packet, state) do
{:ok, result, state} ->
JsonRpc.respond(id, result)
state
{:error, type, msg, state} ->
JsonRpc.respond_with_error(id, type, msg)
state
{:async, fun, state} ->
{pid, _ref} = handle_request_async(id, fun)
%{state | requests: Map.put(state.requests, id, pid)}
end
rescue
e in InvalidParamError ->
JsonRpc.respond_with_error(id, :invalid_params, e.message)
state
end
defp handle_request_packet(id, _packet, state) do
JsonRpc.respond_with_error(id, :invalid_request)
state
end
defp handle_request(
initialize_req(_id, root_uri, client_capabilities),
state = %{server_instance_id: server_instance_id}
)
when not is_initialized(server_instance_id) do
show_version_warnings()
server_instance_id =
:crypto.strong_rand_bytes(32) |> Base.url_encode64() |> binary_part(0, 32)
state =
case root_uri do
"file://" <> _ ->
root_path = SourceFile.path_from_uri(root_uri)
File.cd!(root_path)
%{state | root_uri: root_uri}
nil ->
state
end
# Explicitly request file watchers from the client if supported
supports_dynamic =
get_in(client_capabilities, [
"textDocument",
"codeAction",
"dynamicRegistration"
])
state = %{
state
| client_capabilities: client_capabilities,
server_instance_id: server_instance_id,
supports_dynamic: supports_dynamic
}
{:ok,
%{
"capabilities" => server_capabilities(server_instance_id),
"serverInfo" => %{
"name" => "ElixirLS",
"version" => "#{Launch.language_server_version()}"
}
}, state}
end
defp handle_request(request(_id, "shutdown", _params), state) do
{:ok, nil, %{state | received_shutdown?: true}}
end
defp handle_request(definition_req(_id, uri, line, character), state) do
source_file = get_source_file(state, uri)
fun = fn ->
Definition.definition(uri, source_file.text, line, character)
end
{:async, fun, state}
end
defp handle_request(implementation_req(_id, uri, line, character), state) do
fun = fn ->
Implementation.implementation(uri, state.source_files[uri].text, line, character)
end
{:async, fun, state}
end
defp handle_request(references_req(_id, uri, line, character, include_declaration), state) do
source_file = get_source_file(state, uri)
fun = fn ->
{:ok,
References.references(
source_file.text,
uri,
line,
character,
include_declaration
)}
end
{:async, fun, state}
end
defp handle_request(hover_req(_id, uri, line, character), state) do
source_file = get_source_file(state, uri)
fun = fn ->
Hover.hover(source_file.text, line, character)
end
{:async, fun, state}
end
defp handle_request(document_symbol_req(_id, uri), state) do
source_file = get_source_file(state, uri)
fun = fn ->
hierarchical? =
get_in(state.client_capabilities, [
"textDocument",
"documentSymbol",
"hierarchicalDocumentSymbolSupport"
]) || false
if String.ends_with?(uri, [".ex", ".exs"]) do
DocumentSymbols.symbols(uri, source_file.text, hierarchical?)
else
{:ok, []}
end
end
{:async, fun, state}
end
defp handle_request(workspace_symbol_req(_id, query), state) do
fun = fn ->
WorkspaceSymbols.symbols(query)
end
{:async, fun, state}
end
defp handle_request(completion_req(_id, uri, line, character), state) do
source_file = get_source_file(state, uri)
snippets_supported =
!!get_in(state.client_capabilities, [
"textDocument",
"completion",
"completionItem",
"snippetSupport"
])
# deprecated as of Language Server Protocol Specification - 3.15
deprecated_supported =
!!get_in(state.client_capabilities, [
"textDocument",
"completion",
"completionItem",
"deprecatedSupport"
])
tags_supported =
case get_in(state.client_capabilities, [
"textDocument",
"completion",
"completionItem",
"tagSupport"
]) do
nil -> []
%{"valueSet" => value_set} -> value_set
end
signature_help_supported =
!!get_in(state.client_capabilities, ["textDocument", "signatureHelp"])
locals_without_parens =
case SourceFile.formatter_opts(uri) do
{:ok, opts} -> Keyword.get(opts, :locals_without_parens, [])
:error -> []
end
|> MapSet.new()
signature_after_complete = Map.get(state.settings || %{}, "signatureAfterComplete", true)
fun = fn ->
Completion.completion(source_file.text, line, character,
snippets_supported: snippets_supported,
deprecated_supported: deprecated_supported,
tags_supported: tags_supported,
signature_help_supported: signature_help_supported,
locals_without_parens: locals_without_parens,
signature_after_complete: signature_after_complete
)
end
{:async, fun, state}
end
defp handle_request(formatting_req(_id, uri, _options), state) do
source_file = get_source_file(state, uri)
fun = fn -> Formatting.format(source_file, uri, state.project_dir) end
{:async, fun, state}
end
defp handle_request(signature_help_req(_id, uri, line, character), state) do
source_file = get_source_file(state, uri)
fun = fn -> SignatureHelp.signature(source_file, line, character) end
{:async, fun, state}
end
defp handle_request(on_type_formatting_req(_id, uri, line, character, ch, options), state) do
source_file = get_source_file(state, uri)
fun = fn ->
OnTypeFormatting.format(source_file, line, character, ch, options)
end
{:async, fun, state}
end
defp handle_request(code_lens_req(_id, uri), state) do
source_file = get_source_file(state, uri)
fun = fn ->
with {:ok, spec_code_lenses} <- get_spec_code_lenses(state, uri, source_file),
{:ok, test_code_lenses} <- get_test_code_lenses(state, uri, source_file) do
{:ok, spec_code_lenses ++ test_code_lenses}
else
{:error, %ElixirSense.Core.Metadata{error: {line, error_msg}}} ->
{:error, :code_lens_error, "#{line}: #{error_msg}"}
{:error, error} ->
{:error, :code_lens_error, "Error while building code lenses: #{inspect(error)}"}
error ->
error
end
end
{:async, fun, state}
end
defp handle_request(execute_command_req(_id, command, args) = req, state) do
{:async,
fn ->
case ExecuteCommand.execute(command, args, state) do
{:error, :invalid_request, _msg} = res ->
JsonRpc.log_message(:warning, "Unmatched request: #{inspect(req)}")
res
other ->
other
end
end, state}
end
defp handle_request(macro_expansion(_id, whole_buffer, selected_macro, macro_line), state) do
x = ElixirSense.expand_full(whole_buffer, selected_macro, macro_line)
{:ok, x, state}
end
defp handle_request(%{"method" => "$/" <> _}, state) do
# "$/" requests that the server doesn't support must return method_not_found
{:error, :method_not_found, nil, state}
end
defp handle_request(req, state) do
JsonRpc.log_message(:warning, "Unmatched request: #{inspect(req)}")
{:error, :invalid_request, nil, state}
end
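# Runs a request handler in a monitored child process. The result is reported
# back via `:request_finished`; a crash is surfaced to the client through the
# `:DOWN` handler above without corrupting server state.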
defp handle_request_async(id, func) do
parent = self()
spawn_monitor(fn ->
result =
try do
func.()
rescue
e in InvalidParamError ->
{:error, :invalid_params, e.message}
end
GenServer.call(parent, {:request_finished, id, result}, :infinity)
end)
end
defp server_capabilities(server_instance_id) do
%{
"macroExpansion" => true,
"textDocumentSync" => %{
"change" => 2,
"openClose" => true,
"save" => %{"includeText" => true}
},
"hoverProvider" => true,
"completionProvider" => %{"triggerCharacters" => Completion.trigger_characters()},
"definitionProvider" => true,
"implementationProvider" => true,
"referencesProvider" => true,
"documentFormattingProvider" => true,
"signatureHelpProvider" => %{"triggerCharacters" => SignatureHelp.trigger_characters()},
"documentSymbolProvider" => true,
"workspaceSymbolProvider" => true,
"documentOnTypeFormattingProvider" => %{"firstTriggerCharacter" => "\n"},
"codeLensProvider" => %{"resolveProvider" => false},
"executeCommandProvider" => %{"commands" => ["spec:#{server_instance_id}"]},
"workspace" => %{
"workspaceFolders" => %{"supported" => false, "changeNotifications" => false}
}
}
end
defp get_spec_code_lenses(state, uri, source_file) do
if dialyzer_enabled?(state) and !!state.settings["suggestSpecs"] do
CodeLens.spec_code_lens(state.server_instance_id, uri, source_file.text)
else
{:ok, []}
end
end
defp get_test_code_lenses(state, uri, source_file) do
if state.settings["enableTestLenses"] == true do
CodeLens.test_code_lens(uri, source_file.text)
else
{:ok, []}
end
end
# Build
defp trigger_build(state) do
if build_enabled?(state) and not state.build_running? do
fetch_deps? = Map.get(state.settings || %{}, "fetchDeps", true)
{_pid, build_ref} =
Build.build(self(), state.project_dir,
fetch_deps?: fetch_deps?,
load_all_modules?: state.load_all_modules?
)
%__MODULE__{
state
| build_ref: build_ref,
needs_build?: false,
build_running?: true,
analysis_ready?: false,
load_all_modules?: false
}
else
%__MODULE__{state | needs_build?: true, analysis_ready?: false}
end
end
defp dialyze(state) do
warn_opts =
(state.settings["dialyzerWarnOpts"] || [])
|> Enum.map(&String.to_atom/1)
Dialyzer.analyze(state.build_ref, warn_opts, dialyzer_default_format(state))
state
end
defp dialyzer_default_format(state) do
state.settings["dialyzerFormat"] || "dialyxir_long"
end
defp handle_build_result(status, diagnostics, state) do
old_diagnostics = state.build_diagnostics ++ state.dialyzer_diagnostics
state = put_in(state.build_diagnostics, diagnostics)
state =
cond do
state.needs_build? ->
state
status == :error or not dialyzer_enabled?(state) ->
put_in(state.dialyzer_diagnostics, [])
true ->
dialyze(state)
end
publish_diagnostics(
state.build_diagnostics ++ state.dialyzer_diagnostics,
old_diagnostics,
state.source_files
)
state
end
defp handle_dialyzer_result(diagnostics, build_ref, state) do
old_diagnostics = state.build_diagnostics ++ state.dialyzer_diagnostics
state = put_in(state.dialyzer_diagnostics, diagnostics)
publish_diagnostics(
state.build_diagnostics ++ state.dialyzer_diagnostics,
old_diagnostics,
state.source_files
)
# If these results were triggered by the most recent build and files are not dirty, then we know
# we're up to date and can release spec suggestions to the code lens provider
if build_ref == state.build_ref do
JsonRpc.log_message(:info, "Dialyzer analysis is up to date")
{dirty, not_dirty} =
state.awaiting_contracts
|> Enum.split_with(fn {_, uri} ->
state.source_files[uri].dirty?
end)
contracts_by_file =
not_dirty
|> Enum.map(fn {_from, uri} -> SourceFile.path_from_uri(uri) end)
|> Dialyzer.suggest_contracts()
|> Enum.group_by(fn {file, _, _, _, _} -> file end)
for {from, uri} <- not_dirty do
contracts =
contracts_by_file
|> Map.get(SourceFile.path_from_uri(uri), [])
GenServer.reply(from, contracts)
end
%{state | analysis_ready?: true, awaiting_contracts: dirty}
else
state
end
end
defp build_enabled?(state) do
is_binary(state.project_dir)
end
defp dialyzer_enabled?(state) do
Dialyzer.check_support() == :ok and build_enabled?(state) and state.dialyzer_sup != nil
end
defp publish_diagnostics(new_diagnostics, old_diagnostics, source_files) do
files =
Enum.uniq(Enum.map(new_diagnostics, & &1.file) ++ Enum.map(old_diagnostics, & &1.file))
for file <- files,
uri = SourceFile.path_to_uri(file),
do: Build.publish_file_diagnostics(uri, new_diagnostics, Map.get(source_files, uri))
end
defp show_version_warnings do
unless Version.match?(System.version(), ">= 1.8.0") do
JsonRpc.show_message(
:warning,
"Elixir versions below 1.8 are not supported. (Currently v#{System.version()})"
)
end
otp_release = String.to_integer(System.otp_release())
if otp_release < 21 do
JsonRpc.show_message(
:info,
"Erlang OTP releases below 21 are not supported (Currently OTP #{otp_release})"
)
end
case Dialyzer.check_support() do
:ok -> :ok
{:error, msg} -> JsonRpc.show_message(:info, msg)
end
:ok
end
defp set_settings(state, settings) do
enable_dialyzer =
Dialyzer.check_support() == :ok && Map.get(settings, "dialyzerEnabled", true)
mix_env = Map.get(settings, "mixEnv", "test")
mix_target = Map.get(settings, "mixTarget")
project_dir = Map.get(settings, "projectDir")
state =
state
|> set_mix_env(mix_env)
|> maybe_set_mix_target(mix_target)
|> set_project_dir(project_dir)
|> set_dialyzer_enabled(enable_dialyzer)
state = create_gitignore(state)
trigger_build(%{state | settings: settings})
end
defp set_dialyzer_enabled(state, enable_dialyzer) do
cond do
enable_dialyzer and state.dialyzer_sup == nil and is_binary(state.project_dir) ->
{:ok, pid} = Dialyzer.Supervisor.start_link(state.project_dir)
%{state | dialyzer_sup: pid}
not enable_dialyzer and state.dialyzer_sup != nil ->
Process.exit(state.dialyzer_sup, :normal)
%{state | dialyzer_sup: nil, analysis_ready?: false}
true ->
state
end
end
defp set_mix_env(state, env) do
prev_env = state.settings["mixEnv"]
if is_nil(prev_env) or env == prev_env do
Mix.env(String.to_atom(env))
else
JsonRpc.show_message(:warning, "You must restart ElixirLS after changing Mix env")
end
state
end
defp maybe_set_mix_target(state, nil), do: state
defp maybe_set_mix_target(state, target) do
set_mix_target(state, target)
end
defp set_mix_target(state, target) do
target = target || "host"
prev_target = state.settings["mixTarget"]
if is_nil(prev_target) or target == prev_target do
Mix.target(String.to_atom(target))
else
JsonRpc.show_message(:warning, "You must restart ElixirLS after changing Mix target")
end
state
end
defp set_project_dir(%{project_dir: prev_project_dir, root_uri: root_uri} = state, project_dir)
when is_binary(root_uri) do
root_dir = root_uri |> SourceFile.path_from_uri() |> Path.absname()
project_dir =
if is_binary(project_dir) do
Path.absname(Path.join(root_dir, project_dir))
else
root_dir
end
cond do
not File.dir?(project_dir) ->
JsonRpc.show_message(:error, "Project directory #{project_dir} does not exist")
state
is_nil(prev_project_dir) ->
File.cd!(project_dir)
Map.merge(state, %{project_dir: project_dir, load_all_modules?: true})
prev_project_dir != project_dir ->
JsonRpc.show_message(
:warning,
"You must restart ElixirLS after changing the project directory"
)
state
true ->
state
end
end
defp set_project_dir(state, _) do
state
end
defp create_gitignore(%{project_dir: project_dir} = state) when is_binary(project_dir) do
with gitignore_path <- Path.join([project_dir, ".elixir_ls", ".gitignore"]),
false <- File.exists?(gitignore_path),
:ok <- gitignore_path |> Path.dirname() |> File.mkdir_p(),
:ok <- File.write(gitignore_path, "*", [:write]) do
state
else
true ->
state
{:error, err} ->
JsonRpc.log_message(
:warning,
"Cannot create .elixir_ls/.gitignore, cause: #{Atom.to_string(err)}"
)
state
end
end
defp create_gitignore(state) do
JsonRpc.log_message(
:warning,
"Cannot create .elixir_ls/.gitignore, cause: project_dir not set"
)
state
end
def get_source_file(state, uri) do
case state.source_files[uri] do
nil ->
raise InvalidParamError, uri
source_file ->
source_file
end
end
defp reject_awaiting_contracts(awaiting_contracts, uri) do
Enum.reject(awaiting_contracts, fn
{from, ^uri} -> GenServer.reply(from, [])
_ -> false
end)
end
end
| 28.22619 | 100 | 0.643091 |
f7aeae0066e7088cfc3ccc93f873c6e195d1eb47 | 2,137 | exs | Elixir | config/prod.exs | Hiyori-API/checker-mal | c52f6e8a248ba160ffebc2c9369a933fc8fc4499 | [
"MIT"
] | null | null | null | config/prod.exs | Hiyori-API/checker-mal | c52f6e8a248ba160ffebc2c9369a933fc8fc4499 | [
"MIT"
] | null | null | null | config/prod.exs | Hiyori-API/checker-mal | c52f6e8a248ba160ffebc2c9369a933fc8fc4499 | [
"MIT"
] | null | null | null | import Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
#
# TODO: make host configurable
config :checker_mal, CheckerMalWeb.Endpoint,
url: [host: "sean.fish", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
config :checker_mal,
mal_wait_time: 20
# Do not print debug messages in production
config :logger, level: :info
# config :logger, level: :debug
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :checker_mal, CheckerMalWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
# transport_options: [socket_opts: [:inet6]]
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :checker_mal, CheckerMalWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
import_config "prod.secret.exs"
| 34.467742 | 66 | 0.718297 |
f7aed34b5c5f3aebddd25ef558f79a1a03b3c200 | 2,554 | exs | Elixir | playground_project/test/crud_tdd_web/controllers/games_controller_test.exs | JohnBortotti/learning-elixir | 54ce9c50904e809065d0321d51367cea7a5b2bf8 | [
"MIT"
] | null | null | null | playground_project/test/crud_tdd_web/controllers/games_controller_test.exs | JohnBortotti/learning-elixir | 54ce9c50904e809065d0321d51367cea7a5b2bf8 | [
"MIT"
] | null | null | null | playground_project/test/crud_tdd_web/controllers/games_controller_test.exs | JohnBortotti/learning-elixir | 54ce9c50904e809065d0321d51367cea7a5b2bf8 | [
"MIT"
] | null | null | null | defmodule CrudTddWeb.GamesControllerTest do
use CrudTddWeb.ConnCase
alias CrudTdd.Lib
alias CrudTdd.Lib.Games
@create_attrs %{
grade: 42,
title: "some title"
}
@update_attrs %{
grade: 43,
title: "some updated title"
}
@invalid_attrs %{grade: nil, title: nil}
def fixture(:games) do
{:ok, games} = Lib.create_games(@create_attrs)
games
end
setup %{conn: conn} do
{:ok, conn: put_req_header(conn, "accept", "application/json")}
end
describe "index" do
test "lists all games", %{conn: conn} do
conn = get(conn, Routes.games_path(conn, :index))
assert json_response(conn, 200)["data"] == []
end
end
describe "create games" do
test "renders games when data is valid", %{conn: conn} do
conn = post(conn, Routes.games_path(conn, :create), games: @create_attrs)
assert %{"id" => id} = json_response(conn, 201)["data"]
conn = get(conn, Routes.games_path(conn, :show, id))
assert %{
"id" => id,
"grade" => 42,
"title" => "some title"
} = json_response(conn, 200)["data"]
end
test "renders errors when data is invalid", %{conn: conn} do
conn = post(conn, Routes.games_path(conn, :create), games: @invalid_attrs)
assert json_response(conn, 422)["errors"] != %{}
end
end
describe "update games" do
setup [:create_games]
test "renders games when data is valid", %{conn: conn, games: %Games{id: id} = games} do
conn = put(conn, Routes.games_path(conn, :update, games), games: @update_attrs)
assert %{"id" => ^id} = json_response(conn, 200)["data"]
conn = get(conn, Routes.games_path(conn, :show, id))
assert %{
"id" => id,
"grade" => 43,
"title" => "some updated title"
} = json_response(conn, 200)["data"]
end
test "renders errors when data is invalid", %{conn: conn, games: games} do
conn = put(conn, Routes.games_path(conn, :update, games), games: @invalid_attrs)
assert json_response(conn, 422)["errors"] != %{}
end
end
describe "delete games" do
setup [:create_games]
test "deletes chosen games", %{conn: conn, games: games} do
conn = delete(conn, Routes.games_path(conn, :delete, games))
assert response(conn, 204)
assert_error_sent 404, fn ->
get(conn, Routes.games_path(conn, :show, games))
end
end
end
defp create_games(_) do
games = fixture(:games)
%{games: games}
end
end
| 27.462366 | 92 | 0.598277 |
f7aed4cd4bf7ca86981b356df7f31b3e3cbe7458 | 2,051 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/vpn_gateway_status_high_availability_requirement_state.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/compute/lib/google_api/compute/v1/model/vpn_gateway_status_high_availability_requirement_state.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/compute/lib/google_api/compute/v1/model/vpn_gateway_status_high_availability_requirement_state.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.VpnGatewayStatusHighAvailabilityRequirementState do
@moduledoc """
Describes the high availability requirement state for the VPN connection between this Cloud VPN gateway and a peer gateway.
## Attributes
* `state` (*type:* `String.t`, *default:* `nil`) - Indicates the high availability requirement state for the VPN connection. Valid values are CONNECTION_REDUNDANCY_MET, CONNECTION_REDUNDANCY_NOT_MET.
* `unsatisfiedReason` (*type:* `String.t`, *default:* `nil`) - Indicates the reason why the VPN connection does not meet the high availability redundancy criteria/requirement. Valid values is INCOMPLETE_TUNNELS_COVERAGE.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:state => String.t() | nil,
:unsatisfiedReason => String.t() | nil
}
field(:state)
field(:unsatisfiedReason)
end
defimpl Poison.Decoder,
for: GoogleApi.Compute.V1.Model.VpnGatewayStatusHighAvailabilityRequirementState do
def decode(value, options) do
GoogleApi.Compute.V1.Model.VpnGatewayStatusHighAvailabilityRequirementState.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Compute.V1.Model.VpnGatewayStatusHighAvailabilityRequirementState do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.290909 | 224 | 0.756216 |
f7aeea1cf390954b09f29427ee5d6f8bd2f4edee | 930 | ex | Elixir | lib/main.ex | okx-code/Panacea | 3398c624c0497158ef49653ba001fed18cebc4b0 | [
"Unlicense"
] | null | null | null | lib/main.ex | okx-code/Panacea | 3398c624c0497158ef49653ba001fed18cebc4b0 | [
"Unlicense"
] | null | null | null | lib/main.ex | okx-code/Panacea | 3398c624c0497158ef49653ba001fed18cebc4b0 | [
"Unlicense"
] | null | null | null | defmodule Main do
import Atoms
def main(args) do
cond do
length(args) > 0 ->
functions = read_lines(File.read!(hd(args)))
lines = read_lines(IO.read(:all))
inputs = if length(lines) == 0, do: [], else: Stream.cycle(lines)
stack = eval([], functions, 0, inputs)
IO.inspect(case args do
[_, "-t"] -> Enum.at(stack, 0)
[_, "-j"] -> Enum.join(stack)
[_, "-o"] -> System.halt(0)
[_] -> stack
end, charlists: :as_lists, width: :infinity, limit: :infinity)
true ->
IO.write """
Usage: ./panacea <file> [options]
Options:
-t: Print top of stack instead of entire stack
-j: Join stack together at end
-o: Don't print the stack
"""
end
end
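# Splits the input on newlines and drops the trailing empty element left by a
# final newline (the reverse/tl/reverse dance removes the last entry).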
def read_lines(x) do
x
|> String.split("\n")
|> Enum.reverse()
|> tl()
|> Enum.reverse()
end
end
| 25.135135 | 73 | 0.511828 |
f7af0b3f5374233d8a5527c36c31c775ff28bb3c | 702 | exs | Elixir | test/wechat/helpers/params_parser_test.exs | sjava/ex_wechat | 521e119be8cc960453002c099d57f7474bfd7735 | [
"MIT"
] | null | null | null | test/wechat/helpers/params_parser_test.exs | sjava/ex_wechat | 521e119be8cc960453002c099d57f7474bfd7735 | [
"MIT"
] | null | null | null | test/wechat/helpers/params_parser_test.exs | sjava/ex_wechat | 521e119be8cc960453002c099d57f7474bfd7735 | [
"MIT"
] | null | null | null | defmodule Wechat.Helpers.ParamsParserTest do
use ExUnit.Case, async: true
import Wechat.Helpers.ParamsParser
defmodule Demo do
use Wechat.Api
end
test "should get [] when params is empty" do
assert [] = parse_params([])
end
test "should get right params when has value" do
assert [some: "value"] == parse_params([some: "value"])
end
test "should get right value in Wechat.Base" do
assert [appid: "yourappid", secret: "yourappsecret", token: "yourtoken"] ==
parse_params([appid: nil, secret: nil, token: nil])
end
test "should get :no_set when there is no method in api" do
assert [special: :not_set] == parse_params([special: nil], Demo)
end
end
| 26 | 79 | 0.683761 |
f7af0fafb70211a23027b9938eeb9768d5295ffb | 2,687 | exs | Elixir | test/support/email.exs | harmon25/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | 1 | 2022-03-06T16:30:21.000Z | 2022-03-06T16:30:21.000Z | test/support/email.exs | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | null | null | null | test/support/email.exs | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | 2 | 2017-09-22T16:54:36.000Z | 2021-11-09T20:55:58.000Z | defmodule TestCoherence.Coherence.Email do
defstruct [:from, :to, :subject, :reply_to, :template, :params]
end
defmodule TestCoherence.Coherence.Mailer do
def deliver(email), do: email
end
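# Test doubles: emails are plain `%Email{}` structs and `deliver/1` just
# returns its argument, so tests can assert on the built email's fields.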
defmodule TestCoherence.Coherence.UserEmail do
defp site_name, do: Coherence.Config.site_name(inspect Coherence.Config.module)
alias TestCoherence.Coherence.Email
require Logger
def password(user, url) do
%Email{}
|> from(from_email())
|> to(user_email(user))
|> add_reply_to
|> subject("#{site_name()} - Reset password instructions")
|> render_body("password.html", %{url: url, name: first_name(user.name)})
end
def confirmation(user, url) do
%Email{}
|> from(from_email())
|> to(user_email(user))
|> add_reply_to
|> subject("#{site_name()} - Confirm your new account")
|> render_body("confirmation.html", %{url: url, name: first_name(user.name)})
end
def invitation(invitation, url) do
%Email{}
|> from(from_email())
|> to(user_email(invitation))
|> add_reply_to
|> subject("#{site_name()} - Invitation to create a new account")
|> render_body("invitation.html", %{url: url, name: first_name(invitation.name)})
end
def unlock(user, url) do
%Email{}
|> from(from_email())
|> to(user_email(user))
|> add_reply_to
|> subject("#{site_name()} - Unlock Instructions")
|> render_body("unlock.html", %{url: url, name: first_name(user.name)})
end
defp from(email, from), do: Map.put(email, :from, from)
defp to(email, to), do: Map.put(email, :to, to)
defp reply_to(email, address), do: Map.put(email, :reply_to, address)
defp subject(email, subject), do: Map.put(email, :subject, subject)
defp render_body(email, template, params), do: struct(email, template: template, params: params)
defp add_reply_to(mail) do
case Coherence.Config.email_reply_to do
nil -> mail
true -> reply_to mail, from_email()
address -> reply_to mail, address
end
end
defp first_name(name) do
case String.split(name, " ") do
[first_name | _] -> first_name
_ -> name
end
end
defp user_email(user) do
{user.name, user.email}
end
defp from_email do
case Coherence.Config.email_from do
nil ->
Logger.error ~s|Need to configure :coherence, :email_from_name, "Name", and :email_from_email, "[email protected]"|
nil
{name, email} = email_tuple ->
if is_nil(name) or is_nil(email) do
Logger.error ~s|Need to configure :coherence, :email_from_name, "Name", and :email_from_email, "[email protected]"|
nil
else
email_tuple
end
end
end
end
| 29.527473 | 122 | 0.652028 |
f7af679172dddc7d679be564ec35d0bbbb54535c | 1,422 | ex | Elixir | lib/absinthe/phase/schema/arguments/data.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 1 | 2019-10-10T02:57:52.000Z | 2019-10-10T02:57:52.000Z | lib/absinthe/phase/schema/arguments/data.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 1 | 2019-09-23T21:26:01.000Z | 2019-09-23T21:26:01.000Z | lib/absinthe/phase/schema/arguments/data.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | null | null | null | defmodule Absinthe.Phase.Schema.Arguments.Data do
@moduledoc false
# Populate all arguments in the SDL with their provided data values.
#
# See Absinthe.Phase.Document.Arguments.Data for a more expansive
# explanation; this phase limits itself to arguments and values.
alias Absinthe.Blueprint.Input
alias Absinthe.{Blueprint}
use Absinthe.Phase
def run(input, _options \\ []) do
# By using a postwalk we can worry about leaf nodes first (scalars, enums),
# and then for list and objects merely grab the data values.
result = Blueprint.postwalk(input, &handle_node/1)
{:ok, result}
end
def handle_node(%Input.Argument{input_value: input} = node) do
%{node | value: input.data}
end
def handle_node(%Input.Value{normalized: %Input.List{items: items}} = node) do
data_list = for %{data: data} = item <- items, Input.Value.valid?(item), do: data
%{node | data: data_list}
end
def handle_node(%Input.Value{normalized: %Input.Object{fields: fields}} = node) do
data =
for field <- fields, include_field?(field), into: %{} do
{field.schema_node.identifier, field.input_value.data}
end
%{node | data: data}
end
def handle_node(node) do
node
end
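# Explicit `null` literals are kept so the field shows up with a nil value,
# while fields whose input never resolved to data are dropped entirely.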
defp include_field?(%{input_value: %{normalized: %Input.Null{}}}), do: true
defp include_field?(%{input_value: %{data: nil}}), do: false
defp include_field?(_), do: true
end
| 30.913043 | 85 | 0.68917 |
f7af6bd8b1793f441b2fbfc33f219a1e1fe99b4f | 619 | ex | Elixir | lib/binance/futures/schemas/position.ex | Fadhil/binance.ex | b4cda870c9fab475e43f2498f8f28dec0353e952 | [
"MIT"
] | null | null | null | lib/binance/futures/schemas/position.ex | Fadhil/binance.ex | b4cda870c9fab475e43f2498f8f28dec0353e952 | [
"MIT"
] | null | null | null | lib/binance/futures/schemas/position.ex | Fadhil/binance.ex | b4cda870c9fab475e43f2498f8f28dec0353e952 | [
"MIT"
] | 1 | 2021-02-22T22:56:22.000Z | 2021-02-22T22:56:22.000Z | defmodule Binance.Futures.Schemas.Position do
defstruct [
:entry_price,
:margin_type,
:is_auto_add_margin,
:leverage,
:liquidation_price,
:mark_price,
:max_notional_value,
:position_amt,
:symbol,
:un_realized_profit,
:position_side
]
use ExConstructor
def new(map_or_kwlist) do
position =
map_or_kwlist
|> Map.put("isAutoAddMargin", boolean_from_string(map_or_kwlist["isAutoAddMargin"]))
ExConstructor.populate_struct(%__MODULE__{}, position)
end
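# Usage sketch (hypothetical exchange payload). Note that "isAutoAddMargin"
# must be present as "true"/"false", since `boolean_from_string/1` has no
# clause for other values:
#
#     Position.new(%{"symbol" => "BTCUSDT", "isAutoAddMargin" => "true"})
#     #=> %Binance.Futures.Schemas.Position{symbol: "BTCUSDT", is_auto_add_margin: true, ...}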
def boolean_from_string("true"), do: true
def boolean_from_string("false"), do: false
end
| 21.344828 | 90 | 0.702746 |
f7af7132f36335ea3ebfad7d3a8ffebabc4ac13f | 99 | exs | Elixir | test/nprx_test.exs | silbermm/nprx | 8a889a86688fbb0a1bc32e0310b3ba999c8f8394 | [
"BSD-3-Clause"
] | null | null | null | test/nprx_test.exs | silbermm/nprx | 8a889a86688fbb0a1bc32e0310b3ba999c8f8394 | [
"BSD-3-Clause"
] | null | null | null | test/nprx_test.exs | silbermm/nprx | 8a889a86688fbb0a1bc32e0310b3ba999c8f8394 | [
"BSD-3-Clause"
] | null | null | null | defmodule NPRTest do
use ExUnit.Case
doctest NPRx
import Mock
alias NPRx.TestHelpers
end
| 11 | 24 | 0.757576 |
f7af84ff7d54f70e39591ff5b2a162f7cacc0fb7 | 245 | ex | Elixir | lib/bennu/componentable.ex | coingaming/bennu | 9e8095b338e77bc6ea230129960bb824e1c552fd | [
"MIT"
] | 2 | 2019-12-23T21:11:24.000Z | 2020-01-23T22:15:17.000Z | lib/bennu/componentable.ex | coingaming/bennu | 9e8095b338e77bc6ea230129960bb824e1c552fd | [
"MIT"
] | null | null | null | lib/bennu/componentable.ex | coingaming/bennu | 9e8095b338e77bc6ea230129960bb824e1c552fd | [
"MIT"
] | 1 | 2021-02-03T17:56:58.000Z | 2021-02-03T17:56:58.000Z | defprotocol Bennu.Componentable do
require Bennu.Componentable.Schema, as: Schema
@type t :: Bennu.Component.t()
@spec input_schema(t) :: Schema.t()
def input_schema(t)
@spec output_schema(t) :: Schema.t()
def output_schema(t)
end
| 22.272727 | 48 | 0.714286 |
f7afc872e1d4cb1c2b5afe679214118708ca1c4b | 2,855 | exs | Elixir | test/parallel_stream/reject_test.exs | beatrichartz/parallel_stream | 60bd628f378ff3340a2ea3cb86aa2a3d70500d2a | [
"MIT"
] | 90 | 2015-10-17T22:24:07.000Z | 2022-02-20T16:29:34.000Z | test/parallel_stream/reject_test.exs | beatrichartz/parallel_stream | 60bd628f378ff3340a2ea3cb86aa2a3d70500d2a | [
"MIT"
] | 17 | 2016-04-14T13:35:27.000Z | 2022-02-25T20:13:18.000Z | test/parallel_stream/reject_test.exs | beatrichartz/parallel_stream | 60bd628f378ff3340a2ea3cb86aa2a3d70500d2a | [
"MIT"
] | 24 | 2016-01-28T12:31:09.000Z | 2021-09-27T16:30:06.000Z | defmodule ParallelStream.RejectTest do
use ExUnit.Case, async: true
@moduletag timeout: 1000
test ".reject filters a stream of variable length" do
result =
1..5
|> ParallelStream.reject(fn i -> i |> rem(2) == 0 end)
|> Enum.into([])
assert result == [1, 3, 5]
end
test ".reject kills all processes after it is done" do
{:links, links_before} = Process.info(self(), :links)
1..5
|> ParallelStream.reject(fn i -> i |> rem(2) == 0 end)
|> Enum.into([])
:timer.sleep(10)
{:links, links_after} = Process.info(self(), :links)
assert links_before == links_after
end
test ".reject is repeatable" do
stream =
1..5
|> ParallelStream.reject(fn i -> i |> rem(2) == 0 end)
result1 = stream |> Enum.into([])
assert result1 == [1, 3, 5]
result2 = stream |> Enum.into([])
assert result2 == [1, 3, 5]
end
test ".reject filters a stream of zero length" do
result =
[]
|> ParallelStream.reject(fn i -> i |> rem(2) == 0 end)
|> Enum.into([])
assert result == []
end
test ".reject does propagate errors via links" do
trap = Process.flag(:trap_exit, true)
pid =
spawn_link(fn ->
[1, 2]
|> ParallelStream.reject(fn i ->
if i |> rem(2) == 0 do
raise RuntimeError
end
end)
|> Enum.into([])
end)
assert_receive {:EXIT, ^pid, {%RuntimeError{}, _}}
Process.exit(pid, :kill)
refute Process.alive?(pid)
Process.flag(:trap_exit, trap)
end
test ".reject rejects the stream in order" do
result =
1..1000
|> ParallelStream.reject(fn i -> i |> rem(2) == 0 end)
|> Enum.into([])
assert result == 1..1000 |> Enum.reject(fn i -> i |> rem(2) == 0 end)
end
test ".reject parallelizes the filter function" do
{microseconds, :ok} =
:timer.tc(fn ->
1..5
|> ParallelStream.reject(fn _ -> :timer.sleep(10) end)
|> Stream.run()
end)
assert microseconds < 50000
end
test ".reject parallelizes the filter function with the number of parallel streams defined" do
{microseconds, :ok} =
:timer.tc(fn ->
1..12
|> ParallelStream.reject(fn _ -> :timer.sleep(10) end, num_workers: 12)
|> Stream.run()
end)
assert microseconds < 120_000
end
test ".reject parallelizes the filter function with work sharing" do
{microseconds, :ok} =
:timer.tc(fn ->
1..500
|> ParallelStream.reject(
fn i ->
if rem(i, 20) == 10 do
:timer.sleep(10)
false
else
:timer.sleep(1)
true
end
end,
num_workers: 50
)
|> Stream.run()
end)
assert microseconds < 110_000
end
end
| 23.211382 | 96 | 0.545709 |
f7b00b806fb02870e9d9fc9cf6b489d7f60e89c2 | 2,720 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/creative_rotation.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/creative_rotation.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/creative_rotation.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V28.Model.CreativeRotation do
@moduledoc """
Creative Rotation.
## Attributes
- creativeAssignments ([CreativeAssignment]): Creative assignments in this creative rotation. Defaults to: `null`.
- creativeOptimizationConfigurationId (String.t): Creative optimization configuration that is used by this ad. It should refer to one of the existing optimization configurations in the ad's campaign. If it is unset or set to 0, then the campaign's default optimization configuration will be used for this ad. Defaults to: `null`.
- type (String.t): Type of creative rotation. Can be used to specify whether to use sequential or random rotation. Defaults to: `null`.
- Enum - one of [CREATIVE_ROTATION_TYPE_RANDOM, CREATIVE_ROTATION_TYPE_SEQUENTIAL]
- weightCalculationStrategy (String.t): Strategy for calculating weights. Used with CREATIVE_ROTATION_TYPE_RANDOM. Defaults to: `null`.
- Enum - one of [WEIGHT_STRATEGY_CUSTOM, WEIGHT_STRATEGY_EQUAL, WEIGHT_STRATEGY_HIGHEST_CTR, WEIGHT_STRATEGY_OPTIMIZED]
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:creativeAssignments => list(GoogleApi.DFAReporting.V28.Model.CreativeAssignment.t()),
:creativeOptimizationConfigurationId => any(),
:type => any(),
:weightCalculationStrategy => any()
}
field(
:creativeAssignments,
as: GoogleApi.DFAReporting.V28.Model.CreativeAssignment,
type: :list
)
field(:creativeOptimizationConfigurationId)
field(:type)
field(:weightCalculationStrategy)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V28.Model.CreativeRotation do
def decode(value, options) do
GoogleApi.DFAReporting.V28.Model.CreativeRotation.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V28.Model.CreativeRotation do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.5 | 339 | 0.760662 |
f7b01d6b08eadb0da1c20fbd1fab77bf6b7e10b6 | 3,952 | ex | Elixir | lib/sobelow/xss/raw.ex | mrsmkl/sobelow | b17b99617b91369acbd8db56b1341876fe3c8224 | ["Apache-2.0"] | null | null | null | lib/sobelow/xss/raw.ex | mrsmkl/sobelow | b17b99617b91369acbd8db56b1341876fe3c8224 | ["Apache-2.0"] | 3 | 2021-06-20T14:51:14.000Z | 2021-06-25T00:56:11.000Z | deps/sobelow/lib/sobelow/xss/raw.ex | carlosviana/blog | 1dcf58c3ca40bc3a7105d75de6f51954eb44bca8 | ["MIT"] | null | null | null |
defmodule Sobelow.XSS.Raw do
@uid 30
@finding_type "XSS.Raw: XSS"
use Sobelow.Finding
def run(fun, meta_file, _, nil) do
confidence = if !meta_file.is_controller?, do: :low
Finding.init(@finding_type, meta_file.filename, confidence)
|> Finding.multi_from_def(fun, parse_raw_def(fun))
|> Enum.each(&Print.add_finding(&1))
end
def run(fun, meta_file, _web_root, controller) do
{vars, _, {fun_name, line_no}} = parse_render_def(fun)
filename = meta_file.filename
templates = Sobelow.MetaLog.get_templates()
tmp_template_root =
templates
|> Map.keys()
|> List.first()
template_root =
case tmp_template_root do
nil -> ""
path -> String.split(path, "/templates/") |> List.first()
end
Enum.each(vars, fn {finding, {template, ref_vars, vars}} ->
template =
cond do
is_atom(template) -> Atom.to_string(template) <> ".html"
is_binary(template) -> template
true -> ""
end
template_path =
(template_root <> "/templates/" <> controller <> "/" <> template <> ".eex")
|> Utils.normalize_path()
raw_funs = templates[template_path]
if raw_funs do
raw_vals = Parse.get_template_vars(raw_funs.raw)
Enum.each(ref_vars, fn var ->
var = "@#{var}"
if Enum.member?(raw_vals, var) do
Sobelow.MetaLog.delete_raw(var, template_path)
t_name = String.replace_prefix(Path.expand(template_path, ""), "/", "")
add_finding(t_name, line_no, filename, fun_name, fun, var, :high, finding)
end
end)
Enum.each(vars, fn var ->
var = "@#{var}"
if Enum.member?(raw_vals, var) do
Sobelow.MetaLog.delete_raw(var, template_path)
t_name = String.replace_prefix(Path.expand(template_path, ""), "/", "")
add_finding(t_name, line_no, filename, fun_name, fun, var, :medium, finding)
end
end)
end
end)
end
def parse_render_def(fun) do
{params, {fun_name, line_no}} = Parse.get_fun_declaration(fun)
pipefuns =
Parse.get_pipe_funs(fun)
|> Enum.map(fn {_, _, opts} -> Enum.at(opts, 1) end)
|> Enum.flat_map(&Parse.get_funs_of_type(&1, :render))
pipevars =
pipefuns
|> Enum.map(&{&1, Parse.parse_render_opts(&1, params, 0)})
|> List.flatten()
vars =
(Parse.get_funs_of_type(fun, :render) -- pipefuns)
|> Enum.map(&{&1, Parse.parse_render_opts(&1, params, 1)})
{vars ++ pipevars, params, {fun_name, line_no}}
end
def parse_raw_def(fun) do
Parse.get_fun_vars_and_meta(fun, 0, :raw, :HTML)
end
def details() do
Sobelow.XSS.details()
end
defp add_finding(t_name, line_no, filename, fun_name, fun, var, severity, finding) do
finding =
%Finding{
type: @finding_type,
filename: filename,
fun_source: fun,
vuln_source: finding,
vuln_variable: var,
vuln_line_no: Parse.get_fun_line(finding),
vuln_col_no: Parse.get_fun_column(finding),
confidence: severity
}
|> Finding.fetch_fingerprint()
case Sobelow.format() do
"json" ->
json_finding = [
type: finding.type,
file: finding.filename,
variable: "#{finding.vuln_variable}",
template: "#{t_name}",
line: finding.vuln_line_no
]
Sobelow.log_finding(json_finding, finding)
"txt" ->
Sobelow.log_finding(finding)
Print.print_custom_finding_metadata(finding, [
Print.finding_file_name(filename),
Print.finding_line(finding.vuln_source),
Print.finding_fun_metadata(fun_name, line_no),
"Template: #{t_name} - #{var}"
])
"compact" ->
Print.log_compact_finding(finding)
_ ->
Sobelow.log_finding(finding)
end
end
end
| 27.636364 | 88 | 0.597672 |
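To make the control flow of Sobelow.XSS.Raw above concrete, here is a self-contained sketch of the core check: a controller-assigned variable becomes a finding when the rendered template passes it through raw. The module and function names below are hypothetical, not Sobelow's API.

defmodule RawXssSketch do
  # assigned_vars: variables a controller hands to render/3, e.g. ["user_input"]
  # raw_template_vars: "@var" references found inside raw(...) calls in the template
  def findings(assigned_vars, raw_template_vars) do
    raw_set = MapSet.new(raw_template_vars)

    assigned_vars
    # Templates reference assigns as @var, mirroring the "@#{var}" prefixing above.
    |> Enum.map(&"@#{&1}")
    # Any overlap is a potential XSS sink, like the Enum.member?/2 checks in the real module.
    |> Enum.filter(&MapSet.member?(raw_set, &1))
  end
end

RawXssSketch.findings(["user_input", "title"], ["@user_input"])
#=> ["@user_input"]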
f7b02e8074385c5b08456baea126ad22219fd1db | 731 | ex | Elixir | lib/ex_rets/rets_client.ex | jdav-dev/ex_rets | 16eb6a1adc5d1d1eb259f86d6b09080c3c1068bf | ["Apache-2.0"] | 1 | 2019-12-20T14:23:19.000Z | 2019-12-20T14:23:19.000Z | lib/ex_rets/rets_client.ex | jdav-dev/ex_rets | 16eb6a1adc5d1d1eb259f86d6b09080c3c1068bf | ["Apache-2.0"] | null | null | null | lib/ex_rets/rets_client.ex | jdav-dev/ex_rets | 16eb6a1adc5d1d1eb259f86d6b09080c3c1068bf | ["Apache-2.0"] | null | null | null |
defmodule ExRets.RetsClient do
@moduledoc false
@moduledoc since: "0.1.0"
alias ExRets.Credentials
alias ExRets.HttpClient
alias ExRets.LoginResponse
alias ExRets.Middleware
@typedoc since: "0.1.0"
@opaque t :: %__MODULE__{
credentials: Credentials.t(),
http_client: HttpClient.client(),
http_client_implementation: Httpc | Mock,
http_timeout: non_neg_integer() | :infinity,
login_response: LoginResponse.t(),
middleware: [Middleware.t()]
}
@derive {Inspect, only: [:credentials]}
defstruct [
:credentials,
:http_client,
:http_client_implementation,
:http_timeout,
:login_response,
:middleware
]
end
| 24.366667 | 56 | 0.644323 |
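One design choice in the struct above deserves a worked illustration: deriving Inspect with only: [:credentials] keeps HTTP internals out of logs and error reports. A standalone sketch with assumed field names:

defmodule InspectOnlySketch do
  # Same derive as ExRets.RetsClient: only :credentials is shown when inspected.
  @derive {Inspect, only: [:credentials]}
  defstruct [:credentials, :http_client, :login_response]
end

inspect(%InspectOnlySketch{credentials: :creds, http_client: :secret_pid})
#=> "#InspectOnlySketch<credentials: :creds, ...>"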
f7b03b302721cb5d77ae443001eb2006d8a3dd4c | 1,794 | ex | Elixir | clients/books/lib/google_api/books/v1/model/volumes.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/books/lib/google_api/books/v1/model/volumes.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/books/lib/google_api/books/v1/model/volumes.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Books.V1.Model.Volumes do
@moduledoc """
## Attributes
* `items` (*type:* `list(GoogleApi.Books.V1.Model.Volume.t)`, *default:* `nil`) - A list of volumes.
* `kind` (*type:* `String.t`, *default:* `nil`) - Resource type.
* `totalItems` (*type:* `integer()`, *default:* `nil`) - Total number of volumes found. This might be greater than the number of volumes returned in this response if results have been paginated.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:items => list(GoogleApi.Books.V1.Model.Volume.t()) | nil,
:kind => String.t() | nil,
:totalItems => integer() | nil
}
field(:items, as: GoogleApi.Books.V1.Model.Volume, type: :list)
field(:kind)
field(:totalItems)
end
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.Volumes do
def decode(value, options) do
GoogleApi.Books.V1.Model.Volumes.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.Volumes do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.849057 | 198 | 0.702899 |
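A hedged usage sketch for the Volumes model above: decoding an API response body into the struct. The JSON payload is made up for illustration.

body = ~s({"kind": "books#volumes", "totalItems": 1, "items": []})

# `as:` dispatches to the Poison.Decoder impl defined alongside the model.
volumes = Poison.decode!(body, as: %GoogleApi.Books.V1.Model.Volumes{})
volumes.totalItems
#=> 1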
f7b0b2e9e8f6e36cb419a50e0db87c5e6334d7fb | 736 | exs | Elixir | restserver_application/mix.exs | arquitecturas-concurrentes/iasc-otp-elixir-2019c2 | c8c6c88db978785f439596e0b5f582473b54a35f | ["BSD-3-Clause"] | null | null | null | restserver_application/mix.exs | arquitecturas-concurrentes/iasc-otp-elixir-2019c2 | c8c6c88db978785f439596e0b5f582473b54a35f | ["BSD-3-Clause"] | null | null | null | restserver_application/mix.exs | arquitecturas-concurrentes/iasc-otp-elixir-2019c2 | c8c6c88db978785f439596e0b5f582473b54a35f | ["BSD-3-Clause"] | null | null | null |
defmodule RESTServerApplication.Mixfile do
use Mix.Project
def project do
[app: :restserver_application,
version: "0.0.1",
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
     deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger],
mod: {RESTServer.Application, []}]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[]
end
end
| 21.647059 | 77 | 0.623641 |
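The mixfile above boots RESTServer.Application, whose source is not part of this record; a minimal application callback of that shape would look like the following (the empty child list is a placeholder):

defmodule RESTServer.Application do
  use Application

  def start(_type, _args) do
    # Supervised children (e.g. the REST endpoint process) would be listed here.
    children = []
    Supervisor.start_link(children, strategy: :one_for_one, name: RESTServer.Supervisor)
  end
end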
f7b0ba731d13a8fb10e74d400107c82bedd0e6ae | 1,869 | exs | Elixir | clients/workflows/mix.exs | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | ["Apache-2.0"] | null | null | null | clients/workflows/mix.exs | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | ["Apache-2.0"] | null | null | null | clients/workflows/mix.exs | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Workflows.Mixfile do
use Mix.Project
@version "0.3.3"
def project() do
[
app: :google_api_workflows,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/workflows"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Workflows API client library. Manage workflow definitions. To execute workflows and manage executions, see the Workflows Executions API.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/workflows",
"Homepage" => "https://cloud.google.com/workflows"
}
]
end
end
| 27.895522 | 140 | 0.659176 |
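Assumed consumer-side usage for the package above: pulling it into another project's mix.exs (the constraint mirrors @version):

defp deps do
  [
    {:google_api_workflows, "~> 0.3"}
  ]
end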