Dataset schema (one row per source file):

| column | dtype | range / values |
|---|---|---|
| hexsha | stringlengths | 40 – 40 |
| size | int64 | 2 – 991k |
| ext | stringclasses | 2 values |
| lang | stringclasses | 1 value |
| max_stars_repo_path | stringlengths | 4 – 208 |
| max_stars_repo_name | stringlengths | 6 – 106 |
| max_stars_repo_head_hexsha | stringlengths | 40 – 40 |
| max_stars_repo_licenses | sequence | |
| max_stars_count | int64 | 1 – 33.5k ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24 – 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24 – 24 ⌀ |
| max_issues_repo_path | stringlengths | 4 – 208 |
| max_issues_repo_name | stringlengths | 6 – 106 |
| max_issues_repo_head_hexsha | stringlengths | 40 – 40 |
| max_issues_repo_licenses | sequence | |
| max_issues_count | int64 | 1 – 16.3k ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24 – 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24 – 24 ⌀ |
| max_forks_repo_path | stringlengths | 4 – 208 |
| max_forks_repo_name | stringlengths | 6 – 106 |
| max_forks_repo_head_hexsha | stringlengths | 40 – 40 |
| max_forks_repo_licenses | sequence | |
| max_forks_count | int64 | 1 – 6.91k ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24 – 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24 – 24 ⌀ |
| content | stringlengths | 2 – 991k |
| avg_line_length | float64 | 1 – 36k |
| max_line_length | int64 | 1 – 977k |
| alphanum_fraction | float64 | 0 – 1 |
hexsha: 791d1b28306da2a2c0510c7887e4aa39004c509d | size: 3,189 | ext: exs | lang: Elixir | path: test/lindaex_test.exs | repo: kemonomachi/lindaex | head: 7b80362b3c6e1e32531562b445d5ac1dbfc909dd | licenses: ["WTFPL"] | stars: null | issues: null | forks: null

defmodule LindaExTest do
use ExUnit.Case
setup_all do
test_tuples = [
{:"Katja-sama", 11, "Seikon no Qwaser", "Ekaterina Kurae"},
{:Horo, "Ookami to Koushinryou"},
{:Eclaire, 14, "Dog Days"},
{:Shiro, 11, "No Game, no Life"}
]
{:ok, test_tuples: test_tuples}
end
setup %{test_tuples: test_tuples} do
LindaEx.take_all :empty, :"_"
LindaEx.take_all :test, :"_"
Enum.each test_tuples, &LindaEx.write(:test, &1)
:ok
end
test "tuples are counted correctly", %{test_tuples: test_tuples} do
assert LindaEx.count(:test) == length(test_tuples)
end
test "reading a tuple in the space", %{test_tuples: test_tuples} do
expected = List.first test_tuples
assert LindaEx.read(:test, expected, :noblock) === expected
end
test "writing a tuple to the space", %{test_tuples: test_tuples} do
expected = List.first test_tuples
LindaEx.write :empty, expected
assert LindaEx.read(:empty, expected, :noblock) === expected
assert LindaEx.count(:empty) == 1
end
test "taking a tuple from the space", %{test_tuples: test_tuples} do
expected = List.first test_tuples
LindaEx.write :empty, expected
assert LindaEx.take(:empty, expected, :noblock) === expected
assert LindaEx.count(:empty) == 0
end
test "wildcard template matches all tuples", %{test_tuples: test_tuples} do
tuples = LindaEx.read_all :test, :"_"
assert Enum.sort(tuples) === Enum.sort(test_tuples)
end
test "take_all with wildcard template takes all tuples from the space", %{test_tuples: test_tuples} do
tuples = LindaEx.take_all :test, :"_"
assert Enum.sort(tuples) === Enum.sort(test_tuples)
assert LindaEx.count(:test) == 0
end
test "tuple matching with types work (non-recursive)" do
types = [
{{:"$atom"}, &is_atom/1},
{{:"$binary"}, &is_binary/1},
{{:"$string"}, &is_binary/1},
{{:"$float"}, &is_float/1},
{{:"$function"}, &is_function/1},
{{:"$int"}, &is_integer/1},
{{:"$integer"}, &is_integer/1},
{{:"$list"}, &is_list/1},
{{:"$number"}, &is_number/1},
{{:"$pid"}, &is_pid/1},
#{{"$port"}, &is_port/1},
#{{"$reference"}, &is_reference/1},
{{:"$tuple"}, &is_tuple/1}
]
data = [
{:atom},
{<<0, 245, 13>>},
{"string"},
{5.3},
{fn -> nil end},
{13},
{2},
{[]},
{-1},
      {self()},
{{}}
]
Enum.each data, &LindaEx.write(:empty, &1)
Enum.each types, fn({type, predicate}) ->
{item} = LindaEx.take :empty, type
assert predicate.(item)
end
end
test "tuple matching with wildcards work", %{test_tuples: test_tuples} do
expected = Enum.filter test_tuples, &(tuple_size(&1) == 3)
tuples = LindaEx.take_all :test, {:"_", :"_", :"_"}
assert Enum.sort(tuples) === Enum.sort(expected)
end
test "match_spec variables have no special meaning (non-recursive)" do
expected = {:match_variables, :"$1", :"$13"}
LindaEx.write :test, expected
assert LindaEx.take(:test, expected, :noblock) === expected
assert is_nil(LindaEx.take(:test, expected, :noblock))
end
end
avg_line_length: 25.926829 | max_line_length: 104 | alphanum_fraction: 0.602697

hexsha: 791d306d384e978021c7d8b88f5ab1839578ea56 | size: 769 | ext: ex | lang: Elixir | path: lib/system/rabbitmq/consumers.ex | repo: whitfin/extreme_system | head: a19830db1a4e2128517951f2eddffa9296317382 | licenses: ["MIT"] | stars: null | issues: null | forks: null

defmodule Extreme.System.RabbitMQ.Consumers do
use Supervisor
def start_link(channel_manager, configuration, opts),
do: Supervisor.start_link(__MODULE__, {channel_manager, configuration}, opts)
def init({channel_manager, configuration}) do
children = configuration
|> Enum.map(&define_worker(channel_manager, &1))
supervise children, strategy: :one_for_one
end
defp define_worker(channel_manager, {:publisher, name, targets}),
do: worker( Extreme.System.RabbitMQ.Publisher, [channel_manager, name, targets, [name: name]], id: name)
defp define_worker(channel_manager, {:listener, name, definition}),
do: worker( Extreme.System.RabbitMQ.Listener, [channel_manager, name, definition, [name: name]], id: name)
end
avg_line_length: 38.45 | max_line_length: 111 | alphanum_fraction: 0.73212

hexsha: 791d395390685f2b9ec603f786333a7d0aefb0c1 | size: 13,911 | ext: exs | lang: Elixir | path: apps/ewallet/test/ewallet/gates/transaction_gate_test.exs | repo: AndonMitev/EWallet | head: 898cde38933d6f134734528b3e594eedf5fa50f3 | licenses: ["Apache-2.0"] | stars: 322 (2018-02-28T07:38:44.000Z to 2020-05-27T23:09:55.000Z) | issues: 643 (2018-02-28T12:05:20.000Z to 2020-05-22T08:34:38.000Z) | forks: 63 (2018-02-28T10:57:06.000Z to 2020-05-27T23:10:38.000Z)

# Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.TransactionGateTest do
use EWallet.DBCase, async: true
import EWalletDB.Factory
alias Ecto.UUID
alias EWallet.{BalanceFetcher, TransactionGate}
alias EWalletDB.{Account, Token, Transaction, User, Wallet}
alias ActivityLogger.System
def init_wallet(address, token, amount \\ 1_000) do
master_account = Account.get_master_account()
master_wallet = Account.get_primary_wallet(master_account)
mint!(token)
transfer!(master_wallet.address, address, token, amount * token.subunit_to_unit)
end
describe "create/1" do
def insert_addresses_records do
{:ok, user1} = User.insert(params_for(:user))
{:ok, user2} = User.insert(params_for(:user))
{:ok, token} = Token.insert(params_for(:token))
wallet1 = User.get_primary_wallet(user1)
wallet2 = User.get_primary_wallet(user2)
{wallet1, wallet2, token}
end
defp build_addresses_attrs(idempotency_token, wallet1, wallet2, token) do
%{
"from_address" => wallet1.address,
"to_address" => wallet2.address,
"token_id" => token.id,
"amount" => 100 * token.subunit_to_unit,
"metadata" => %{some: "data"},
"idempotency_token" => idempotency_token,
"originator" => %System{}
}
end
def insert_transaction_with_addresses(%{
metadata: metadata,
response: response,
status: status
}) do
idempotency_token = UUID.generate()
{wallet1, wallet2, token} = insert_addresses_records()
attrs = build_addresses_attrs(idempotency_token, wallet1, wallet2, token)
{:ok, transaction} =
Transaction.get_or_insert(%{
idempotency_token: idempotency_token,
from_user_uuid: wallet1.user_uuid,
to_user_uuid: wallet2.user_uuid,
from: wallet1.address,
to: wallet2.address,
from_amount: 100 * token.subunit_to_unit,
from_token_uuid: token.uuid,
to_amount: 100 * token.subunit_to_unit,
to_token_uuid: token.uuid,
metadata: metadata,
payload: attrs,
local_ledger_uuid: response["local_ledger_uuid"],
error_code: response["code"],
error_description: response["description"],
error_data: nil,
status: status,
type: Transaction.internal(),
originator: %System{}
})
{idempotency_token, transaction, attrs}
end
test "returns the transaction ledger response when idempotency token is present and
transaction is confirmed" do
{idempotency_token, inserted_transaction, attrs} =
insert_transaction_with_addresses(%{
metadata: %{some: "data"},
response: %{"local_ledger_uuid" => "from cached ledger"},
status: Transaction.confirmed()
})
assert inserted_transaction.status == Transaction.confirmed()
{status, transaction} = TransactionGate.create(attrs)
assert status == :ok
assert inserted_transaction.id == transaction.id
assert transaction.idempotency_token == idempotency_token
assert transaction.status == Transaction.confirmed()
assert transaction.local_ledger_uuid == "from cached ledger"
end
test "returns the transaction ledger response when idempotency token is present and
transaction is failed" do
{idempotency_token, inserted_transaction, attrs} =
insert_transaction_with_addresses(%{
metadata: %{some: "data"},
response: %{"code" => "code!", "description" => "description!"},
status: Transaction.failed()
})
assert inserted_transaction.status == Transaction.failed()
{status, transaction, code, description} = TransactionGate.create(attrs)
assert status == :error
assert code == "code!"
assert description == "description!"
assert inserted_transaction.id == transaction.id
assert transaction.idempotency_token == idempotency_token
assert transaction.status == Transaction.failed()
assert transaction.error_code == "code!"
assert transaction.error_description == "description!"
end
test "resend the request to the ledger when idempotency token is present and
transaction is pending" do
{idempotency_token, inserted_transaction, attrs} =
insert_transaction_with_addresses(%{
metadata: %{some: "data"},
response: nil,
status: Transaction.pending()
})
assert inserted_transaction.status == Transaction.pending()
init_wallet(inserted_transaction.from, inserted_transaction.from_token, 1_000)
{status, transaction} = TransactionGate.create(attrs)
assert status == :ok
assert inserted_transaction.id == transaction.id
assert transaction.idempotency_token == idempotency_token
assert transaction.status == Transaction.confirmed()
end
test "creates and fails a transaction when idempotency token is not present and the ledger
returned an error" do
idempotency_token = UUID.generate()
{wallet1, wallet2, token} = insert_addresses_records()
attrs = build_addresses_attrs(idempotency_token, wallet1, wallet2, token)
{status, transaction, code, _description} = TransactionGate.create(attrs)
assert status == :error
assert transaction.status == Transaction.failed()
assert code == "insufficient_funds"
transaction = Transaction.get_by(%{idempotency_token: idempotency_token})
assert transaction.idempotency_token == idempotency_token
assert transaction.status == Transaction.failed()
assert transaction.payload == %{
"from_address" => wallet1.address,
"to_address" => wallet2.address,
"token_id" => token.id,
"amount" => 100 * token.subunit_to_unit,
"metadata" => %{"some" => "data"},
"idempotency_token" => idempotency_token
}
assert transaction.error_code == "insufficient_funds"
assert %{
"address" => _,
"current_amount" => _,
"amount_to_debit" => _,
"token_id" => _
} = transaction.error_data
assert transaction.metadata == %{"some" => "data"}
end
test "creates and confirms a transaction when idempotency token does not exist" do
idempotency_token = UUID.generate()
{wallet1, wallet2, token} = insert_addresses_records()
attrs = build_addresses_attrs(idempotency_token, wallet1, wallet2, token)
init_wallet(wallet1.address, token, 1_000)
{status, _transaction} = TransactionGate.create(attrs)
assert status == :ok
transaction = Transaction.get_by(%{idempotency_token: idempotency_token})
assert transaction.idempotency_token == idempotency_token
assert transaction.status == Transaction.confirmed()
assert transaction.payload == %{
"from_address" => wallet1.address,
"to_address" => wallet2.address,
"token_id" => token.id,
"amount" => 100 * token.subunit_to_unit,
"metadata" => %{"some" => "data"},
"idempotency_token" => idempotency_token
}
assert transaction.local_ledger_uuid != nil
assert transaction.metadata == %{"some" => "data"}
end
test "gets back an 'amount_is_zero' error when amount sent is 0" do
idempotency_token = UUID.generate()
{wallet1, wallet2, token} = insert_addresses_records()
{res, transaction, code, _description} =
TransactionGate.create(%{
"from_address" => wallet1.address,
"to_address" => wallet2.address,
"token_id" => token.id,
"amount" => 0,
"metadata" => %{some: "data"},
"idempotency_token" => idempotency_token,
"originator" => %System{}
})
assert res == :error
assert transaction.status == Transaction.failed()
assert code == "amount_is_zero"
end
test "build, format and send the transaction to the local ledger" do
idempotency_token = UUID.generate()
{wallet1, wallet2, token} = insert_addresses_records()
attrs = build_addresses_attrs(idempotency_token, wallet1, wallet2, token)
init_wallet(wallet1.address, token, 1_000)
{status, transaction} = TransactionGate.create(attrs)
assert status == :ok
assert transaction.idempotency_token == idempotency_token
assert transaction.from == wallet1.address
assert transaction.to == wallet2.address
assert token.id == token.id
end
test "fails to create the transaction when the token is disabled" do
idempotency_token = UUID.generate()
{wallet1, wallet2, token} = insert_addresses_records()
attrs = build_addresses_attrs(idempotency_token, wallet1, wallet2, token)
init_wallet(wallet1.address, token, 1_000)
{:ok, _token} =
Token.enable_or_disable(token, %{
enabled: false,
originator: %System{}
})
{status, code} = TransactionGate.create(attrs)
assert status == :error
assert code == :token_is_disabled
end
test "fails to create the transaction when the from_wallet is disabled" do
idempotency_token = UUID.generate()
{_wallet1, wallet2, token} = insert_addresses_records()
account = Account.get_master_account()
{:ok, wallet3} =
Wallet.insert_secondary_or_burn(%{
"account_uuid" => account.uuid,
"name" => "MySecondary",
"identifier" => "secondary",
"originator" => %System{}
})
attrs = build_addresses_attrs(idempotency_token, wallet3, wallet2, token)
init_wallet(wallet3.address, token, 1_000)
{:ok, _wallet3} =
Wallet.enable_or_disable(wallet3, %{
enabled: false,
originator: %System{}
})
{status, code} = TransactionGate.create(attrs)
assert status == :error
assert code == :from_wallet_is_disabled
end
test "fails to create the transaction when the to_wallet is disabled" do
idempotency_token = UUID.generate()
{wallet1, _wallet2, token} = insert_addresses_records()
account = Account.get_master_account()
{:ok, wallet3} =
Wallet.insert_secondary_or_burn(%{
"account_uuid" => account.uuid,
"name" => "MySecondary",
"identifier" => "secondary",
"originator" => %System{}
})
attrs = build_addresses_attrs(idempotency_token, wallet1, wallet3, token)
init_wallet(wallet1.address, token, 1_000)
{:ok, _wallet3} =
Wallet.enable_or_disable(wallet3, %{
enabled: false,
originator: %System{}
})
{status, code} = TransactionGate.create(attrs)
assert status == :error
assert code == :to_wallet_is_disabled
end
end
describe "create/1 with exchange" do
test "exchanges funds between two users" do
account = Account.get_master_account()
wallet = Account.get_primary_wallet(account)
{:ok, user_1} = :user |> params_for() |> User.insert()
{:ok, user_2} = :user |> params_for() |> User.insert()
wallet_1 = User.get_primary_wallet(user_1)
wallet_2 = User.get_primary_wallet(user_2)
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
mint!(token_1)
mint!(token_2)
initialize_wallet(wallet_1, 200_000, token_1)
{:ok, transaction} =
TransactionGate.create(%{
"idempotency_token" => UUID.generate(),
"from_user_id" => user_1.id,
"to_user_id" => user_2.id,
"from_token_id" => token_1.id,
"to_token_id" => token_2.id,
"from_amount" => 100 * token_1.subunit_to_unit,
"to_amount" => 200 * token_1.subunit_to_unit,
"exchange_account_id" => account.id,
"metadata" => %{something: "interesting"},
"encrypted_metadata" => %{something: "secret"},
"originator" => %System{}
})
{:ok, b1} = BalanceFetcher.get(token_1.id, wallet_1)
assert List.first(b1.balances).amount == (200_000 - 100) * token_1.subunit_to_unit
{:ok, b2} = BalanceFetcher.get(token_2.id, wallet_2)
assert List.first(b2.balances).amount == 200 * token_2.subunit_to_unit
assert transaction.from == wallet_1.address
assert transaction.to == wallet_2.address
assert transaction.from_user_uuid == user_1.uuid
assert transaction.to_user_uuid == user_2.uuid
assert transaction.from_account_uuid == nil
assert transaction.to_account_uuid == nil
assert transaction.from_token_uuid == token_1.uuid
assert transaction.to_token_uuid == token_2.uuid
assert transaction.from_amount == 100 * token_1.subunit_to_unit
assert transaction.to_amount == 200 * token_2.subunit_to_unit
assert transaction.rate == 2
assert transaction.exchange_pair_uuid == pair.uuid
assert transaction.exchange_account_uuid == account.uuid
assert transaction.exchange_wallet_address == wallet.address
end
end
end
avg_line_length: 36.607895 | max_line_length: 94 | alphanum_fraction: 0.650636

hexsha: 791d406ab0b7e8284c9f8bc75cb926ca5e6cb752 | size: 2,847 | ext: ex | lang: Elixir | path: lib/glimesh_web/controllers/channel_moderator_controller.ex | repo: Heiwa1580/glimesh.tv | head: c5e1ed4d1011b4e2a54c173d142e7eb857457477 | licenses: ["MIT"] | stars: 1 (2020-08-02T00:12:28.000Z to 2020-08-02T00:12:28.000Z) | issues: null | forks: null

defmodule GlimeshWeb.ChannelModeratorController do
use GlimeshWeb, :controller
alias Glimesh.Streams
alias Glimesh.Streams.ChannelModerator
plug :put_layout, "user-sidebar.html"
def index(conn, _params) do
channel = Streams.get_channel_for_user(conn.assigns.current_user)
channel_moderators = Streams.list_channel_moderators(channel)
moderation_log = Streams.list_channel_moderation_log(channel)
render(conn, "index.html",
channel_moderators: channel_moderators,
moderation_log: moderation_log
)
end
def new(conn, _params) do
changeset = Streams.change_channel_moderator(%ChannelModerator{})
render(conn, "new.html", changeset: changeset)
end
def create(conn, %{"channel_moderator" => channel_moderator_params}) do
channel = Streams.get_channel_for_user(conn.assigns.current_user)
mod_user = Glimesh.Accounts.get_by_username(channel_moderator_params["username"])
case Streams.create_channel_moderator(channel, mod_user, channel_moderator_params) do
{:ok, channel_moderator} ->
conn
|> put_flash(:info, "Channel moderator created successfully.")
|> redirect(to: Routes.channel_moderator_path(conn, :show, channel_moderator))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "new.html", changeset: changeset)
{:error_no_user, changeset} ->
conn = conn |> put_flash(:error, "Valid username is required.")
render(conn, "new.html", changeset: changeset)
end
end
def show(conn, %{"id" => id}) do
channel_moderator = Streams.get_channel_moderator!(id)
moderation_log = Streams.list_channel_moderation_log_for_mod(channel_moderator)
changeset = Streams.change_channel_moderator(channel_moderator)
render(conn, "show.html",
channel_moderator: channel_moderator,
changeset: changeset,
moderation_log: moderation_log
)
end
def update(conn, %{"id" => id, "channel_moderator" => channel_moderator_params}) do
channel_moderator = Streams.get_channel_moderator!(id)
case Streams.update_channel_moderator(channel_moderator, channel_moderator_params) do
{:ok, channel_moderator} ->
conn
|> put_flash(:info, "Channel moderator updated successfully.")
|> redirect(to: Routes.channel_moderator_path(conn, :show, channel_moderator))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "edit.html", channel_moderator: channel_moderator, changeset: changeset)
end
end
def delete(conn, %{"id" => id}) do
channel_moderator = Streams.get_channel_moderator!(id)
{:ok, _channel_moderator} = Streams.delete_channel_moderator(channel_moderator)
conn
|> put_flash(:info, "Channel moderator deleted successfully.")
|> redirect(to: Routes.channel_moderator_path(conn, :index))
end
end
avg_line_length: 36.037975 | max_line_length: 93 | alphanum_fraction: 0.723217

hexsha: 791d41fd561d0db97fe91cfb320e4f9f9e1165df | size: 485 | ext: exs | lang: Elixir | path: test/test_helper.exs | repo: willfore/bootleg_phoenix | head: ec49516ff9b3d3e2c3ad13fa6026fb614e8c527e | licenses: ["MIT"] | stars: null | issues: null | forks: null

unless System.get_env("TEST_LEAVE_TEMP"), do: Temp.track!
unless Docker.ready? do
IO.puts """
It seems like Docker isn't running?
Please check:
1. Docker is installed: `docker version`
2. On OS X and Windows: `docker-machine start`
3. Environment is set up: `eval $(docker-machine env)`
"""
exit({:shutdown, 1})
end
Docker.build!("bootleg-phoenix-test-sshd", "test/support/docker")
ExUnit.configure formatters: [JUnitFormatter, ExUnit.CLIFormatter]
ExUnit.start()
avg_line_length: 24.25 | max_line_length: 66 | alphanum_fraction: 0.715464

hexsha: 791d48f37bf6186fabc29e0a540cf49c13ef0f09 | size: 277 | ext: exs | lang: Elixir | path: sample_server/config/test.exs | repo: mjaric/finix | head: fb0dedfdfdd46927d3df239c7c45d7fe92c441c4 | licenses: ["Apache-2.0"] | stars: 31 (2019-03-26T15:26:21.000Z to 2022-02-16T14:33:13.000Z) | issues: 3 (2019-04-05T19:45:09.000Z to 2019-10-25T01:48:57.000Z) | forks: 5 (2019-03-27T14:16:28.000Z to 2022-02-18T12:01:46.000Z)

use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :sample_server, SampleServerWeb.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
avg_line_length: 25.181818 | max_line_length: 56 | alphanum_fraction: 0.747292

hexsha: 791d693c89399ca2de059273cae7c279114999a0 | size: 7,837 | ext: ex | lang: Elixir | path: lib/jsonrpc2/server/handler.ex | repo: drdgvhbh/jsonrpc2-elixir | head: 02d41b41e3625da1a81b8dd911a0b23bb99b1c9e | licenses: ["Apache-2.0"] | stars: 49 (2016-07-14T01:12:59.000Z to 2022-03-22T02:13:50.000Z) | issues: 18 (2016-09-02T18:08:37.000Z to 2021-06-10T21:28:11.000Z) | forks: 26 (2016-09-07T21:46:16.000Z to 2021-10-09T13:52:16.000Z)

defmodule JSONRPC2.Server.Handler do
@moduledoc """
A transport-agnostic server handler for JSON-RPC 2.0.
## Example
defmodule SpecHandler do
use JSONRPC2.Server.Handler
def handle_request("subtract", [x, y]) do
x - y
end
def handle_request("subtract", %{"minuend" => x, "subtrahend" => y}) do
x - y
end
def handle_request("update", _) do
:ok
end
def handle_request("sum", numbers) do
Enum.sum(numbers)
end
def handle_request("get_data", []) do
["hello", 5]
end
end
SpecHandler.handle(~s({"jsonrpc": "2.0", "method": "subtract", "params": [42, 23], "id": 1}))
#=> ~s({"jsonrpc": "2.0", "result": 19, "id": 1})
"""
require Logger
@doc """
Respond to a request for `method` with `params`.
You can return any serializable result (which will be ignored for notifications), or you can throw
these values to produce error responses:
* `:method_not_found`, `:invalid_params`, `:internal_error`, `:server_error`
* any of the above, in a tuple like `{:method_not_found, %{my_error_data: 1}}` to return extra
data
* `{:jsonrpc2, code, message}` or `{:jsonrpc2, code, message, data}` to return a custom error,
with or without extra data.
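  For example, a clause for a hypothetical `"divide"` method could reject bad parameters by
  throwing one of the values above (an illustrative sketch, not part of this library's API):

      def handle_request("divide", [_x, 0]) do
        throw({:invalid_params, %{reason: "division by zero"}})
      end

      def handle_request("divide", [x, y]), do: x / y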
"""
@callback handle_request(method :: JSONRPC2.method(), params :: JSONRPC2.params()) ::
JSONRPC2.json() | no_return
defmacro __using__(_) do
quote do
@spec handle(String.t()) :: {:reply, String.t()} | :noreply
def handle(json) do
serializer = Application.get_env(:jsonrpc2, :serializer)
unquote(__MODULE__).handle(__MODULE__, serializer, json)
end
end
end
@doc false
def handle(module, serializer, json) when is_binary(json) do
case serializer.decode(json) do
{:ok, decoded_request} ->
parse(decoded_request) |> collate_for_dispatch(module)
{:error, _error} ->
standard_error_response(:parse_error, nil)
{:error, :invalid, _number} ->
standard_error_response(:parse_error, nil)
end
|> encode_response(module, serializer, json)
end
def handle(module, serializer, json) do
parse(json)
|> collate_for_dispatch(module)
|> encode_response(module, serializer, json)
end
defp collate_for_dispatch(batch_rpc, module) when is_list(batch_rpc) and length(batch_rpc) > 0 do
merge_responses(Enum.map(batch_rpc, &dispatch(module, &1)))
end
defp collate_for_dispatch(rpc, module) do
dispatch(module, rpc)
end
defp parse(requests) when is_list(requests) do
for request <- requests, do: parse(request)
end
defp parse(request) when is_map(request) do
version = Map.get(request, "jsonrpc", :undefined)
method = Map.get(request, "method", :undefined)
params = Map.get(request, "params", [])
id = Map.get(request, "id", :undefined)
if valid_request?(version, method, params, id) do
{method, params, id}
else
:invalid_request
end
end
defp parse(_) do
:invalid_request
end
defp valid_request?(version, method, params, id) do
version == "2.0" and is_binary(method) and (is_list(params) or is_map(params)) and
(id in [:undefined, nil] or is_binary(id) or is_number(id))
end
defp merge_responses(responses) do
case for({:reply, reply} <- responses, do: reply) do
[] -> :noreply
replies -> {:reply, replies}
end
end
@throwable_errors [:method_not_found, :invalid_params, :internal_error, :server_error]
defp dispatch(module, {method, params, id}) do
try do
result_response(module.handle_request(method, params), id)
rescue
e in FunctionClauseError ->
        # if the error originates from this module's handle_request call, handle it; otherwise reraise
case e do
%FunctionClauseError{function: :handle_request, module: ^module} ->
standard_error_response(:method_not_found, %{method: method, params: params}, id)
other_e ->
stacktrace = System.stacktrace()
log_error(module, method, params, :error, other_e, stacktrace)
Kernel.reraise(other_e, stacktrace)
end
catch
:throw, error when error in @throwable_errors ->
standard_error_response(error, id)
:throw, {error, data} when error in @throwable_errors ->
standard_error_response(error, data, id)
:throw, {:jsonrpc2, code, message} when is_integer(code) and is_binary(message) ->
error_response(code, message, id)
:throw, {:jsonrpc2, code, message, data} when is_integer(code) and is_binary(message) ->
error_response(code, message, data, id)
kind, payload ->
stacktrace = System.stacktrace()
log_error(module, method, params, kind, payload, stacktrace)
standard_error_response(:internal_error, id)
end
end
defp dispatch(_module, _rpc) do
standard_error_response(:invalid_request, nil)
end
defp log_error(module, method, params, kind, payload, stacktrace) do
_ =
Logger.error([
"Error in handler ",
inspect(module),
" for method ",
method,
" with params: ",
inspect(params),
":\n\n",
Exception.format(kind, payload, stacktrace)
])
end
defp result_response(_result, :undefined) do
:noreply
end
defp result_response(result, id) do
{:reply,
%{
"jsonrpc" => "2.0",
"result" => result,
"id" => id
}}
end
defp standard_error_response(error_type, id) do
{code, message} = error_code_and_message(error_type)
error_response(code, message, id)
end
defp standard_error_response(error_type, data, id) do
{code, message} = error_code_and_message(error_type)
error_response(code, message, data, id)
end
defp error_response(_code, _message, _data, :undefined) do
:noreply
end
defp error_response(code, message, data, id) do
{:reply, error_reply(code, message, data, id)}
end
defp error_response(_code, _message, :undefined) do
:noreply
end
defp error_response(code, message, id) do
{:reply, error_reply(code, message, id)}
end
defp error_reply(code, message, data, id) do
%{
"jsonrpc" => "2.0",
"error" => %{
"code" => code,
"message" => message,
"data" => data
},
"id" => id
}
end
defp error_reply(code, message, id) do
%{
"jsonrpc" => "2.0",
"error" => %{
"code" => code,
"message" => message
},
"id" => id
}
end
defp error_code_and_message(:parse_error), do: {-32700, "Parse error"}
defp error_code_and_message(:invalid_request), do: {-32600, "Invalid Request"}
defp error_code_and_message(:method_not_found), do: {-32601, "Method not found"}
defp error_code_and_message(:invalid_params), do: {-32602, "Invalid params"}
defp error_code_and_message(:internal_error), do: {-32603, "Internal error"}
defp error_code_and_message(:server_error), do: {-32000, "Server error"}
defp encode_response(:noreply, _module, _serializer, _json) do
:noreply
end
defp encode_response({:reply, reply}, module, serializer, json) do
case serializer.encode(reply) do
{:ok, encoded_reply} ->
{:reply, encoded_reply}
{:error, reason} ->
_ =
Logger.info([
"Handler ",
inspect(module),
" returned invalid reply:\n Reason: ",
inspect(reason),
"\n Received: ",
inspect(reply),
"\n Request: ",
json
])
standard_error_response(:internal_error, nil)
|> encode_response(module, serializer, json)
end
end
end
avg_line_length: 28.292419 | max_line_length: 105 | alphanum_fraction: 0.624729

hexsha: 791d6af4121bdc8da2ec571a66f3e705ab6beb98 | size: 86 | ext: exs | lang: Elixir | path: test/nuzzelish_web/views/layout_view_test.exs | repo: chrisbodhi/nuzzelish | head: 4273dc34e5cc2eab11a6c512272b03a60f302e64 | licenses: ["MIT"] | stars: 2 (2020-09-07T03:42:36.000Z to 2021-05-04T23:58:43.000Z) | issues: 20 (2020-01-15T03:26:48.000Z to 2020-02-02T20:53:39.000Z) | forks: null

defmodule NuzzelishWeb.LayoutViewTest do
use NuzzelishWeb.ConnCase, async: true
end
avg_line_length: 21.5 | max_line_length: 40 | alphanum_fraction: 0.837209

hexsha: 791d7360b07bea925ec0adc9ea8f99ae116bc1c4 | size: 1,268 | ext: ex | lang: Elixir | path: lib/ttr_core/cards/train_card.ex | repo: alakra/ticket-to-ride-core | head: 7886e1937d4f41b472e0bb3e2cc20ea4ef350085 | licenses: ["MIT"] | stars: 6 (2018-09-23T21:04:50.000Z to 2021-03-13T02:56:23.000Z) | issues: 23 (2018-08-24T17:09:18.000Z to 2021-08-02T12:15:23.000Z) | forks: 1 (2018-08-24T16:59:45.000Z to 2018-08-24T16:59:45.000Z)

defmodule TtrCore.Cards.TrainCard do
@moduledoc false
@car_counts [
box: 12, # yellow
passenger: 12, # blue
tanker: 12, # orange
reefer: 12, # white
freight: 12, # pink
hopper: 12, # black
coal: 12, # red
caboose: 12, # green
locomotive: 14 # gold
]
@type t :: :box
| :passenger
| :tanker
| :reefer
| :freight
| :hopper
| :coal
| :caboose
| :locomotive
@type deck :: [t]
@type remaining :: deck()
@type selected :: deck()
# API
def shuffle, do: shuffle(@car_counts, [])
def shuffle([], deck), do: deck
def shuffle(source, deck) do
[{train, n}] = Enum.take_random(source, 1)
source
|> calculate_remainder(train, n)
|> shuffle([train|deck])
end
@spec draw(deck(), integer()) ::
{:ok, {remaining(), selected()}} |
{:error, :invalid_deal}
def draw(deck, n)
def draw(deck, 4), do: {:ok, Enum.split(deck, 4)}
def draw(deck, 2), do: {:ok, Enum.split(deck, 2)}
def draw(deck, 1), do: {:ok, Enum.split(deck, 1)}
def draw(_, _), do: {:error, :invalid_deal}
# Private
defp calculate_remainder(source, train, 1), do: Keyword.delete(source, train)
defp calculate_remainder(source, train, n), do: Keyword.put(source, train, n - 1)
end
avg_line_length: 22.642857 | max_line_length: 83 | alphanum_fraction: 0.589117

hexsha: 791db6c26f1f8493e600a5ab15edf579ef0e56fe | size: 4,631 | ext: exs | lang: Elixir | path: test/plausible/stats/query_test.exs | repo: plausible-insights/plausible | head: 88173342b9e969894879bfb2e8d203426f6a1b1c | licenses: ["MIT"] | stars: 984 (2019-09-02T11:36:41.000Z to 2020-06-08T06:25:48.000Z) | issues: 24 (2019-09-10T09:53:17.000Z to 2020-06-08T07:35:26.000Z) | forks: 51 (2019-09-03T10:48:10.000Z to 2020-06-07T00:23:34.000Z)

defmodule Plausible.Stats.QueryTest do
use ExUnit.Case, async: true
alias Plausible.Stats.Query
@site_inserted_at ~D[2020-01-01]
@site %Plausible.Site{
timezone: "UTC",
inserted_at: @site_inserted_at,
stats_start_date: @site_inserted_at
}
test "parses day format" do
q = Query.from(@site, %{"period" => "day", "date" => "2019-01-01"})
assert q.date_range.first == ~D[2019-01-01]
assert q.date_range.last == ~D[2019-01-01]
assert q.interval == "hour"
end
test "day format defaults to today" do
q = Query.from(@site, %{"period" => "day"})
assert q.date_range.first == Timex.today()
assert q.date_range.last == Timex.today()
assert q.interval == "hour"
end
test "parses realtime format" do
q = Query.from(@site, %{"period" => "realtime"})
assert q.date_range.first == Timex.today()
assert q.date_range.last == Timex.today()
assert q.period == "realtime"
end
test "parses month format" do
q = Query.from(@site, %{"period" => "month", "date" => "2019-01-01"})
assert q.date_range.first == ~D[2019-01-01]
assert q.date_range.last == ~D[2019-01-31]
assert q.interval == "date"
end
test "parses 6 month format" do
q = Query.from(@site, %{"period" => "6mo"})
assert q.date_range.first ==
Timex.shift(Timex.today(), months: -5) |> Timex.beginning_of_month()
assert q.date_range.last == Timex.today() |> Timex.end_of_month()
assert q.interval == "month"
end
test "parses 12 month format" do
q = Query.from(@site, %{"period" => "12mo"})
assert q.date_range.first ==
Timex.shift(Timex.today(), months: -11) |> Timex.beginning_of_month()
assert q.date_range.last == Timex.today() |> Timex.end_of_month()
assert q.interval == "month"
end
test "parses year to date format" do
q = Query.from(@site, %{"period" => "year"})
assert q.date_range.first ==
Timex.now(@site.timezone) |> Timex.to_date() |> Timex.beginning_of_year()
assert q.date_range.last ==
Timex.now(@site.timezone) |> Timex.to_date() |> Timex.end_of_year()
assert q.interval == "month"
end
test "parses all time" do
q = Query.from(@site, %{"period" => "all"})
assert q.date_range.first == @site_inserted_at
assert q.date_range.last == Timex.today()
assert q.period == "all"
assert q.interval == "month"
end
test "parses all time in correct timezone" do
site = Map.put(@site, :timezone, "America/Cancun")
q = Query.from(site, %{"period" => "all"})
assert q.date_range.first == ~D[2019-12-31]
assert q.date_range.last == Timex.today("America/Cancun")
end
test "all time shows hourly if site is completely new" do
site = Map.put(@site, :stats_start_date, Timex.now())
q = Query.from(site, %{"period" => "all"})
assert q.date_range.first == Timex.today()
assert q.date_range.last == Timex.today()
assert q.period == "all"
assert q.interval == "hour"
end
test "all time shows daily if site is more than a day old" do
site = Map.put(@site, :stats_start_date, Timex.now() |> Timex.shift(days: -1))
q = Query.from(site, %{"period" => "all"})
assert q.date_range.first == Timex.today() |> Timex.shift(days: -1)
assert q.date_range.last == Timex.today()
assert q.period == "all"
assert q.interval == "date"
end
test "all time shows monthly if site is more than a month old" do
site = Map.put(@site, :stats_start_date, Timex.now() |> Timex.shift(months: -1))
q = Query.from(site, %{"period" => "all"})
assert q.date_range.first == Timex.today() |> Timex.shift(months: -1)
assert q.date_range.last == Timex.today()
assert q.period == "all"
assert q.interval == "month"
end
test "defaults to 30 days format" do
assert Query.from(@site, %{}) == Query.from(@site, %{"period" => "30d"})
end
test "parses custom format" do
q = Query.from(@site, %{"period" => "custom", "from" => "2019-01-01", "to" => "2019-01-15"})
assert q.date_range.first == ~D[2019-01-01]
assert q.date_range.last == ~D[2019-01-15]
assert q.interval == "date"
end
describe "filters" do
test "parses goal filter" do
filters = Jason.encode!(%{"goal" => "Signup"})
q = Query.from(@site, %{"period" => "6mo", "filters" => filters})
assert q.filters["goal"] == "Signup"
end
test "parses source filter" do
filters = Jason.encode!(%{"source" => "Twitter"})
q = Query.from(@site, %{"period" => "6mo", "filters" => filters})
assert q.filters["source"] == "Twitter"
end
end
end
avg_line_length: 30.668874 | max_line_length: 96 | alphanum_fraction: 0.616713

hexsha: 791dc2df0d29bc3e5a8166113b9b379dd5b29c2e | size: 941 | ext: exs | lang: Elixir | path: test/type/type_function_var/intersection_test.exs | repo: ityonemo/mavis | head: 6f71c1ff9e12626c1ac5fcd1276c9adb433bfb99 | licenses: ["MIT"] | stars: 97 (2020-09-22T01:52:19.000Z to 2022-03-21T17:50:13.000Z) | issues: 106 (2020-09-22T18:55:28.000Z to 2021-11-30T01:51:04.000Z) | forks: 3 (2020-10-27T22:36:56.000Z to 2022-01-25T21:00:24.000Z)

defmodule TypeTest.TypeFunctionVar.IntersectionTest do
use ExUnit.Case, async: true
use Type.Operators
@moduletag :intersection
import Type, only: :macros
alias Type.Function.Var
@any any()
@any_var %Var{name: :foo}
describe "the default variable" do
test "intersects with any and self" do
assert @any_var == @any_var <~> @any
assert @any_var == @any <~> @any_var
assert @any_var == @any_var <~> @any_var
end
test "performs an intersection" do
assert %Var{constraint: integer()} =
@any_var <~> integer()
assert %Var{constraint: integer()} =
integer() <~> @any_var
end
end
@int_var %Var{name: :foo, constraint: integer()}
describe "a constrained variable" do
test "becomes more constrained" do
assert %Var{constraint: 1..10} =
@int_var <~> 1..10
assert %Var{constraint: 1..10} =
1..10 <~> @int_var
end
end
end
avg_line_length: 22.404762 | max_line_length: 54 | alphanum_fraction: 0.621679

hexsha: 791e2a6db878258a347e7ce5070396c0958d0dab | size: 1,037 | ext: exs | lang: Elixir | path: test/phoenix_gon/utils_test.exs | repo: defaulterr0r/phoenix_gon | head: 19e678706fc4474ad71156d85bf3068d39c81ac9 | licenses: ["MIT"] | stars: null | issues: null | forks: null

defmodule PhoenixGon.UtilsTest do
use ExUnit.Case, async: false
use RouterHelper
import PhoenixGon.Utils
alias Plug.Conn
describe "#variables" do
test 'conn' do
conn =
%Conn{}
|> with_gon
actual = variables(conn)
expectation = %PhoenixGon.Storage{}
assert actual == expectation
end
end
describe "#assets" do
test 'conn' do
conn =
%Conn{}
|> with_gon
actual = assets(conn)
expectation = %{}
assert actual == expectation
end
end
describe "settings" do
test 'conn' do
conn =
%Conn{}
|> with_gon
actual = settings(conn)
expectation = [camel_case: false, compatibility: :native, namespace: nil]
assert actual == expectation
end
end
describe "#namescpase" do
test 'conn' do
conn =
%Conn{}
|> with_gon(namespace: TestCase)
actual = namespace(conn)
expectation = "TestCase"
assert actual == expectation
end
end
end
avg_line_length: 17 | max_line_length: 79 | alphanum_fraction: 0.580521

hexsha: 791e30b08018b46653c31fe66b6d86eef3f65202 | size: 497 | ext: exs | lang: Elixir | path: test/phoenix_feathers/card_test.exs | repo: timpile/phoenix_feathers | head: 6ac706d9384772f8cc5950ee2328fef77fa85413 | licenses: ["MIT"] | stars: null | issues: null | forks: null

defmodule PhoenixFeathers.CardTest do
use ExUnit.Case
use PhoenixFeathers.LiveViewTest
defmodule MockLiveView do
use Phoenix.LiveView
def render(assigns) do
~L"""
<%= live_component @socket, PhoenixFeathers.Card do %>
<div>Inner block</div>
<% end %>
"""
end
end
test "render a basic card component", %{conn: conn} do
{:ok, _view, html} = live_isolated(conn, MockLiveView)
assert html =~ ~s|<div>Inner block</div>|
end
end
avg_line_length: 21.608696 | max_line_length: 62 | alphanum_fraction: 0.633803

hexsha: 791e4ab9beece9d106a49c0967a4c0b32d274b49 | size: 30 | ext: ex | lang: Elixir | path: lib/phoenix/ash_dashboard.ex | repo: ash-project/ash_dashboard | head: 7685db1fc2e0298d332a71e37bd18b558cb2abf5 | licenses: ["MIT"] | stars: null | issues: null | forks: null

defmodule AshDashboard do
end
avg_line_length: 10 | max_line_length: 25 | alphanum_fraction: 0.866667

hexsha: 791e720826140e0921383a7c078293550eead14e | size: 170 | ext: exs | lang: Elixir | path: test/test_helper.exs | repo: StephaneRob/ancestry-ecto | head: 28b6b30dc63cb4085026973a6917f1eb7b0fe0bd | licenses: ["MIT"] | stars: 2 (2020-08-02T19:14:46.000Z to 2020-08-07T11:02:38.000Z) | issues: null (repo: StephaneRob/ancestry) | forks: 1 (repo: StephaneRob/ancestry, 2020-07-27T05:52:29.000Z to 2020-07-27T05:52:29.000Z)

{:ok, _} = Application.ensure_all_started(:ex_machina)
ExUnit.start()
AncestryEcto.TestRepo.start_link([])
Ecto.Adapters.SQL.Sandbox.mode(AncestryEcto.TestRepo, :manual)
avg_line_length: 34 | max_line_length: 62 | alphanum_fraction: 0.794118

hexsha: 791e7650662eecbaf529c73613eab50d5ee6da58 | size: 382 | ext: ex | lang: Elixir | path: phoenix_test/web/views/error_view.ex | repo: mrk21/study_elixir | head: 1fb20f74f9f9e604ca0e757d0c45bf64b9193743 | licenses: ["MIT"] | stars: null | issues: null | forks: null

defmodule PhoenixTest.ErrorView do
use PhoenixTest.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Server internal error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
avg_line_length: 21.222222 | max_line_length: 47 | alphanum_fraction: 0.704188

hexsha: 791e7fe1e6661eec7fda3467f993127053ce35cc | size: 5,726 | ext: ex | lang: Elixir | path: lib/mix/lib/releases/overlays.ex | repo: TokiTori/distillery | head: 9158df6d17c8b70a201ade4ca0d3f243245ceebd | licenses: ["MIT"] | stars: null | issues: null | forks: null

defmodule Mix.Releases.Overlays do
@moduledoc """
This module is responsible for applying overlays to a release, prior to packaging.
Overlays are templated with EEx, with bindings set to the values configured in `overlay_vars`.
There are some preconfigured overlay variables, namely:
- `erts_vsn`: The version of ERTS used by this release
- `release_name`: The name of the current release
- `release_version`: The version of the current release
For example, given a release named `my_release`, version `0.1.0`:
{:mkdir, "releases/<%= release_version %>/foo"}
The above overlay will create a directory, `rel/my_release/releases/0.1.0/foo`. Overlay input paths are
relative to the project root, but overlay output paths are relative to the root directory for the current
release, which is why the directory is created in `rel/my_release`, and not in the project root.
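  The other overlay types follow the same pattern, and both input and output paths are run
  through EEx with the overlay variables. For instance (the source paths below are purely
  illustrative):

      {:copy, "rel/extra/config.yml", "etc/config.yml"}
      {:link, "/var/app/shared/uploads", "releases/<%= release_version %>/uploads"}
      {:template, "rel/templates/vm.args.eex", "releases/<%= release_version %>/vm.args"}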
"""
alias Mix.Releases.Logger
@typep overlay ::
{:mkdir, String.t()}
| {:copy, String.t(), String.t()}
| {:link, String.t(), String.t()}
| {:template, String.t(), String.t()}
@typep error ::
{:error, {:invalid_overlay, term}}
| {:error, {:template_str, term()}}
| {:error, {:template, term()}}
| {:error, {:overlay_failed, module, term, overlay}}
@doc """
Applies a list of overlays to the current release.
Returns `{:ok, output_paths}` or `{:error, details}`, where `details` is
one of the following:
- {:invalid_overlay, term} - a malformed overlay object
- {:template_str, desc} - templating an overlay parameter failed
- {:template_file, file, line, desc} - a template overlay failed
- {:overlay_failed, term, overlay} - applying an overlay failed
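  A minimal usage sketch (`output_dir`, `overlays`, and `overlay_vars` are assumed to come
  from the surrounding release build, and `handle_overlay_error/1` is a hypothetical helper):

      case Mix.Releases.Overlays.apply(output_dir, overlays, overlay_vars) do
        {:ok, paths}     -> paths
        {:error, reason} -> handle_overlay_error(reason)
      end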
"""
@spec apply(String.t(), list(overlay), Keyword.t()) :: {:ok, [String.t()]} | error
  def apply(_output_dir, [], _overlay_vars), do: {:ok, []}
def apply(output_dir, overlays, overlay_vars) do
do_apply(output_dir, overlays, overlay_vars, [])
end
defp do_apply(_output_dir, [], _vars, acc), do: {:ok, acc}
defp do_apply(output_dir, [overlay | rest], overlay_vars, acc) when is_list(acc) do
case do_overlay(output_dir, overlay, overlay_vars) do
{:ok, path} ->
do_apply(output_dir, rest, overlay_vars, [path | acc])
{:error, {:invalid_overlay, _}} = err ->
err
{:error, {:template_str, _}} = err ->
err
{:error, {:template, _}} = err ->
err
{:error, reason} ->
{:error, {:overlay_failed, :file, {reason, overlay}}}
{:error, reason, file} ->
{:error, {:overlay_failed, :file, {reason, file, overlay}}}
end
end
@spec do_overlay(String.t(), overlay, Keyword.t()) :: {:ok, String.t()} | {:error, term}
defp do_overlay(output_dir, {:mkdir, path}, vars) when is_binary(path) do
with {:ok, path} <- template_str(path, vars),
_ <-
Logger.debug(
"Applying #{IO.ANSI.reset()}mkdir#{IO.ANSI.cyan()} overlay\n" <>
" dst: #{Path.relative_to_cwd(path)}"
),
expanded <- Path.join(output_dir, path),
:ok <- File.mkdir_p(expanded),
do: {:ok, path}
end
defp do_overlay(output_dir, {:copy, from, to}, vars) when is_binary(from) and is_binary(to) do
with {:ok, from} <- template_str(from, vars),
{:ok, to} <- template_str(to, vars),
_ <-
Logger.debug(
"Applying #{IO.ANSI.reset()}copy#{IO.ANSI.cyan()} overlay\n" <>
" src: #{Path.relative_to_cwd(from)}\n" <>
" dst: #{Path.relative_to_cwd(to)}"
),
expanded_to <- Path.join(output_dir, to),
{:ok, _} <- File.cp_r(from, expanded_to),
do: {:ok, to}
end
defp do_overlay(output_dir, {:link, from, to}, vars) when is_binary(from) and is_binary(to) do
with {:ok, from} <- template_str(from, vars),
{:ok, to} <- template_str(to, vars),
_ <-
Logger.debug(
"Applying #{IO.ANSI.reset()}link#{IO.ANSI.cyan()} overlay\n" <>
" src: #{Path.relative_to_cwd(from)}\n" <>
" dst: #{Path.relative_to_cwd(to)}"
),
expanded_to <- Path.join(output_dir, to),
_ <- File.rm(expanded_to),
:ok <- File.ln_s(from, expanded_to),
do: {:ok, to}
end
defp do_overlay(output_dir, {:template, tmpl_path, to}, vars)
when is_binary(tmpl_path) and is_binary(to) do
with {:ok, tmpl_path} <- template_str(tmpl_path, vars),
{:ok, to} <- template_str(to, vars),
{:ok, templated} <- template_file(tmpl_path, vars),
expanded_to <- Path.join(output_dir, to),
_ <-
Logger.debug(
"Applying #{IO.ANSI.reset()}template#{IO.ANSI.cyan()} overlay\n" <>
" src: #{Path.relative_to_cwd(tmpl_path)}\n" <> " dst: #{to}"
),
:ok <- File.mkdir_p(Path.dirname(expanded_to)),
:ok <- File.write(expanded_to, templated),
do: {:ok, to}
end
defp do_overlay(_output_dir, invalid, _), do: {:error, {:invalid_overlay, invalid}}
@spec template_str(String.t(), Keyword.t()) ::
{:ok, String.t()} | {:error, {:template_str, term}}
def template_str(str, overlay_vars) do
{:ok, EEx.eval_string(str, overlay_vars)}
rescue
err in [CompileError] ->
{:error, {:template_str, {str, err.description}}}
end
@spec template_file(String.t(), Keyword.t()) :: {:ok, String.t()} | {:error, {:template, term}}
def template_file(path, overlay_vars) do
{:ok, EEx.eval_file(path, overlay_vars)}
rescue
e ->
{:error, {:template, e}}
end
end
avg_line_length: 37.92053 | max_line_length: 107 | alphanum_fraction: 0.591687

hexsha: 791e82939e53bc8b9734ec66153715b3d91bd36b | size: 1,137 | ext: exs | lang: Elixir | path: config/config.exs | repo: xeejp/xee_attendance | head: 6cdd1bdff443d9c0bbff43f95a32dfaa2586ea74 | licenses: ["MIT"] | stars: null | issues: null | forks: null

# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :attendance_system, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:attendance_system, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
avg_line_length: 36.677419 | max_line_length: 73 | alphanum_fraction: 0.755497

hexsha: 791e86e2245ee72001d542e10a0ef0e56f3ce399 | size: 954 | ext: ex | lang: Elixir | path: test/support/channel_case.ex | repo: slurmulon/thing | head: 21791e7a9579d41de28a0a6131218aed3679d033 | licenses: ["Apache-2.0"] | stars: null | issues: null | forks: null

defmodule Thing.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
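  A minimal example of a test module built on this case (the table name below is
  hypothetical):

      defmodule Thing.SomeChannelTest do
        use Thing.ChannelCase

        test "the Repo and Ecto helpers are available" do
          assert Repo.all(from r in "rooms", select: r.id) == []
        end
      end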
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
alias Thing.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query, only: [from: 1, from: 2]
# The default endpoint for testing
@endpoint Thing.Endpoint
end
end
setup tags do
unless tags[:async] do
Ecto.Adapters.SQL.restart_test_transaction(Thing.Repo, [])
end
:ok
end
end
avg_line_length: 22.714286 | max_line_length: 64 | alphanum_fraction: 0.698113

hexsha: 791ea487cee4ebc290bb40c752e3cce8ba74fbfd | size: 1,274 | ext: ex | lang: Elixir | path: lib/elsa/consumer.ex | repo: fxn/elsa | head: 3d2b5eaccebab48799f8e9b68f78120cd1c3c0c1 | licenses: ["MIT"] | stars: null | issues: null | forks: null

defmodule Elsa.Consumer do
@moduledoc """
Centralizes definition for common functions related
to consumer interactions.
"""
import Elsa.Supervisor, only: [registry: 1]
@type offset :: integer() | String.t()
@doc """
  Retrieves the process id of a consumer registered to the
Elsa Registry and subscribes to it.
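  For example (the connection name, topic, and options are illustrative; the option list is
  passed through to `:brod_consumer.subscribe/3` unchanged):

      {:ok, pid} = Elsa.Consumer.subscribe(:my_connection, "topic-a", 0, begin_offset: :earliest)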
"""
@spec subscribe(Elsa.connection(), Elsa.topic(), Elsa.partition(), term()) :: {:ok, pid()} | {:error, term()}
def subscribe(connection, topic, partition, opts) do
pid = get_consumer(connection, topic, partition)
case :brod_consumer.subscribe(pid, self(), opts) do
:ok -> {:ok, pid}
{:error, reason} -> {:error, reason}
end
end
@doc """
Retrieves a process id of a consumer registered to the
Elsa Registry and performs a consume-ack of the messages
ready to be read off the topic.
"""
@spec ack(Elsa.connection(), Elsa.topic(), Elsa.partition(), offset()) :: :ok
def ack(connection, topic, partition, offset) do
pid = get_consumer(connection, topic, partition)
:brod_consumer.ack(pid, offset)
end
defp get_consumer(connection, topic, partition) do
consumer_name = :"consumer_#{topic}_#{partition}"
Elsa.Registry.whereis_name({registry(connection), consumer_name})
end
end
avg_line_length: 31.85 | max_line_length: 111 | alphanum_fraction: 0.684458

hexsha: 791eac7fa4e1284f1c98b68add26a913c19f7103 | size: 2,993 | ext: ex | lang: Elixir | path: clients/cloud_functions/lib/google_api/cloud_functions/v1/model/audit_config.ex | repo: yoshi-code-bot/elixir-google-api | head: cdb6032f01fac5ab704803113c39f2207e9e019d | licenses: ["Apache-2.0"] | stars: null | issues: null | forks: null

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudFunctions.V1.Model.AuditConfig do
@moduledoc """
Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:[email protected]" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:[email protected]" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts `[email protected]` from DATA_READ logging, and `[email protected]` from DATA_WRITE logging.
## Attributes
* `auditLogConfigs` (*type:* `list(GoogleApi.CloudFunctions.V1.Model.AuditLogConfig.t)`, *default:* `nil`) - The configuration for logging of each type of permission.
* `service` (*type:* `String.t`, *default:* `nil`) - Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services.
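  For example, a decoded value of this model might look like the following (a sketch that
  assumes the companion `AuditLogConfig` model exposes `logType` and `exemptedMembers` fields):

      %GoogleApi.CloudFunctions.V1.Model.AuditConfig{
        service: "allServices",
        auditLogConfigs: [
          %GoogleApi.CloudFunctions.V1.Model.AuditLogConfig{
            logType: "DATA_READ",
            exemptedMembers: ["user:[email protected]"]
          }
        ]
      }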
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:auditLogConfigs => list(GoogleApi.CloudFunctions.V1.Model.AuditLogConfig.t()) | nil,
:service => String.t() | nil
}
field(:auditLogConfigs, as: GoogleApi.CloudFunctions.V1.Model.AuditLogConfig, type: :list)
field(:service)
end
defimpl Poison.Decoder, for: GoogleApi.CloudFunctions.V1.Model.AuditConfig do
def decode(value, options) do
GoogleApi.CloudFunctions.V1.Model.AuditConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudFunctions.V1.Model.AuditConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 59.86 | 1,110 | 0.747077 |
791eadcfd32b3b6540ab81b1e8d9b25aac3761d9 | 3,242 | ex | Elixir | lib/teiserver/battle/lobby/lobby_chat.ex | beyond-all-reason/teiserver | e394b5efe561eaf5700bca62e4e4263d8f720ac5 | [
"MIT"
] | 4 | 2021-07-29T16:23:20.000Z | 2022-02-23T05:34:36.000Z | lib/teiserver/battle/lobby/lobby_chat.ex | beyond-all-reason/teiserver | e394b5efe561eaf5700bca62e4e4263d8f720ac5 | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/teiserver/battle/lobby/lobby_chat.ex | beyond-all-reason/teiserver | e394b5efe561eaf5700bca62e4e4263d8f720ac5 | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule Teiserver.Battle.LobbyChat do
alias Teiserver.{User, Chat}
alias Teiserver.Battle.{Lobby}
alias Phoenix.PubSub
@spec say(Types.userid(), String.t(), Types.lobby_id()) :: :ok | {:error, any}
def say(userid, "!start" <> s, lobby_id), do: say(userid, "!cv start" <> s, lobby_id)
def say(userid, "!joinas spec", lobby_id), do: say(userid, "!!joinas spec", lobby_id)
def say(userid, "!joinas" <> s, lobby_id), do: say(userid, "!cv joinas" <> s, lobby_id)
def say(userid, msg, lobby_id) do
msg = String.replace(msg, "!!joinas spec", "!joinas spec")
case Teiserver.Coordinator.handle_in(userid, msg, lobby_id) do
:say -> do_say(userid, msg, lobby_id)
:handled -> :ok
end
end
@spec do_say(Types.userid(), String.t(), Types.lobby_id()) :: :ok
def do_say(userid, "$ " <> msg, lobby_id), do: do_say(userid, "$#{msg}", lobby_id)
def do_say(userid, msg, lobby_id) do
user = User.get_user_by_id(userid)
if not User.is_muted?(user) do
if user.bot == false do
case Lobby.get_lobby(lobby_id) do
nil -> nil
lobby ->
Chat.create_lobby_message(%{
content: msg,
lobby_guid: lobby.tags["server/match/uuid"],
inserted_at: Timex.now(),
user_id: userid,
})
end
end
PubSub.broadcast(
Central.PubSub,
"legacy_battle_updates:#{lobby_id}",
{:battle_updated, lobby_id, {userid, msg, lobby_id}, :say}
)
PubSub.broadcast(
Central.PubSub,
"teiserver_lobby_chat:#{lobby_id}",
{:lobby_chat, :say, lobby_id, userid, msg}
)
# Client.chat_flood_check(userid)
end
:ok
end
@spec sayex(Types.userid(), String.t(), Types.lobby_id()) :: :ok
def sayex(userid, msg, lobby_id) do
user = User.get_user_by_id(userid)
if not User.is_muted?(userid) do
if user.bot == false do
case Lobby.get_lobby(lobby_id) do
nil -> nil
lobby ->
Chat.create_lobby_message(%{
content: msg,
lobby_guid: lobby.tags["server/match/uuid"],
inserted_at: Timex.now(),
user_id: userid,
})
end
end
PubSub.broadcast(
Central.PubSub,
"legacy_battle_updates:#{lobby_id}",
{:battle_updated, lobby_id, {userid, msg, lobby_id}, :sayex}
)
PubSub.broadcast(
Central.PubSub,
"teiserver_lobby_chat:#{lobby_id}",
{:lobby_chat, :announce, lobby_id, userid, msg}
)
# Client.chat_flood_check(userid)
end
:ok
end
@spec sayprivateex(Types.userid(), Types.userid(), String.t(), Types.lobby_id()) :: :ok | {:error, any}
def sayprivateex(from_id, to_id, msg, lobby_id) do
sender = User.get_user_by_id(from_id)
if not User.is_muted?(sender) do
PubSub.broadcast(
Central.PubSub,
"legacy_user_updates:#{to_id}",
{:battle_updated, lobby_id, {from_id, msg, lobby_id}, :sayex}
)
PubSub.broadcast(
Central.PubSub,
"teiserver_client_messages:#{to_id}",
{:client_message, :lobby_direct_announce, to_id, {from_id, msg}}
)
end
end
end
| 30.299065 | 105 | 0.590993 |
791eebd933e57bbc0a55d2b535191fc1b0dba6f4 | 1,956 | ex | Elixir | clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/run_task_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/run_task_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/run_task_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.CloudTasks.V2beta2.Model.RunTaskRequest do
@moduledoc """
Request message for forcing a task to run now using
RunTask.
## Attributes
* `responseView` (*type:* `String.t`, *default:* `nil`) - The response_view specifies which subset of the Task will be
returned.
By default response_view is BASIC; not all
information is retrieved by default because some data, such as
payloads, might be desirable to return only when needed because
of its large size or because of the sensitivity of data that it
contains.
Authorization for FULL requires
`cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
permission on the Task resource.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:responseView => String.t()
}
field(:responseView)
end
defimpl Poison.Decoder, for: GoogleApi.CloudTasks.V2beta2.Model.RunTaskRequest do
def decode(value, options) do
GoogleApi.CloudTasks.V2beta2.Model.RunTaskRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudTasks.V2beta2.Model.RunTaskRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.152542 | 122 | 0.736196 |
791ef93c3a49db5d7cb2dc7d269c1843a0e0bf9e | 233 | exs | Elixir | config/test.exs | fertapric/cldr_dates_times | d1eff81c7a6e4d84484646465650d26d5dfc51db | [
"Apache-2.0"
] | 36 | 2019-07-26T18:08:04.000Z | 2022-03-08T00:37:49.000Z | config/test.exs | fertapric/cldr_dates_times | d1eff81c7a6e4d84484646465650d26d5dfc51db | [
"Apache-2.0"
] | 20 | 2019-06-12T07:44:57.000Z | 2022-03-02T15:31:52.000Z | config/test.exs | fertapric/cldr_dates_times | d1eff81c7a6e4d84484646465650d26d5dfc51db | [
"Apache-2.0"
] | 4 | 2020-05-29T08:42:08.000Z | 2022-03-01T15:40:24.000Z | # In test mode we compile and test all locales
use Mix.Config
config :ex_cldr,
default_locale: "en",
default_backend: MyApp.Cldr
config :ex_unit,
module_load_timeout: 220_000,
case_load_timeout: 220_000,
timeout: 220_000
| 19.416667 | 46 | 0.763948 |
791efd97eea8d5428c779c2c5c21be756cc652b4 | 395 | ex | Elixir | lib/forage/forage_plan.ex | lumenlunae/forage | af85889bb5a2e154d1aa92e98454c29b6d724120 | [
"MIT"
] | null | null | null | lib/forage/forage_plan.ex | lumenlunae/forage | af85889bb5a2e154d1aa92e98454c29b6d724120 | [
"MIT"
] | null | null | null | lib/forage/forage_plan.ex | lumenlunae/forage | af85889bb5a2e154d1aa92e98454c29b6d724120 | [
"MIT"
] | null | null | null | defmodule Forage.ForagePlan do
@moduledoc """
A forage plan, which can be used to run paginated queries on your repo.
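  ## Example
  A minimal sketch, added for illustration only. The option values below are
  hypothetical; `new/1` simply stores whatever keyword lists it is given:
      plan =
        Forage.ForagePlan.new(
          sort: [inserted_at: :desc],
          pagination: [page: 2]
        )
      plan.search
      #=> []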
"""
defstruct search: [],
sort: [],
pagination: []
def new(opts) do
%__MODULE__{
search: Keyword.get(opts, :search, []),
sort: Keyword.get(opts, :sort, []),
pagination: Keyword.get(opts, :pagination, [])
}
end
end
| 23.235294 | 73 | 0.58481 |
791f19a2beb8042b0dad68a573142dd8dd7f3ff4 | 78 | exs | Elixir | config/prod.exs | nicksanford/fido | a7dac2161eb486eb3209284e3fe1a9e497e31329 | [
"MIT"
] | null | null | null | config/prod.exs | nicksanford/fido | a7dac2161eb486eb3209284e3fe1a9e497e31329 | [
"MIT"
] | null | null | null | config/prod.exs | nicksanford/fido | a7dac2161eb486eb3209284e3fe1a9e497e31329 | [
"MIT"
] | null | null | null | use Mix.Config
config :logger, level: :info
import_config "prod.secret.exs"
| 13 | 31 | 0.75641 |
791f3f8ca1cad8d79f72b062f4f4bf80948518d9 | 1,335 | ex | Elixir | clients/accelerated_mobile_page_url/lib/google_api/accelerated_mobile_page_url/v1/connection.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | clients/accelerated_mobile_page_url/lib/google_api/accelerated_mobile_page_url/v1/connection.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | clients/accelerated_mobile_page_url/lib/google_api/accelerated_mobile_page_url/v1/connection.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AcceleratedMobilePageUrl.V1.Connection do
@moduledoc """
Handle Tesla connections for GoogleApi.AcceleratedMobilePageUrl.V1.
"""
use Tesla
# Add any middleware here (authentication)
plug(Tesla.Middleware.BaseUrl, "https://acceleratedmobilepageurl.googleapis.com")
plug(Tesla.Middleware.Headers, %{"User-Agent" => "Elixir"})
plug(Tesla.Middleware.EncodeJson)
@doc """
Configure an authless client connection
# Returns
Tesla.Env.client
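  # Example
  A minimal usage sketch, added for illustration (`new/0` takes no arguments):
      connection = GoogleApi.AcceleratedMobilePageUrl.V1.Connection.new()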
"""
@spec new() :: Tesla.Env.client()
def new do
Tesla.build_client([])
end
end
| 31.046512 | 83 | 0.746067 |
791f4a82e92e4797b8ed87c92372666b0620610f | 3,578 | ex | Elixir | lib/square_up/resources/v2/order.ex | beaver21/SquareUp | c9791d96ed9335926933403a966eba5076fbc15b | [
"MIT"
] | 4 | 2020-10-21T18:34:50.000Z | 2022-03-16T06:25:44.000Z | lib/square_up/resources/v2/order.ex | beaver21/SquareUp | c9791d96ed9335926933403a966eba5076fbc15b | [
"MIT"
] | 5 | 2020-10-21T23:16:32.000Z | 2021-05-13T13:42:44.000Z | lib/square_up/resources/v2/order.ex | beaver21/SquareUp | c9791d96ed9335926933403a966eba5076fbc15b | [
"MIT"
] | 3 | 2020-10-21T21:20:36.000Z | 2021-03-15T18:00:30.000Z | defmodule SquareUp.V2.Order do
import Norm
import SquareUp.Client, only: [call: 2]
@spec update(
SquareUp.Client.t(),
%{required(:order_id) => binary()},
SquareUp.TypeSpecs.update_order_request(),
%{}
) :: SquareUp.Client.response(SquareUp.TypeSpecs.update_order_response())
def update(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
path_params_spec = schema(%{order_id: spec(is_binary())})
params_spec = Norm.Delegate.delegate(&SquareUp.NormSchema.update_order_request/0)
query_params_spec = schema(%{})
response_spec = {:delegate, &SquareUp.ResponseSchema.update_order_response/0}
call(client, %{
method: :put,
path_params: path_params,
params: params,
query_params: query_params,
path_params_spec: path_params_spec,
params_spec: params_spec,
query_params_spec: query_params_spec,
response_spec: response_spec,
path: "/v2/orders/{order_id}"
})
end
@spec create(SquareUp.Client.t(), %{}, SquareUp.TypeSpecs.create_order_request(), %{}) ::
SquareUp.Client.response(SquareUp.TypeSpecs.create_order_response())
def create(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
path_params_spec = schema(%{})
params_spec = Norm.Delegate.delegate(&SquareUp.NormSchema.create_order_request/0)
query_params_spec = schema(%{})
response_spec = {:delegate, &SquareUp.ResponseSchema.create_order_response/0}
call(client, %{
method: :post,
path_params: path_params,
params: params,
query_params: query_params,
path_params_spec: path_params_spec,
params_spec: params_spec,
query_params_spec: query_params_spec,
response_spec: response_spec,
path: "/v2/orders"
})
end
@spec calculate(SquareUp.Client.t(), %{}, SquareUp.TypeSpecs.calculate_order_request(), %{}) ::
SquareUp.Client.response(SquareUp.TypeSpecs.calculate_order_response())
def calculate(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
path_params_spec = schema(%{})
params_spec = Norm.Delegate.delegate(&SquareUp.NormSchema.calculate_order_request/0)
query_params_spec = schema(%{})
response_spec = {:delegate, &SquareUp.ResponseSchema.calculate_order_response/0}
call(client, %{
method: :post,
path_params: path_params,
params: params,
query_params: query_params,
path_params_spec: path_params_spec,
params_spec: params_spec,
query_params_spec: query_params_spec,
response_spec: response_spec,
path: "/v2/orders/calculate"
})
end
@spec pay(
SquareUp.Client.t(),
%{required(:order_id) => binary()},
SquareUp.TypeSpecs.pay_order_request(),
%{}
) :: SquareUp.Client.response(SquareUp.TypeSpecs.pay_order_response())
def pay(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
path_params_spec = schema(%{order_id: spec(is_binary())})
params_spec = Norm.Delegate.delegate(&SquareUp.NormSchema.pay_order_request/0)
query_params_spec = schema(%{})
response_spec = {:delegate, &SquareUp.ResponseSchema.pay_order_response/0}
call(client, %{
method: :post,
path_params: path_params,
params: params,
query_params: query_params,
path_params_spec: path_params_spec,
params_spec: params_spec,
query_params_spec: query_params_spec,
response_spec: response_spec,
path: "/v2/orders/{order_id}/pay"
})
end
end
| 35.425743 | 97 | 0.671045 |
791f5d00f22b5f53ffdc470aa4f3d6f784b98bfc | 2,639 | ex | Elixir | clients/container_analysis/lib/google_api/container_analysis/v1alpha1/model/slsa_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/container_analysis/lib/google_api/container_analysis/v1alpha1/model/slsa_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/container_analysis/lib/google_api/container_analysis/v1alpha1/model/slsa_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1alpha1.Model.SlsaMetadata do
@moduledoc """
Other properties of the build.
## Attributes
* `buildFinishedOn` (*type:* `DateTime.t`, *default:* `nil`) - The timestamp of when the build completed.
* `buildInvocationId` (*type:* `String.t`, *default:* `nil`) - Identifies the particular build invocation, which can be useful for finding associated logs or other ad-hoc analysis. The value SHOULD be globally unique, per in-toto Provenance spec.
* `buildStartedOn` (*type:* `DateTime.t`, *default:* `nil`) - The timestamp of when the build started.
* `completeness` (*type:* `GoogleApi.ContainerAnalysis.V1alpha1.Model.SlsaCompleteness.t`, *default:* `nil`) - Indicates that the builder claims certain fields in this message to be complete.
* `reproducible` (*type:* `boolean()`, *default:* `nil`) - If true, the builder claims that running the recipe on materials will produce bit-for-bit identical output.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:buildFinishedOn => DateTime.t() | nil,
:buildInvocationId => String.t() | nil,
:buildStartedOn => DateTime.t() | nil,
:completeness => GoogleApi.ContainerAnalysis.V1alpha1.Model.SlsaCompleteness.t() | nil,
:reproducible => boolean() | nil
}
field(:buildFinishedOn, as: DateTime)
field(:buildInvocationId)
field(:buildStartedOn, as: DateTime)
field(:completeness, as: GoogleApi.ContainerAnalysis.V1alpha1.Model.SlsaCompleteness)
field(:reproducible)
end
defimpl Poison.Decoder, for: GoogleApi.ContainerAnalysis.V1alpha1.Model.SlsaMetadata do
def decode(value, options) do
GoogleApi.ContainerAnalysis.V1alpha1.Model.SlsaMetadata.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ContainerAnalysis.V1alpha1.Model.SlsaMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 44.728814 | 250 | 0.732853 |
791f5ead8737d73523df71d744ee4887f0107d6b | 61 | ex | Elixir | apps/robby_web/web/views/maps_view.ex | jeffweiss/openrobby | 9fed2024e6ce87a6fe27ef3af85558f3116aca2a | [
"Apache-2.0"
] | 3 | 2021-04-16T21:54:55.000Z | 2021-04-30T22:15:41.000Z | apps/robby_web/web/views/maps_view.ex | jeffweiss/openrobby | 9fed2024e6ce87a6fe27ef3af85558f3116aca2a | [
"Apache-2.0"
] | 1 | 2021-06-29T15:54:19.000Z | 2021-06-29T15:54:19.000Z | apps/robby_web/web/views/maps_view.ex | jeffweiss/openrobby | 9fed2024e6ce87a6fe27ef3af85558f3116aca2a | [
"Apache-2.0"
] | 2 | 2021-04-16T22:23:16.000Z | 2021-05-26T15:52:55.000Z | defmodule RobbyWeb.MapsView do
use RobbyWeb.Web, :view
end
| 15.25 | 30 | 0.786885 |
791f68bf14fbc4511e9bdd49d1c3fde5f3947fe1 | 608 | ex | Elixir | lib/mp_api_web/controllers/session_controller.ex | jsvelasquezv/mp_api | 9a2262188b5b12c0e2ecd9284a8e7f445d2be4a0 | [
"MIT"
] | null | null | null | lib/mp_api_web/controllers/session_controller.ex | jsvelasquezv/mp_api | 9a2262188b5b12c0e2ecd9284a8e7f445d2be4a0 | [
"MIT"
] | null | null | null | lib/mp_api_web/controllers/session_controller.ex | jsvelasquezv/mp_api | 9a2262188b5b12c0e2ecd9284a8e7f445d2be4a0 | [
"MIT"
] | null | null | null | defmodule MpApiWeb.SessionController do
use MpApiWeb, :controller
import MpApiWeb.Authorize
alias Phauxth.Confirm.Login
# If you are using Argon2 or Pbkdf2, add crypto: Comeonin.Argon2
# or crypto: Comeonin.Pbkdf2 to Login.verify (after Accounts)
def create(conn, %{"session" => params}) do
case Login.verify(params, MpApi.Accounts, crypto: Comeonin.Argon2) do
{:ok, user} ->
token = Phauxth.Token.sign(conn, user.id)
render(conn, MpApiWeb.SessionView, "info.json", %{info: token})
{:error, _message} ->
error(conn, :unauthorized, 401)
end
end
end
| 32 | 73 | 0.682566 |
791f7105e615ea824b5e1e07b88307ed84fd9b50 | 8,482 | ex | Elixir | lib/chaperon/action/http.ex | jarrodmoldrich/chaperon | 6dffc1a744f8ac11581af89cf6cd99a62f52dd27 | [
"MIT"
] | null | null | null | lib/chaperon/action/http.ex | jarrodmoldrich/chaperon | 6dffc1a744f8ac11581af89cf6cd99a62f52dd27 | [
"MIT"
] | null | null | null | lib/chaperon/action/http.ex | jarrodmoldrich/chaperon | 6dffc1a744f8ac11581af89cf6cd99a62f52dd27 | [
"MIT"
] | null | null | null | defmodule Chaperon.Action.HTTP do
@moduledoc """
HTTP based actions to be run in a `Chaperon.Scenario` module for a given
`Chaperon.Session`.
This supports `GET`, `POST`, `PUT`, `PATCH`, `DELETE` & `HEAD` requests with
support for optional headers & query params.
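  ## Example
  A minimal sketch, added for illustration only. The path, JSON payload and
  header below are hypothetical; each helper builds a `%Chaperon.Action.HTTP{}`
  struct via `add_options/2`:
      Chaperon.Action.HTTP.post("/articles",
        json: %{title: "Hello"},
        headers: %{"X-Trace-Id" => "abc123"}
      )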
"""
defstruct method: :get,
path: nil,
headers: %{},
params: %{},
body: nil,
decode: nil,
callback: nil,
metrics_url: nil,
hackney: nil
@type method :: :get | :post | :put | :patch | :delete | :head
@type options :: [
form: map | Keyword.t(),
json: map | Keyword.t(),
headers: map | Keyword.t(),
params: map | Keyword.t(),
decode: :json | (HTTPoison.Response.t() -> any),
with_result: Chaperon.Session.result_callback(),
metrics_url: String.t()
]
@type t :: %Chaperon.Action.HTTP{
method: method,
path: String.t(),
headers: map,
params: map,
body: binary,
decode: :json | (HTTPoison.Response.t() -> any),
callback: Chaperon.Session.result_callback(),
metrics_url: String.t()
}
@spec get(String.t(), options) :: t
def get(path, opts) do
%Chaperon.Action.HTTP{
method: :get,
path: path
}
|> add_options(opts)
end
@spec post(String.t(), options) :: t
def post(path, opts) do
%Chaperon.Action.HTTP{
method: :post,
path: path
}
|> add_options(opts)
end
@spec put(String.t(), options) :: t
def put(path, opts) do
%Chaperon.Action.HTTP{
method: :put,
path: path
}
|> add_options(opts)
end
@spec patch(String.t(), options) :: t
def patch(path, opts) do
%Chaperon.Action.HTTP{
method: :patch,
path: path
}
|> add_options(opts)
end
@spec delete(String.t(), options) :: t
def delete(path, opts \\ []) do
%Chaperon.Action.HTTP{
method: :delete,
path: path
}
|> add_options(opts)
end
alias __MODULE__
alias Chaperon.Session
def url(%{path: ""}, %Session{config: %{base_url: base_url}}) do
base_url <> "/"
end
def url(%{path: path}, %Session{config: %{base_url: base_url}}) do
if is_full_url?(path) do
path
else
base_url <> path
end
end
def url(%{path: path}, _) do
path
end
def is_full_url?("http://" <> _), do: true
def is_full_url?("https://" <> _), do: true
def is_full_url?("ws://" <> _), do: true
def is_full_url?("wss://" <> _), do: true
def is_full_url?(_), do: false
def full_url(action = %HTTP{method: method, params: params}, session) do
url = url(action, session)
case method do
:get -> url <> query_params_string(params)
_ -> url
end
end
def metrics_url(%{metrics_url: metrics_url}, %Session{config: %{base_url: base_url}})
when not is_nil(metrics_url) do
base_url <> metrics_url
end
def metrics_url(action, session) do
if session.config[:skip_query_params_in_metrics] do
action
|> url(session)
else
action
|> full_url(session)
end
end
def full_path(%{path: path, params: params}), do: path <> query_params_string(params)
def query_params_string([]), do: ""
def query_params_string(params) do
case URI.encode_query(params) do
"" -> ""
q -> "?" <> q
end
end
def options(action, session) do
opts =
session.config
|> Map.get(:http, %{})
|> Enum.into([])
|> Keyword.merge(params: action.params)
case hackney_opts(action, session) do
[] ->
opts
hackney_opts ->
opts
|> Keyword.merge(hackney: hackney_opts)
end
end
@default_headers %{
"User-Agent" => "chaperon",
"Accept" => "*/*"
}
@spec add_options(any, Chaperon.Action.HTTP.options()) :: t
def add_options(action, opts) do
alias Keyword, as: KW
import Map, only: [merge: 2]
headers = opts[:headers] || %{}
params = opts[:params] || %{}
decode = opts[:decode]
callback = opts[:with_result]
metrics_url = opts[:metrics_url]
hackney = opts[:hackney]
{new_headers, body} =
opts
|> KW.delete(:headers)
|> KW.delete(:params)
|> KW.delete(:decode)
|> KW.delete(:with_result)
|> KW.delete(:metrics_url)
|> KW.delete(:hackney)
|> parse_body
headers =
action.headers
|> merge(@default_headers)
|> merge(headers)
|> merge(new_headers)
%{
action
| headers: headers,
params: params,
body: body,
decode: decode,
callback: callback,
metrics_url: metrics_url,
hackney: hackney
}
end
defp hackney_opts(_action, session) do
opts = [
cookie: session.cookies,
basic_auth: session.config[:basic_auth],
pool: :chaperon,
insecure: true
]
opts
|> Enum.map(&hackney_opt/1)
|> Enum.reject(&is_nil/1)
end
# don't pass if no value set
defp hackney_opt({_key, nil}), do: nil
# don't pass empty list of cookies
defp hackney_opt({:cookie, []}), do: nil
# pass everything else as hackney option
defp hackney_opt(opt), do: opt
defp parse_body([]), do: {%{}, ""}
defp parse_body(json: data) when is_list(data) do
data =
if Keyword.keyword?(data) do
data |> Enum.into(%{})
else
data
end
data
|> json_body
end
defp parse_body(json: data), do: data |> json_body
defp parse_body(form: data), do: data |> form_body
defp json_body(data) do
{
%{"Content-Type" => "application/json", "Accept" => "application/json"},
data |> Poison.encode!()
}
end
defp form_body(data) do
{
%{"Content-Type" => "application/x-www-form-urlencoded"},
data |> URI.encode_query()
}
end
end
defimpl Chaperon.Actionable, for: Chaperon.Action.HTTP do
alias Chaperon.Action.Error
alias Chaperon.Action.HTTP
import Chaperon.Timing
import Chaperon.Session
use Chaperon.Session.Logging
def run(action, session) do
full_url = HTTP.full_url(action, session)
session
|> log_info("#{action.method |> to_string |> String.upcase()} #{full_url}")
start = timestamp()
case HTTPoison.request(
action.method,
HTTP.url(action, session),
action.body || "",
action.headers,
HTTP.options(action, session)
) do
{:ok, response} ->
session
|> add_result(action, response)
|> add_request_metrics(action, response, timestamp() - start)
|> store_response_cookies(response)
|> run_callback_if_defined(action, response)
|> ok
{:error, reason} ->
session
|> log_error("HTTP action #{action} failed")
session =
session
|> run_error_callback(action, reason)
{:error, %Error{reason: reason, action: action, session: session}}
end
end
defp add_request_metrics(
session,
action,
_response = %HTTPoison.Response{status_code: status_code},
duration
) do
metrics_url = HTTP.metrics_url(action, session)
session =
session
|> add_metric({action.method, metrics_url}, duration)
case status_code do
c when c < 400 ->
session
c when c in 400..599 ->
session
|> add_metric({{:error, {:http, c}}, {action.method, metrics_url}}, duration)
end
end
def run_callback_if_defined(session, action, response) do
case response.status_code do
code when code in 200..399 ->
session
|> log_debug("HTTP Response #{action} : #{code}")
|> run_callback(action, response)
code ->
session
|> log_warn("HTTP Response #{action} failed with status code: #{code}")
|> log_warn(response.body)
|> run_error_callback(action, response)
end
end
def abort(action, session) do
# TODO
{:ok, action, session}
end
end
defimpl String.Chars, for: Chaperon.Action.HTTP do
alias Chaperon.Action.HTTP
@methods [:get, :post, :put, :patch, :delete, :head]
@method_strings @methods
|> Enum.map(&{&1, &1 |> Kernel.to_string() |> String.upcase()})
|> Enum.into(%{})
def to_string(http) do
"#{@method_strings[http.method]} #{HTTP.full_url(http, %{})}"
end
end
| 23.626741 | 87 | 0.578166 |
791fccbc097737bd268ab93412f9857aa3298dc7 | 3,852 | ex | Elixir | lib/live_view_example_usage_web/live/chat/index.ex | braynm/phoenix-liveview-example-usage | a79782b29a507ef495a6cf59cf37d78e2c116d14 | [
"MIT"
] | 3 | 2021-09-13T07:07:46.000Z | 2021-11-21T09:42:39.000Z | lib/live_view_example_usage_web/live/chat/index.ex | braynm/phoenix-liveview-example-usage | a79782b29a507ef495a6cf59cf37d78e2c116d14 | [
"MIT"
] | 2 | 2021-09-18T17:16:16.000Z | 2021-11-21T23:25:23.000Z | lib/live_view_example_usage_web/live/chat/index.ex | braynm/phoenix-liveview-example-usage | a79782b29a507ef495a6cf59cf37d78e2c116d14 | [
"MIT"
] | 1 | 2021-09-13T07:07:50.000Z | 2021-09-13T07:07:50.000Z | defmodule LiveViewExampleUsageWeb.ChatLive do
use Phoenix.LiveView
alias LiveViewExampleUsageWeb.Presence
def mount(_params, session, socket) do
socket = redirect_to_login(session["user"], socket)
if connected?(socket) do
join_topics_and_track_presence(session["user"])
end
chats = topics(session["user"])
socket =
socket
|> assign(user: session["user"])
|> assign(chats: transform_chat_list(session["user"], chats))
send(self(), :chat_statuses_on_mount)
{:ok, socket}
end
def handle_info(:chat_statuses_on_mount, socket) do
chats =
Enum.map(socket.assigns.chats, fn chat ->
case Map.fetch(Presence.list(chat.chat), chat.name) do
:error -> chat
_ -> %{chat | online: true}
end
end)
socket =
socket
|> assign(chats: chats)
{:noreply, socket}
end
def handle_info(%{event: "presence_diff", topic: topic, payload: payload}, socket) do
send(self(), {:map_user_leaves_status, topic, payload.leaves})
send(self(), {:map_user_joins_status, topic, payload.joins})
{:noreply, socket}
end
def handle_info({:map_user_leaves_status, topic, leaves}, socket) do
user = Map.keys(leaves) |> List.first()
socket =
if user !== nil && user !== socket.assigns.user do
chats = update_chat_list_online_status(topic, socket.assigns.chats, false)
assign(socket, chats: chats)
else
socket
end
{:noreply, socket}
end
def handle_info({:map_user_joins_status, topic, joins}, socket) do
user = Map.keys(joins) |> List.first()
socket =
if user !== nil && user !== socket.assigns.user do
chats = update_chat_list_online_status(topic, socket.assigns.chats, true)
assign(socket, chats: chats)
else
socket
end
{:noreply, socket}
end
def handle_info({:send_message_to_other, payload}, socket) do
socket =
socket
|> push_event("new_message", payload)
{:noreply, socket}
end
def handle_event("send_message", payload, socket) do
message = %{
"message" => payload["value"],
"chat" => payload["chat"],
"user" => socket.assigns.user
}
Phoenix.PubSub.broadcast_from!(
LiveViewExampleUsage.PubSub,
self(),
payload["chat"],
{:send_message_to_other, message}
)
socket =
socket
|> push_event("new_message", message)
{:noreply, socket}
end
defp transform_chat_list(logged_in_user, topics) do
Enum.map(topics, fn topic ->
%{
chat: topic,
name: LiveViewExampleUsage.Chat.room_list_append_chat_name(logged_in_user, topic),
online: false
}
end)
end
defp topics(logged_in_user) do
users =
Enum.map(LiveViewExampleUsage.Chat.user_list(), fn user -> Map.get(user, :username) end)
# Example chat room list for logged-in user "FRODO":
    # Normalize the list first by sorting each item pair (e.g. frodo, sam) alphabetically
# > ["chat:frodo-sam", "chat:aragorn-frodo", "chat:frodo-gandalf"]
LiveViewExampleUsage.Chat.user_to_chat_room_list(
logged_in_user,
users
)
end
defp join_topics_and_track_presence(logged_in_user) do
topics = topics(logged_in_user)
Enum.map(topics, fn topic ->
LiveViewExampleUsageWeb.Endpoint.subscribe(topic)
Presence.track(
self(),
topic,
logged_in_user,
%{}
)
end)
end
defp redirect_to_login(nil, socket), do: redirect(socket, to: "/chat/login")
defp redirect_to_login(_user, socket), do: socket
defp update_chat_list_online_status(topic, chats, value) do
Enum.map(
chats,
fn
%{chat: ^topic} = chat ->
%{chat | online: value}
chat ->
chat
end
)
end
end
| 24.535032 | 94 | 0.631101 |
791fdaa8244c3dd9f8c2c575c38e90f7bdcbb247 | 698 | exs | Elixir | test/gandalf/grant_types/client_credentials_test.exs | pragmaticivan/gandalf | d6f79489104f3b3544247856bb93679f47ec9e0d | [
"Apache-2.0",
"MIT"
] | 1 | 2018-05-01T01:34:56.000Z | 2018-05-01T01:34:56.000Z | test/gandalf/grant_types/client_credentials_test.exs | pragmaticivan/gandalf | d6f79489104f3b3544247856bb93679f47ec9e0d | [
"Apache-2.0",
"MIT"
] | null | null | null | test/gandalf/grant_types/client_credentials_test.exs | pragmaticivan/gandalf | d6f79489104f3b3544247856bb93679f47ec9e0d | [
"Apache-2.0",
"MIT"
] | null | null | null | defmodule Gandalf.GrantType.ClientCredentialsTest do
use ExUnit.Case
use Gandalf.DB.Test.DataCase
use Gandalf.RepoBase
import Gandalf.Factory
alias Gandalf.GrantType.ClientCredentials, as: ClientCredentialsGrantType
setup do
client_owner = insert(:user)
client = insert(:client, user_id: client_owner.id)
params = %{"client_id" => client.id, "client_secret" => client.secret}
{:ok, [params: params]}
end
test "oauth2 authorization with client_credentials grant type", %{params: params} do
access_token = ClientCredentialsGrantType.authorize(params)
refute is_nil(access_token)
assert access_token.details[:grant_type] == "client_credentials"
end
end
| 33.238095 | 86 | 0.753582 |
79200f8cd27706d00007bb4566e81f00c4a0659c | 408 | exs | Elixir | test/models/tag_test.exs | cdegroot/clueless | c7f575159a42f55eb843df0e37802e778826c845 | [
"MIT"
] | null | null | null | test/models/tag_test.exs | cdegroot/clueless | c7f575159a42f55eb843df0e37802e778826c845 | [
"MIT"
] | 4 | 2015-12-18T20:35:49.000Z | 2015-12-18T20:36:43.000Z | test/models/tag_test.exs | cdegroot/clueless | c7f575159a42f55eb843df0e37802e778826c845 | [
"MIT"
] | null | null | null | defmodule Clueless.TagTest do
use Clueless.ModelCase
alias Clueless.Tag
@valid_attrs %{name: "some content"}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = Tag.changeset(%Tag{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = Tag.changeset(%Tag{}, @invalid_attrs)
refute changeset.valid?
end
end
| 21.473684 | 53 | 0.715686 |
79205012d544a1751df2ea535a282ba6e5627102 | 1,986 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_delete_conversation_model_operation_metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_delete_conversation_model_operation_metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_delete_conversation_model_operation_metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2DeleteConversationModelOperationMetadata do
@moduledoc """
Metadata for a ConversationModels.DeleteConversationModel operation.
## Attributes
* `conversationModel` (*type:* `String.t`, *default:* `nil`) - The resource name of the conversation model. Format: `projects//conversationModels/`
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Timestamp when delete conversation model request was created. The time is measured on server side.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:conversationModel => String.t() | nil,
:createTime => DateTime.t() | nil
}
field(:conversationModel)
field(:createTime, as: DateTime)
end
defimpl Poison.Decoder,
for:
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2DeleteConversationModelOperationMetadata do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2DeleteConversationModelOperationMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2DeleteConversationModelOperationMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.842105 | 160 | 0.752266 |
7920625619ed02d9d7acd8802481d79e647292ab | 1,570 | ex | Elixir | apps/tfcon_web/lib/tfcon_web/controllers/auth_controller.ex | shtanaka/elixir-transfer-control | 1b325a4c0c5ee10551515d7a440e503ed68d4ddf | [
"MIT"
] | null | null | null | apps/tfcon_web/lib/tfcon_web/controllers/auth_controller.ex | shtanaka/elixir-transfer-control | 1b325a4c0c5ee10551515d7a440e503ed68d4ddf | [
"MIT"
] | 2 | 2021-03-10T02:00:57.000Z | 2021-05-10T21:22:39.000Z | apps/tfcon_web/lib/tfcon_web/controllers/auth_controller.ex | shtanaka/elixir-transfer-control | 1b325a4c0c5ee10551515d7a440e503ed68d4ddf | [
"MIT"
] | null | null | null | defmodule TfconWeb.AuthController do
use TfconWeb, :controller
use PhoenixSwagger
alias Tfcon.{Accounts, Guardian}
alias Tfcon.JsonHandler
def create(conn, %{"account_number" => account_number, "password" => password}) do
auth_data = Accounts.authenticate_user(account_number, password)
login_reply(conn, auth_data)
end
swagger_path :create do
post "/api/v1/auth"
summary "get auth data"
description "get auth data"
parameters do
login_credentials :body, Schema.ref(:Auth), "Login credentials"
end
response 200, "Ok", Schema.ref(:AuthResponse)
end
defp login_reply(conn, {:ok, user}) do
{:ok, token, _} = Guardian.encode_and_sign(user)
json(conn, JsonHandler.success_json(%{token: token}))
end
defp login_reply(conn, {:error, reason}) do
conn
|> put_status(401)
|> json(JsonHandler.error_json(%{errors: [reason]}))
end
def swagger_definitions do
%{
Auth: swagger_schema do
title "Authentication Response"
description "Authentication Response of application"
properties do
account_number :integer, "", required: true
password :string, "", required: true
end
example %{
account_number: 1,
password: "mypassword"
}
end,
AuthResponse: swagger_schema do
title "Authentication"
description "Authentication of application"
properties do
token :string, "", required: true
end
example %{token: "token"}
end
}
end
end
| 27.068966 | 84 | 0.645223 |
79206427ad789c24e595e53c84cc5e3f636f64ca | 1,169 | exs | Elixir | test/storage_test.exs | aforward/temporal | ec71f48719c471a72a490583abe692c314dff080 | [
"MIT"
] | null | null | null | test/storage_test.exs | aforward/temporal | ec71f48719c471a72a490583abe692c314dff080 | [
"MIT"
] | null | null | null | test/storage_test.exs | aforward/temporal | ec71f48719c471a72a490583abe692c314dff080 | [
"MIT"
] | null | null | null | defmodule Temporal.StorageTest do
use ExUnit.Case
doctest Temporal.Storage
alias Temporal.Storage
test "save based on basedir, frequency and source" do
p = Storage.path("/tmp", :yearly, "https://example.com/x.txt")
assert {:ok, p} == Storage.save({:ok, "apples"}, "/tmp", :yearly, "https://example.com/x.txt")
assert true == Storage.exists?("/tmp", :yearly, "https://example.com/x.txt")
assert true == Storage.exists?(p)
end
test "exists? based on basedir, frequency and source" do
p = Storage.path("/tmp", :yearly, "https://example.com/x.txt")
assert {:ok, p} == Storage.save({:ok, "apples"}, "/tmp", :yearly, "https://example.com/x.txt")
assert true == Storage.exists?("/tmp", :yearly, "https://example.com/x.txt")
assert false == Storage.exists?("/tmp", :monthly, "https://example.com/x.txt")
end
test "get based on basedir, frequency and source" do
p = Storage.path("/tmp", :yearly, "https://example.com/x.txt")
assert {:ok, p} == Storage.save({:ok, "apples"}, "/tmp", :yearly, "https://example.com/x.txt")
assert {:ok, "apples"} == Storage.get("/tmp", :yearly, "https://example.com/x.txt")
end
end
| 43.296296 | 98 | 0.635586 |
79206a89e30d72a3f195f77a689c2ed7d39cb0bc | 207 | exs | Elixir | test/_scripts/basic.exs | ityonemo/realbook | cc506144492b9344b540181a63ba35b6f5943c9c | [
"MIT"
] | 11 | 2020-07-13T20:43:43.000Z | 2021-05-18T23:52:27.000Z | test/_scripts/basic.exs | ityonemo/realbook | cc506144492b9344b540181a63ba35b6f5943c9c | [
"MIT"
] | 50 | 2020-07-13T02:20:14.000Z | 2020-08-17T16:45:17.000Z | test/_scripts/basic.exs | ityonemo/realbook | cc506144492b9344b540181a63ba35b6f5943c9c | [
"MIT"
] | 1 | 2021-03-11T17:02:21.000Z | 2021-03-11T17:02:21.000Z | # basic realbook that creates a directory in the temp directory.
def path, do: get(:dirname)
verify do
(run! "test -d #{path()}")
end
play do
log "creating #{path()}"
run! "mkdir -p #{path()}"
end
| 15.923077 | 64 | 0.63285 |
79206e5b2973ad93eb910afa3bccd628e6dae93d | 805 | ex | Elixir | lib/miotwo_web/controllers/train_controller.ex | jakswa/miotwo | b8455c0e019c952891f108dc5cd5ad134c57cbfc | [
"MIT"
] | 2 | 2018-08-24T18:09:34.000Z | 2020-07-20T01:42:37.000Z | lib/miotwo_web/controllers/train_controller.ex | jakswa/miotwo | b8455c0e019c952891f108dc5cd5ad134c57cbfc | [
"MIT"
] | null | null | null | lib/miotwo_web/controllers/train_controller.ex | jakswa/miotwo | b8455c0e019c952891f108dc5cd5ad134c57cbfc | [
"MIT"
] | 1 | 2018-05-16T22:57:57.000Z | 2018-05-16T22:57:57.000Z | defmodule MiotwoWeb.TrainController do
use MiotwoWeb, :controller
import ScoutApm.Tracing
alias Miotwo.TrainCache
# Proxy the raw response from MARTA,
# including content-type, status, and body.
def index(conn, %{}) do
case train_json() do
{:ok, resp} -> proxy(conn, resp)
{:error, %HTTPoison.Error{reason: reason}} ->
conn
|> put_status(:service_unavailable)
|> json(%{error: reason})
end
end
defp proxy(conn, resp) do
content_type = Enum.find(resp.headers, fn(i) ->
elem(i, 0) == "Content-Type"
end) |> elem(1)
conn
|> put_resp_content_type(content_type)
|> send_resp(resp.status_code, resp.body)
end
defp train_json do
timing("TrainCache", "train_json") do
TrainCache.train_json
end
end
end
| 23.676471 | 52 | 0.642236 |
79209905c75690963de7d12c56cf5f0d9cca5232 | 809 | ex | Elixir | lib/sipper/file_name_cleaner.ex | jordelver/rubytapas | 145b92a33e12c4b7f06d10819dafe309b395642d | [
"MIT"
] | 114 | 2015-09-18T10:55:37.000Z | 2021-02-20T01:49:49.000Z | lib/sipper/file_name_cleaner.ex | jordelver/rubytapas | 145b92a33e12c4b7f06d10819dafe309b395642d | [
"MIT"
] | 26 | 2015-09-18T07:03:13.000Z | 2017-11-06T12:35:27.000Z | lib/sipper/file_name_cleaner.ex | jordelver/rubytapas | 145b92a33e12c4b7f06d10819dafe309b395642d | [
"MIT"
] | 26 | 2015-09-19T03:46:16.000Z | 2018-10-14T21:39:22.000Z | defmodule Sipper.FileNameCleaner do
def clean(name) do
name
|> String.replace(":", "-") # Breaks on Windows, shown as "/" on OS X.
end
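  # Illustration only (hypothetical file name): a colon in an episode title is
  # swapped for a dash so the name stays valid on Windows:
  #
  #     Sipper.FileNameCleaner.clean("042: Some Episode.html")
  #     #=> "042- Some Episode.html"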
def migrate_unclean(dir, file, cb) do
files_in_dir = File.ls!(dir)
cleaned_files_in_dir = Enum.into(files_in_dir, %{}, &{clean(&1), &1})
cleaned_file = clean(file)
existing_version_of_current_file = Dict.get(cleaned_files_in_dir, cleaned_file)
if existing_version_of_current_file && existing_version_of_current_file != cleaned_file do
cb.(existing_version_of_current_file, cleaned_file)
mv!(dir, existing_version_of_current_file, cleaned_file)
end
end
defp mv!(dir, old_file, new_file) do
old_path = "#{dir}/#{old_file}"
new_path = "#{dir}/#{new_file}"
:ok = :file.rename(old_path, new_path)
end
end
| 29.962963 | 94 | 0.694685 |
79209e9e5fdf1bbfe0444a2255799f15487008b5 | 1,241 | exs | Elixir | test/pow/phoenix/mailer/mail_test.exs | abartier/pow | 58a3d082da093e2dc7f07825a950ee133204813f | [
"Unlicense",
"MIT"
] | null | null | null | test/pow/phoenix/mailer/mail_test.exs | abartier/pow | 58a3d082da093e2dc7f07825a950ee133204813f | [
"Unlicense",
"MIT"
] | null | null | null | test/pow/phoenix/mailer/mail_test.exs | abartier/pow | 58a3d082da093e2dc7f07825a950ee133204813f | [
"Unlicense",
"MIT"
] | null | null | null | defmodule Pow.Phoenix.MailerTemplate do
@moduledoc false
use Pow.Phoenix.Mailer.Template
template :mail_test,
"Test subject",
"""
<%= @value %> text
""",
"""
<%= content_tag(:h3, "\#{@value} HTML") %>
"""
end
defmodule Pow.Phoenix.MailerView do
@moduledoc false
use Pow.Phoenix.Mailer.View
end
defmodule Pow.Phoenix.Mailer.MailTest do
use ExUnit.Case
doctest Pow.Phoenix.Mailer.Mail
alias Plug.Conn
alias Pow.Phoenix.{Mailer.Mail, MailerView}
test "new/4" do
conn = %Conn{private: %{pow_config: []}}
assert mail = Mail.new(conn, :user, {MailerView, :mail_test}, value: "test")
assert mail.user == :user
assert mail.subject == "Test subject"
assert mail.html =~ "<h3>test HTML</h3>"
assert mail.text =~ "test text\n"
assert mail.assigns[:value] == "test"
end
test "new/4 with `:web_module`" do
conn = %Conn{private: %{pow_config: [web_mailer_module: Pow.Test.Phoenix]}}
assert mail = Mail.new(conn, :user, {MailerView, :mail_test}, value: "test")
assert mail.user == :user
assert mail.subject == ":web_mailer_module subject"
assert mail.html == "<p>:web_mailer_module html mail</p>"
assert mail.text == ":web_mailer_module text mail"
end
end
| 25.854167 | 80 | 0.662369 |
7920a72394c0b37bb4599a413c6c513f8e424a86 | 4,924 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/bubble_chart_spec.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/bubble_chart_spec.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/bubble_chart_spec.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.BubbleChartSpec do
@moduledoc """
A <a href="/chart/interactive/docs/gallery/bubblechart">bubble chart</a>.
## Attributes
* `bubbleBorderColor` (*type:* `GoogleApi.Sheets.V4.Model.Color.t`, *default:* `nil`) - The bubble border color.
* `bubbleBorderColorStyle` (*type:* `GoogleApi.Sheets.V4.Model.ColorStyle.t`, *default:* `nil`) - The bubble border color.
If bubble_border_color is also set, this field takes precedence.
* `bubbleLabels` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble labels. These do not need to be unique.
* `bubbleMaxRadiusSize` (*type:* `integer()`, *default:* `nil`) - The max radius size of the bubbles, in pixels.
If specified, the field must be a positive value.
* `bubbleMinRadiusSize` (*type:* `integer()`, *default:* `nil`) - The minimum radius size of the bubbles, in pixels.
  If specified, the field must be a positive value.
* `bubbleOpacity` (*type:* `number()`, *default:* `nil`) - The opacity of the bubbles between 0 and 1.0.
0 is fully transparent and 1 is fully opaque.
  * `bubbleSizes` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble sizes. Bubble sizes are used to draw
the bubbles at different sizes relative to each other.
If specified, group_ids must also be specified. This field is
optional.
* `bubbleTextStyle` (*type:* `GoogleApi.Sheets.V4.Model.TextFormat.t`, *default:* `nil`) - The format of the text inside the bubbles.
Underline and Strikethrough are not supported.
* `domain` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble x-values. These values locate the bubbles
in the chart horizontally.
* `groupIds` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble group IDs. All bubbles with the same group
ID are drawn in the same color. If bubble_sizes is specified then
this field must also be specified but may contain blank values.
This field is optional.
* `legendPosition` (*type:* `String.t`, *default:* `nil`) - Where the legend of the chart should be drawn.
  * `series` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble y-values. These values locate the bubbles
in the chart vertically.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:bubbleBorderColor => GoogleApi.Sheets.V4.Model.Color.t(),
:bubbleBorderColorStyle => GoogleApi.Sheets.V4.Model.ColorStyle.t(),
:bubbleLabels => GoogleApi.Sheets.V4.Model.ChartData.t(),
:bubbleMaxRadiusSize => integer(),
:bubbleMinRadiusSize => integer(),
:bubbleOpacity => number(),
:bubbleSizes => GoogleApi.Sheets.V4.Model.ChartData.t(),
:bubbleTextStyle => GoogleApi.Sheets.V4.Model.TextFormat.t(),
:domain => GoogleApi.Sheets.V4.Model.ChartData.t(),
:groupIds => GoogleApi.Sheets.V4.Model.ChartData.t(),
:legendPosition => String.t(),
:series => GoogleApi.Sheets.V4.Model.ChartData.t()
}
field(:bubbleBorderColor, as: GoogleApi.Sheets.V4.Model.Color)
field(:bubbleBorderColorStyle, as: GoogleApi.Sheets.V4.Model.ColorStyle)
field(:bubbleLabels, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:bubbleMaxRadiusSize)
field(:bubbleMinRadiusSize)
field(:bubbleOpacity)
field(:bubbleSizes, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:bubbleTextStyle, as: GoogleApi.Sheets.V4.Model.TextFormat)
field(:domain, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:groupIds, as: GoogleApi.Sheets.V4.Model.ChartData)
field(:legendPosition)
field(:series, as: GoogleApi.Sheets.V4.Model.ChartData)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.BubbleChartSpec do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.BubbleChartSpec.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.BubbleChartSpec do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 52.946237 | 162 | 0.70857 |
7920a9f0bc44c81550da57148106bff60cabd4e0 | 47,320 | ex | Elixir | lib/livebook_web/live/session_live.ex | Adzz/livebook | ab0a237ab632aa617d4c9d75fbfc553b57d91dbd | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/session_live.ex | Adzz/livebook | ab0a237ab632aa617d4c9d75fbfc553b57d91dbd | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/session_live.ex | Adzz/livebook | ab0a237ab632aa617d4c9d75fbfc553b57d91dbd | [
"Apache-2.0"
] | null | null | null | defmodule LivebookWeb.SessionLive do
use LivebookWeb, :live_view
import LivebookWeb.UserHelpers
import LivebookWeb.SessionHelpers
import Livebook.Utils, only: [access_by_id: 1]
alias LivebookWeb.SidebarHelpers
alias Livebook.{Sessions, Session, Delta, Notebook, Runtime, LiveMarkdown}
alias Livebook.Notebook.Cell
alias Livebook.JSInterop
@impl true
def mount(%{"id" => session_id}, _session, socket) do
# We use the tracked sessions to locate the session pid, but then
# we talk to the session process exclusively for getting all the information
case Sessions.fetch_session(session_id) do
{:ok, %{pid: session_pid}} ->
data =
if connected?(socket) do
data = Session.register_client(session_pid, self(), socket.assigns.current_user)
Phoenix.PubSub.subscribe(Livebook.PubSub, "sessions:#{session_id}")
data
else
Session.get_data(session_pid)
end
session = Session.get_by_pid(session_pid)
platform = platform_from_socket(socket)
{:ok,
socket
|> assign(
session: session,
platform: platform,
self: self(),
data_view: data_to_view(data),
autofocus_cell_id: autofocus_cell_id(data.notebook),
empty_default_runtime: Livebook.Config.default_runtime() |> elem(0) |> struct()
)
|> assign_private(data: data)
|> allow_upload(:cell_image,
accept: ~w(.jpg .jpeg .png .gif),
max_entries: 1,
max_file_size: 5_000_000
)}
:error ->
{:ok, redirect(socket, to: Routes.home_path(socket, :page))}
end
end
# Puts the given assigns in `socket.private`,
# to ensure they are not used for rendering.
defp assign_private(socket, assigns) do
Enum.reduce(assigns, socket, fn {key, value}, socket ->
put_in(socket.private[key], value)
end)
end
defp platform_from_socket(socket) do
with connect_info when connect_info != nil <- get_connect_info(socket),
{:ok, user_agent} <- Map.fetch(connect_info, :user_agent) do
platform_from_user_agent(user_agent)
else
_ -> nil
end
end
@impl true
def render(assigns) do
~H"""
<div class="flex flex-grow h-full"
id={"session-#{@session.id}"}
data-element="session"
phx-hook="Session"
data-global-status={elem(@data_view.global_status, 0)}
data-autofocus-cell-id={@autofocus_cell_id}>
<SidebarHelpers.sidebar>
<SidebarHelpers.logo_item socket={@socket} />
<SidebarHelpers.button_item
icon="booklet-fill"
label="Sections (ss)"
data_element="sections-list-toggle" />
<SidebarHelpers.button_item
icon="group-fill"
label="Connected users (su)"
data_element="clients-list-toggle" />
<SidebarHelpers.button_item
icon="cpu-line"
label="Runtime settings (sr)"
data_element="runtime-info-toggle" />
<SidebarHelpers.link_item
icon="delete-bin-6-fill"
label="Bin (sb)"
path={Routes.session_path(@socket, :bin, @session.id)}
active={@live_action == :bin} />
<SidebarHelpers.break_item />
<SidebarHelpers.link_item
icon="keyboard-box-fill"
label="Keyboard shortcuts (?)"
path={Routes.session_path(@socket, :shortcuts, @session.id)}
active={@live_action == :shortcuts} />
<SidebarHelpers.user_item
current_user={@current_user}
path={Routes.session_path(@socket, :user, @session.id)} />
</SidebarHelpers.sidebar>
<div class="flex flex-col h-full w-full max-w-xs absolute z-30 top-0 left-[64px] overflow-y-auto shadow-xl md:static md:shadow-none bg-gray-50 border-r border-gray-100 px-6 py-10"
data-element="side-panel">
<div data-element="sections-list">
<.sections_list data_view={@data_view} />
</div>
<div data-element="clients-list">
<.clients_list data_view={@data_view} self={@self} />
</div>
<div data-element="runtime-info">
<.runtime_info data_view={@data_view} session={@session} socket={@socket} empty_default_runtime={@empty_default_runtime} />
</div>
</div>
<div class="flex-grow overflow-y-auto scroll-smooth" data-element="notebook">
<div class="w-full max-w-screen-lg px-16 mx-auto py-7">
<div class="flex items-center pb-4 mb-6 space-x-4 border-b border-gray-200"
data-element="notebook-headline"
data-focusable-id="notebook"
id="notebook"
phx-hook="Headline"
data-on-value-change="set_notebook_name"
data-metadata="notebook">
<h1 class="flex-grow p-1 -ml-1 text-3xl font-semibold text-gray-800 border border-transparent rounded-lg whitespace-pre-wrap"
tabindex="0"
id="notebook-heading"
data-element="heading"
spellcheck="false"><%= @data_view.notebook_name %></h1>
<.menu id="session-menu">
<:toggle>
<button class="icon-button" aria-label="open notebook menu">
<.remix_icon icon="more-2-fill" class="text-xl" />
</button>
</:toggle>
<:content>
<%= live_patch to: Routes.session_path(@socket, :export, @session.id, "livemd"),
class: "menu-item text-gray-500",
role: "menuitem" do %>
<.remix_icon icon="download-2-line" />
<span class="font-medium">Export</span>
<% end %>
<button class="menu-item text-gray-500"
role="menuitem"
phx-click="erase_outputs">
<.remix_icon icon="eraser-fill" />
<span class="font-medium">Erase outputs</span>
</button>
<button class="menu-item text-gray-500"
role="menuitem"
phx-click="fork_session">
<.remix_icon icon="git-branch-line" />
<span class="font-medium">Fork</span>
</button>
<a class="menu-item text-gray-500"
role="menuitem"
href={live_dashboard_process_path(@socket, @session.pid)}
target="_blank">
<.remix_icon icon="dashboard-2-line" />
<span class="font-medium">See on Dashboard</span>
</a>
<%= live_patch to: Routes.home_path(@socket, :close_session, @session.id),
class: "menu-item text-red-600",
role: "menuitem" do %>
<.remix_icon icon="close-circle-line" />
<span class="font-medium">Close</span>
<% end %>
</:content>
</.menu>
</div>
<div class="flex flex-col w-full space-y-16">
<%= if @data_view.section_views == [] do %>
<div class="flex justify-center">
<button class="button-base button-small"
phx-click="append_section">
+ Section
</button>
</div>
<% end %>
<%= for {section_view, index} <- Enum.with_index(@data_view.section_views) do %>
<.live_component module={LivebookWeb.SessionLive.SectionComponent}
id={section_view.id}
index={index}
session_id={@session.id}
runtime={@data_view.runtime}
section_view={section_view} />
<% end %>
<div style="height: 80vh"></div>
</div>
</div>
</div>
<div class="fixed bottom-[0.4rem] right-[1.5rem]">
<LivebookWeb.SessionLive.IndicatorsComponent.render
socket={@socket}
session_id={@session.id}
file={@data_view.file}
dirty={@data_view.dirty}
autosave_interval_s={@data_view.autosave_interval_s}
runtime={@data_view.runtime}
global_status={@data_view.global_status} />
</div>
</div>
<%= if @live_action == :user do %>
<.current_user_modal
return_to={Routes.session_path(@socket, :page, @session.id)}
current_user={@current_user} />
<% end %>
<%= if @live_action == :runtime_settings do %>
<.modal class="w-full max-w-4xl" return_to={Routes.session_path(@socket, :page, @session.id)}>
<.live_component module={LivebookWeb.SessionLive.RuntimeComponent}
id="runtime-settings"
session={@session}
runtime={@data_view.runtime} />
</.modal>
<% end %>
<%= if @live_action == :file_settings do %>
<.modal class="w-full max-w-4xl" return_to={Routes.session_path(@socket, :page, @session.id)}>
<%= live_render @socket, LivebookWeb.SessionLive.PersistenceLive,
id: "persistence",
session: %{
"session" => @session,
"file" => @data_view.file,
"persist_outputs" => @data_view.persist_outputs,
"autosave_interval_s" => @data_view.autosave_interval_s
} %>
</.modal>
<% end %>
<%= if @live_action == :shortcuts do %>
<.modal class="w-full max-w-6xl" return_to={Routes.session_path(@socket, :page, @session.id)}>
<.live_component module={LivebookWeb.SessionLive.ShortcutsComponent}
id="shortcuts"
platform={@platform} />
</.modal>
<% end %>
<%= if @live_action == :cell_settings do %>
<.modal class="w-full max-w-xl" return_to={Routes.session_path(@socket, :page, @session.id)}>
<.live_component module={settings_component_for(@cell)}
id="cell-settings"
session={@session}
return_to={Routes.session_path(@socket, :page, @session.id)}
cell={@cell} />
</.modal>
<% end %>
<%= if @live_action == :cell_upload do %>
<.modal class="w-full max-w-xl" return_to={Routes.session_path(@socket, :page, @session.id)}>
<.live_component module={LivebookWeb.SessionLive.CellUploadComponent}
id="cell-upload"
session={@session}
return_to={Routes.session_path(@socket, :page, @session.id)}
cell={@cell}
uploads={@uploads} />
</.modal>
<% end %>
<%= if @live_action == :delete_section do %>
<.modal class="w-full max-w-xl" return_to={Routes.session_path(@socket, :page, @session.id)}>
<.live_component module={LivebookWeb.SessionLive.DeleteSectionComponent}
id="delete-section"
session={@session}
return_to={Routes.session_path(@socket, :page, @session.id)}
section={@section}
is_first={@section.id == @first_section_id} />
</.modal>
<% end %>
<%= if @live_action == :bin do %>
<.modal class="w-full max-w-4xl" return_to={Routes.session_path(@socket, :page, @session.id)}>
<.live_component module={LivebookWeb.SessionLive.BinComponent}
id="bin"
session={@session}
return_to={Routes.session_path(@socket, :page, @session.id)}
bin_entries={@data_view.bin_entries} />
</.modal>
<% end %>
<%= if @live_action == :export do %>
<.modal class="w-full max-w-4xl" return_to={Routes.session_path(@socket, :page, @session.id)}>
<.live_component module={LivebookWeb.SessionLive.ExportComponent}
id="export"
session={@session}
tab={@tab} />
</.modal>
<% end %>
"""
end
defp sections_list(assigns) do
~H"""
<div class="flex flex-col flex-grow">
<h3 class="uppercase text-sm font-semibold text-gray-500">
Sections
</h3>
<div class="flex flex-col mt-4 space-y-4">
<%= for section_item <- @data_view.sections_items do %>
<div class="flex items-center">
<button class="flex-grow flex items-center text-gray-500 hover:text-gray-900 text-left"
data-element="sections-list-item"
data-section-id={section_item.id}>
<span class="flex items-center space-x-1">
<span><%= section_item.name %></span>
<%= if section_item.parent do %>
<%# Note: the container has overflow-y auto, so we cannot set overflow-x visible,
consequently we show the tooltip wrapped to a fixed number of characters %>
<span {branching_tooltip_attrs(section_item.name, section_item.parent.name)}>
<.remix_icon icon="git-branch-line" class="text-lg font-normal leading-none flip-horizontally" />
</span>
<% end %>
</span>
</button>
<.session_status status={elem(section_item.status, 0)} cell_id={elem(section_item.status, 1)} />
</div>
<% end %>
</div>
<button class="inline-flex items-center justify-center p-8 py-1 mt-8 space-x-2 text-sm font-medium text-gray-500 border border-gray-400 border-dashed rounded-xl hover:bg-gray-100"
phx-click="append_section">
<.remix_icon icon="add-line" class="text-lg align-center" />
<span>New section</span>
</button>
</div>
"""
end
defp clients_list(assigns) do
~H"""
<div class="flex flex-col flex-grow">
<div class="flex items-center justify-between space-x-4">
<h3 class="uppercase text-sm font-semibold text-gray-500">
Users
</h3>
<span class="flex items-center px-2 py-1 space-x-2 text-sm bg-gray-200 rounded-lg">
<span class="inline-flex w-3 h-3 bg-green-600 rounded-full"></span>
<span><%= length(@data_view.clients) %> connected</span>
</span>
</div>
<div class="flex flex-col mt-4 space-y-4">
<%= for {client_pid, user} <- @data_view.clients do %>
<div class="flex items-center justify-between space-x-2"
id={"clients-list-item-#{inspect(client_pid)}"}
data-element="clients-list-item"
data-client-pid={inspect(client_pid)}>
<button class="flex items-center space-x-2 text-gray-500 hover:text-gray-900 disabled:pointer-events-none"
disabled={client_pid == @self}
data-element="client-link">
<.user_avatar user={user} class="flex-shrink-0 h-7 w-7" text_class="text-xs" />
<span><%= user.name || "Anonymous" %></span>
</button>
<%= if client_pid != @self do %>
<span class="tooltip left" data-tooltip="Follow this user"
data-element="client-follow-toggle"
data-meta="follow">
<button class="icon-button" aria-label="follow this user">
<.remix_icon icon="pushpin-line" class="text-lg" />
</button>
</span>
<span class="tooltip left" data-tooltip="Unfollow this user"
data-element="client-follow-toggle"
data-meta="unfollow">
<button class="icon-button" aria-label="unfollow this user">
<.remix_icon icon="pushpin-fill" class="text-lg" />
</button>
</span>
<% end %>
</div>
<% end %>
</div>
</div>
"""
end
defp runtime_info(assigns) do
~H"""
<div class="flex flex-col flex-grow">
<div class="flex items-center justify-between">
<h3 class="uppercase text-sm font-semibold text-gray-500">
Runtime
</h3>
<%= live_patch to: Routes.session_path(@socket, :runtime_settings, @session.id),
class: "icon-button",
type: "button" do %>
<.remix_icon icon="settings-3-line text-xl" />
<% end %>
</div>
<div class="flex flex-col mt-4 space-y-4">
<%= if @data_view.runtime do %>
<div class="flex flex-col space-y-3">
<.labeled_text label="Type" text={runtime_type_label(@data_view.runtime)} />
<.labeled_text label="Node name" text={@data_view.runtime.node} one_line={true} />
</div>
<div class="flex flex-col space-y-3">
<div class="flex space-x-2">
<button class="button-base button-blue" phx-click="restart_runtime">
<.remix_icon icon="wireless-charging-line" class="align-middle mr-1" />
<span>Reconnect</span>
</button>
<button class="button-base button-outlined-red"
type="button"
phx-click="disconnect_runtime">
Disconnect
</button>
</div>
</div>
<% else %>
<div class="flex flex-col space-y-3">
<.labeled_text label="Type" text={runtime_type_label(@empty_default_runtime)} />
</div>
<div class="flex space-x-2">
<button class="button-base button-blue" phx-click="connect_default_runtime">
<.remix_icon icon="wireless-charging-line" class="align-middle mr-1" />
<span>Connect</span>
</button>
<%= live_patch to: Routes.session_path(@socket, :runtime_settings, @session.id),
class: "button-base button-outlined-gray bg-transparent",
type: "button" do %>
Configure
<% end %>
</div>
<% end %>
</div>
</div>
"""
end
defp runtime_type_label(%Runtime.ElixirStandalone{}), do: "Elixir standalone"
defp runtime_type_label(%Runtime.MixStandalone{}), do: "Mix standalone"
defp runtime_type_label(%Runtime.Attached{}), do: "Attached"
defp runtime_type_label(%Runtime.Embedded{}), do: "Embedded"
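  # Per-section status dot shown in the sidebar; clicking it focuses the cell the status refers to.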
defp session_status(%{status: :evaluating} = assigns) do
~H"""
<button data-element="focus-cell-button" data-target={@cell_id}>
<.status_indicator circle_class="bg-blue-500" animated_circle_class="bg-blue-400">
</.status_indicator>
</button>
"""
end
defp session_status(%{status: :stale} = assigns) do
~H"""
<button data-element="focus-cell-button" data-target={@cell_id}>
<.status_indicator circle_class="bg-yellow-200">
</.status_indicator>
</button>
"""
end
defp session_status(assigns), do: ~H""
defp status_indicator(assigns) do
assigns = assign_new(assigns, :animated_circle_class, fn -> nil end)
~H"""
<div class="flex items-center space-x-1">
<span class="flex relative h-3 w-3">
<%= if @animated_circle_class do %>
<span class={"#{@animated_circle_class} animate-ping absolute inline-flex h-3 w-3 rounded-full opacity-75"}></span>
<% end %>
<span class={"#{@circle_class} relative inline-flex rounded-full h-3 w-3"}></span>
</span>
</div>
"""
end
defp settings_component_for(%Cell.Elixir{}),
do: LivebookWeb.SessionLive.ElixirCellSettingsComponent
defp branching_tooltip_attrs(name, parent_name) do
direction = if String.length(name) >= 16, do: "left", else: "right"
wrapped_name = Livebook.Utils.wrap_line("”" <> parent_name <> "”", 16)
label = "Branches from\n#{wrapped_name}"
[class: "tooltip #{direction}", data_tooltip: label]
end
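  # Translates URL params for the current live action into assigns (cell, section, export tab) used by the modals above.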
@impl true
def handle_params(%{"cell_id" => cell_id}, _url, socket) do
{:ok, cell, _} = Notebook.fetch_cell_and_section(socket.private.data.notebook, cell_id)
{:noreply, assign(socket, cell: cell)}
end
def handle_params(%{"section_id" => section_id}, _url, socket) do
{:ok, section} = Notebook.fetch_section(socket.private.data.notebook, section_id)
first_section_id = hd(socket.private.data.notebook.sections).id
{:noreply, assign(socket, section: section, first_section_id: first_section_id)}
end
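  # Catch-all route for relative file links: "__parent__" segments map back to ".." before the path is resolved.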
def handle_params(
%{"path_parts" => path_parts},
_url,
%{assigns: %{live_action: :catch_all}} = socket
) do
path_parts =
Enum.map(path_parts, fn
"__parent__" -> ".."
part -> part
end)
path = Path.join(path_parts)
{:noreply, handle_relative_path(socket, path)}
end
def handle_params(%{"tab" => tab}, _url, socket) do
{:noreply, assign(socket, tab: tab)}
end
def handle_params(_params, _url, socket) do
{:noreply, socket}
end
@impl true
def handle_event("session_init", _params, socket) do
data = socket.private.data
payload = %{
clients:
Enum.map(data.clients_map, fn {client_pid, user_id} ->
client_info(client_pid, data.users_map[user_id])
end)
}
{:reply, payload, socket}
end
def handle_event("cell_init", %{"cell_id" => cell_id}, socket) do
data = socket.private.data
case Notebook.fetch_cell_and_section(data.notebook, cell_id) do
{:ok, cell, _section} ->
info = data.cell_infos[cell.id]
payload = %{
source: cell.source,
revision: info.revision,
evaluation_digest: encode_digest(info.evaluation_digest)
}
# From this point on we don't need cell source in the LV,
# so we are going to drop it altogether
socket = remove_cell_source(socket, cell_id)
{:reply, payload, socket}
:error ->
{:noreply, socket}
end
end
def handle_event("append_section", %{}, socket) do
idx = length(socket.private.data.notebook.sections)
Session.insert_section(socket.assigns.session.pid, idx)
{:noreply, socket}
end
def handle_event("insert_section_below", params, socket) do
with {:ok, section, index} <-
section_with_next_index(
socket.private.data.notebook,
params["section_id"],
params["cell_id"]
) do
Session.insert_section_into(socket.assigns.session.pid, section.id, index)
end
{:noreply, socket}
end
def handle_event(
"set_section_parent",
%{"section_id" => section_id, "parent_id" => parent_id},
socket
) do
Session.set_section_parent(socket.assigns.session.pid, section_id, parent_id)
{:noreply, socket}
end
def handle_event("unset_section_parent", %{"section_id" => section_id}, socket) do
Session.unset_section_parent(socket.assigns.session.pid, section_id)
{:noreply, socket}
end
def handle_event("insert_cell_below", %{"type" => type} = params, socket) do
type = String.to_atom(type)
with {:ok, section, index} <-
section_with_next_index(
socket.private.data.notebook,
params["section_id"],
params["cell_id"]
) do
Session.insert_cell(socket.assigns.session.pid, section.id, index, type)
end
{:noreply, socket}
end
def handle_event("delete_cell", %{"cell_id" => cell_id}, socket) do
Session.delete_cell(socket.assigns.session.pid, cell_id)
{:noreply, socket}
end
def handle_event("set_notebook_name", %{"value" => name}, socket) do
name = normalize_name(name)
Session.set_notebook_name(socket.assigns.session.pid, name)
{:noreply, socket}
end
def handle_event("set_section_name", %{"metadata" => section_id, "value" => name}, socket) do
name = normalize_name(name)
Session.set_section_name(socket.assigns.session.pid, section_id, name)
{:noreply, socket}
end
def handle_event(
"apply_cell_delta",
%{"cell_id" => cell_id, "delta" => delta, "revision" => revision},
socket
) do
delta = Delta.from_compressed(delta)
Session.apply_cell_delta(socket.assigns.session.pid, cell_id, delta, revision)
{:noreply, socket}
end
def handle_event(
"report_cell_revision",
%{"cell_id" => cell_id, "revision" => revision},
socket
) do
Session.report_cell_revision(socket.assigns.session.pid, cell_id, revision)
{:noreply, socket}
end
def handle_event("move_cell", %{"cell_id" => cell_id, "offset" => offset}, socket) do
offset = ensure_integer(offset)
Session.move_cell(socket.assigns.session.pid, cell_id, offset)
{:noreply, socket}
end
def handle_event("move_section", %{"section_id" => section_id, "offset" => offset}, socket) do
offset = ensure_integer(offset)
Session.move_section(socket.assigns.session.pid, section_id, offset)
{:noreply, socket}
end
def handle_event("queue_cell_evaluation", %{"cell_id" => cell_id}, socket) do
Session.queue_cell_evaluation(socket.assigns.session.pid, cell_id)
{:noreply, socket}
end
def handle_event("queue_section_cells_evaluation", %{"section_id" => section_id}, socket) do
with {:ok, section} <- Notebook.fetch_section(socket.private.data.notebook, section_id) do
for cell <- section.cells, is_struct(cell, Cell.Elixir) do
Session.queue_cell_evaluation(socket.assigns.session.pid, cell.id)
end
end
{:noreply, socket}
end
def handle_event("queue_all_cells_evaluation", _params, socket) do
data = socket.private.data
for {cell, _} <- Notebook.elixir_cells_with_section(data.notebook),
data.cell_infos[cell.id].validity_status != :evaluated do
Session.queue_cell_evaluation(socket.assigns.session.pid, cell.id)
end
{:noreply, socket}
end
def handle_event("cancel_cell_evaluation", %{"cell_id" => cell_id}, socket) do
Session.cancel_cell_evaluation(socket.assigns.session.pid, cell_id)
{:noreply, socket}
end
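  # Saving requires a file; when none is attached yet, open the persistence settings instead.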
def handle_event("save", %{}, socket) do
if socket.private.data.file do
Session.save(socket.assigns.session.pid)
{:noreply, socket}
else
{:noreply,
push_patch(socket,
to: Routes.session_path(socket, :file_settings, socket.assigns.session.id)
)}
end
end
def handle_event("show_shortcuts", %{}, socket) do
{:noreply,
push_patch(socket, to: Routes.session_path(socket, :shortcuts, socket.assigns.session.id))}
end
def handle_event("show_bin", %{}, socket) do
{:noreply,
push_patch(socket, to: Routes.session_path(socket, :bin, socket.assigns.session.id))}
end
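  # Reconnecting duplicates the current runtime configuration and connects the session to the fresh copy.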
def handle_event("restart_runtime", %{}, socket) do
socket =
if runtime = socket.private.data.runtime do
case Runtime.duplicate(runtime) do
{:ok, new_runtime} ->
Session.connect_runtime(socket.assigns.session.pid, new_runtime)
socket
{:error, message} ->
put_flash(socket, :error, "Failed to setup runtime - #{message}")
end
else
socket
end
{:noreply, socket}
end
def handle_event("connect_default_runtime", %{}, socket) do
{runtime_module, args} = Livebook.Config.default_runtime()
socket =
case apply(runtime_module, :init, args) do
{:ok, runtime} ->
Session.connect_runtime(socket.assigns.session.pid, runtime)
socket
{:error, message} ->
put_flash(socket, :error, "Failed to setup runtime - #{message}")
end
{:noreply, socket}
end
def handle_event("disconnect_runtime", %{}, socket) do
Session.disconnect_runtime(socket.assigns.session.pid)
{:noreply, socket}
end
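  # Intellisense requests are forwarded to the runtime; the returned ref is later matched against the async :intellisense_response message.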
def handle_event("intellisense_request", %{"cell_id" => cell_id} = params, socket) do
request =
case params do
%{"type" => "completion", "hint" => hint} ->
{:completion, hint}
%{"type" => "details", "line" => line, "column" => column} ->
column = JSInterop.js_column_to_elixir(column, line)
{:details, line, column}
%{"type" => "signature", "hint" => hint} ->
{:signature, hint}
%{"type" => "format", "code" => code} ->
{:format, code}
end
data = socket.private.data
with {:ok, cell, section} <- Notebook.fetch_cell_and_section(data.notebook, cell_id) do
if data.runtime do
ref = make_ref()
prev_locator = Session.find_prev_locator(data.notebook, cell, section)
Runtime.handle_intellisense(data.runtime, self(), ref, request, prev_locator)
{:reply, %{"ref" => inspect(ref)}, socket}
else
info =
case params["type"] do
"completion" ->
"You need to start a runtime (or evaluate a cell) for code completion"
"format" ->
"You need to start a runtime (or evaluate a cell) to enable code formatting"
_ ->
nil
end
socket = if info, do: put_flash(socket, :info, info), else: socket
{:reply, %{"ref" => nil}, socket}
end
else
_ -> {:noreply, socket}
end
end
def handle_event("fork_session", %{}, socket) do
%{pid: pid, images_dir: images_dir} = socket.assigns.session
# Fetch the data, as we don't keep cells' source in the state
data = Session.get_data(pid)
notebook = Notebook.forked(data.notebook)
{:noreply, create_session(socket, notebook: notebook, copy_images_from: images_dir)}
end
def handle_event("erase_outputs", %{}, socket) do
Session.erase_outputs(socket.assigns.session.pid)
{:noreply, socket}
end
def handle_event("location_report", report, socket) do
Phoenix.PubSub.broadcast_from(
Livebook.PubSub,
self(),
"sessions:#{socket.assigns.session.id}",
{:location_report, self(), report}
)
{:noreply, socket}
end
def handle_event("format_code", %{"code" => code}, socket) do
formatted =
try do
code
|> Code.format_string!()
|> IO.iodata_to_binary()
rescue
_ -> code
end
{:reply, %{code: formatted}, socket}
end
@impl true
def handle_info({:operation, operation}, socket) do
case Session.Data.apply_operation(socket.private.data, operation) do
{:ok, data, actions} ->
new_socket =
socket
|> assign_private(data: data)
|> assign(data_view: update_data_view(socket.assigns.data_view, data, operation))
|> after_operation(socket, operation)
|> handle_actions(actions)
{:noreply, new_socket}
:error ->
{:noreply, socket}
end
end
def handle_info({:error, error}, socket) do
message = error |> to_string() |> upcase_first()
{:noreply, put_flash(socket, :error, message)}
end
def handle_info({:info, info}, socket) do
message = info |> to_string() |> upcase_first()
{:noreply, put_flash(socket, :info, message)}
end
def handle_info({:hydrate_bin_entries, hydrated_entries}, socket) do
hydrated_entries_map = Map.new(hydrated_entries, fn entry -> {entry.cell.id, entry} end)
data =
Map.update!(socket.private.data, :bin_entries, fn bin_entries ->
Enum.map(bin_entries, fn entry ->
case Map.fetch(hydrated_entries_map, entry.cell.id) do
{:ok, hydrated_entry} -> hydrated_entry
:error -> entry
end
end)
end)
{:noreply,
socket
|> assign_private(data: data)
|> assign(data_view: data_to_view(data))}
end
def handle_info({:session_updated, session}, socket) do
{:noreply, assign(socket, :session, session)}
end
def handle_info(:session_closed, socket) do
{:noreply,
socket
|> put_flash(:info, "Session has been closed")
|> push_redirect(to: Routes.home_path(socket, :page))}
end
def handle_info({:intellisense_response, ref, request, response}, socket) do
response = process_intellisense_response(response, request)
payload = %{"ref" => inspect(ref), "response" => response}
{:noreply, push_event(socket, "intellisense_response", payload)}
end
def handle_info({:location_report, client_pid, report}, socket) do
report = Map.put(report, :client_pid, inspect(client_pid))
{:noreply, push_event(socket, "location_report", report)}
end
def handle_info({:set_input_value, input_id, value}, socket) do
Session.set_input_value(socket.assigns.session.pid, input_id, value)
{:noreply, socket}
end
def handle_info({:queue_bound_cells_evaluation, input_id}, socket) do
for {bound_cell, _} <- Session.Data.bound_cells_with_section(socket.private.data, input_id) do
Session.queue_cell_evaluation(socket.assigns.session.pid, bound_cell.id)
end
{:noreply, socket}
end
def handle_info(_message, socket), do: {:noreply, socket}
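  # Only relative links with the Live Markdown extension are treated as notebooks; other paths are reported as unrecognised.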
defp handle_relative_path(socket, path) do
cond do
String.ends_with?(path, LiveMarkdown.extension()) ->
handle_relative_notebook_path(socket, path)
true ->
socket
|> put_flash(
:error,
"Got unrecognised session path: #{path}\nIf you want to link another notebook, make sure to include the .livemd extension"
)
|> redirect_to_self()
end
end
defp handle_relative_notebook_path(socket, relative_path) do
resolution_location = location(socket.private.data)
case resolution_location do
nil ->
socket
|> put_flash(
:info,
"Cannot resolve notebook path #{relative_path}, because the current notebook has no location"
)
|> redirect_to_self()
resolution_location ->
origin = Livebook.ContentLoader.resolve_location(resolution_location, relative_path)
case session_id_by_location(origin) do
{:ok, session_id} ->
push_redirect(socket, to: Routes.session_path(socket, :page, session_id))
{:error, :none} ->
open_notebook(socket, origin)
{:error, :many} ->
origin_str =
case origin do
{:url, url} -> url
{:file, file} -> file.path
end
socket
|> put_flash(
:error,
"Cannot navigate, because multiple sessions were found for #{origin_str}"
)
|> redirect_to_self()
end
end
end
defp location(data)
defp location(%{file: file}) when is_map(file), do: {:file, file}
defp location(%{origin: origin}), do: origin
defp open_notebook(socket, origin) do
case Livebook.ContentLoader.fetch_content_from_location(origin) do
{:ok, content} ->
{notebook, messages} = Livebook.LiveMarkdown.Import.notebook_from_markdown(content)
# If the current session has no path, fork the notebook
fork? = socket.private.data.file == nil
{file, notebook} = file_and_notebook(fork?, origin, notebook)
socket
|> put_import_warnings(messages)
|> create_session(notebook: notebook, origin: origin, file: file)
{:error, message} ->
socket
|> put_flash(:error, "Cannot navigate, " <> message)
|> redirect_to_self()
end
end
defp file_and_notebook(fork?, origin, notebook)
defp file_and_notebook(false, {:file, file}, notebook), do: {file, notebook}
defp file_and_notebook(true, {:file, _file}, notebook), do: {nil, Notebook.forked(notebook)}
defp file_and_notebook(_fork?, _origin, notebook), do: {nil, notebook}
defp session_id_by_location(location) do
sessions = Sessions.list_sessions()
session_with_file =
Enum.find(sessions, fn session ->
session.file && {:file, session.file} == location
end)
# A session associated with the given file takes
# precedence over sessions originating from this file
if session_with_file do
{:ok, session_with_file.id}
else
sessions
|> Enum.filter(fn session -> session.origin == location end)
|> case do
[session] -> {:ok, session.id}
[] -> {:error, :none}
_ -> {:error, :many}
end
end
end
defp redirect_to_self(socket) do
push_patch(socket, to: Routes.session_path(socket, :page, socket.assigns.session.id))
end
defp after_operation(socket, _prev_socket, {:client_join, client_pid, user}) do
push_event(socket, "client_joined", %{client: client_info(client_pid, user)})
end
defp after_operation(socket, _prev_socket, {:client_leave, client_pid}) do
push_event(socket, "client_left", %{client_pid: inspect(client_pid)})
end
defp after_operation(socket, _prev_socket, {:update_user, _client_pid, user}) do
updated_clients =
socket.private.data.clients_map
|> Enum.filter(fn {_client_pid, user_id} -> user_id == user.id end)
|> Enum.map(fn {client_pid, _user_id} -> client_info(client_pid, user) end)
push_event(socket, "clients_updated", %{clients: updated_clients})
end
defp after_operation(socket, _prev_socket, {:insert_section, client_pid, _index, section_id}) do
if client_pid == self() do
push_event(socket, "section_inserted", %{section_id: section_id})
else
socket
end
end
defp after_operation(
socket,
_prev_socket,
{:insert_section_into, client_pid, _section_id, _index, section_id}
) do
if client_pid == self() do
push_event(socket, "section_inserted", %{section_id: section_id})
else
socket
end
end
defp after_operation(
socket,
_prev_socket,
{:delete_section, _client_pid, section_id, _delete_cells}
) do
push_event(socket, "section_deleted", %{section_id: section_id})
end
defp after_operation(socket, _prev_socket, {:insert_cell, client_pid, _, _, _, cell_id}) do
if client_pid == self() do
push_event(socket, "cell_inserted", %{cell_id: cell_id})
else
socket
end
end
defp after_operation(socket, prev_socket, {:delete_cell, _client_pid, cell_id}) do
# Find a sibling cell that the client would focus if the deleted cell has focus.
sibling_cell_id =
case Notebook.fetch_cell_sibling(prev_socket.private.data.notebook, cell_id, 1) do
{:ok, next_cell} ->
next_cell.id
:error ->
case Notebook.fetch_cell_sibling(prev_socket.private.data.notebook, cell_id, -1) do
{:ok, previous_cell} -> previous_cell.id
:error -> nil
end
end
push_event(socket, "cell_deleted", %{cell_id: cell_id, sibling_cell_id: sibling_cell_id})
end
defp after_operation(socket, _prev_socket, {:restore_cell, client_pid, cell_id}) do
if client_pid == self() do
push_event(socket, "cell_restored", %{cell_id: cell_id})
else
socket
end
end
defp after_operation(socket, _prev_socket, {:move_cell, client_pid, cell_id, _offset}) do
if client_pid == self() do
push_event(socket, "cell_moved", %{cell_id: cell_id})
else
socket
end
end
defp after_operation(socket, _prev_socket, {:move_section, client_pid, section_id, _offset}) do
if client_pid == self() do
push_event(socket, "section_moved", %{section_id: section_id})
else
socket
end
end
defp after_operation(
socket,
_prev_socket,
{:evaluation_started, _client_pid, cell_id, evaluation_digest}
) do
push_event(socket, "evaluation_started:#{cell_id}", %{
evaluation_digest: encode_digest(evaluation_digest)
})
end
defp after_operation(socket, _prev_socket, _operation), do: socket
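  # Actions returned by Data.apply_operation are folded over the socket; currently only delta broadcasts are handled.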
defp handle_actions(socket, actions) do
Enum.reduce(actions, socket, &handle_action(&2, &1))
end
defp handle_action(socket, {:broadcast_delta, client_pid, cell, delta}) do
if client_pid == self() do
push_event(socket, "cell_acknowledgement:#{cell.id}", %{})
else
push_event(socket, "cell_delta:#{cell.id}", %{delta: Delta.to_compressed(delta)})
end
end
defp handle_action(socket, _action), do: socket
defp client_info(pid, user) do
%{pid: inspect(pid), hex_color: user.hex_color, name: user.name || "Anonymous"}
end
defp normalize_name(name) do
name
|> String.trim()
|> String.replace(~r/\s+/, " ")
|> case do
"" -> "Untitled"
name -> name
end
end
def upcase_first(string) do
{head, tail} = String.split_at(string, 1)
String.upcase(head) <> tail
end
defp section_with_next_index(notebook, section_id, cell_id)
defp section_with_next_index(notebook, section_id, nil) do
with {:ok, section} <- Notebook.fetch_section(notebook, section_id) do
{:ok, section, 0}
end
end
defp section_with_next_index(notebook, _section_id, cell_id) do
with {:ok, cell, section} <- Notebook.fetch_cell_and_section(notebook, cell_id) do
index = Enum.find_index(section.cells, &(&1 == cell))
{:ok, section, index + 1}
end
end
defp ensure_integer(n) when is_integer(n), do: n
defp ensure_integer(n) when is_binary(n), do: String.to_integer(n)
defp encode_digest(nil), do: nil
defp encode_digest(digest), do: Base.encode64(digest)
defp remove_cell_source(socket, cell_id) do
update_in(socket.private.data.notebook, fn notebook ->
Notebook.update_cell(notebook, cell_id, &%{&1 | source: nil})
end)
end
defp process_intellisense_response(
%{range: %{from: from, to: to}} = response,
{:details, line, _column}
) do
%{
response
| range: %{
from: JSInterop.elixir_column_to_js(from, line),
to: JSInterop.elixir_column_to_js(to, line)
}
}
end
# Currently we don't use signature docs, so we optimise the response
# to exclude them
defp process_intellisense_response(
%{signature_items: signature_items} = response,
{:signature, _hint}
) do
%{response | signature_items: Enum.map(signature_items, &%{&1 | documentation: nil})}
end
defp process_intellisense_response(response, _request), do: response
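  # Autofocus only when the notebook consists of a single empty cell, i.e. a brand-new notebook.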
defp autofocus_cell_id(%Notebook{sections: [%{cells: [%{id: id, source: ""}]}]}), do: id
defp autofocus_cell_id(_notebook), do: nil
# Builds view-specific structure of data by cherry-picking
# only the relevant attributes.
# We then use `@data_view` in the templates and consequently
# irrelevant changes to data don't change `@data_view`, so LV doesn't
# have to traverse the whole template tree and no diff is sent to the client.
defp data_to_view(data) do
%{
file: data.file,
persist_outputs: data.notebook.persist_outputs,
autosave_interval_s: data.notebook.autosave_interval_s,
dirty: data.dirty,
runtime: data.runtime,
global_status: global_status(data),
notebook_name: data.notebook.name,
sections_items:
for section <- data.notebook.sections do
%{
id: section.id,
name: section.name,
parent: parent_section_view(section.parent_id, data),
status: cells_status(section.cells, data)
}
end,
clients:
data.clients_map
|> Enum.map(fn {client_pid, user_id} -> {client_pid, data.users_map[user_id]} end)
|> Enum.sort_by(fn {_client_pid, user} -> user.name end),
section_views: section_views(data.notebook.sections, data),
bin_entries: data.bin_entries,
created_at: DateTime.now!("Etc/UTC")
}
end
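  # Status for a list of cells by priority: an evaluating cell wins over a stale one, then the last evaluated cell, otherwise fresh.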
defp cells_status(cells, data) do
cond do
evaluating = Enum.find(cells, &evaluating?(&1, data)) ->
{:evaluating, evaluating.id}
stale = Enum.find(cells, &stale?(&1, data)) ->
{:stale, stale.id}
evaluated = Enum.find(Enum.reverse(cells), &evaluated?(&1, data)) ->
{:evaluated, evaluated.id}
true ->
{:fresh, nil}
end
end
defp global_status(data) do
cells =
data.notebook
|> Notebook.elixir_cells_with_section()
|> Enum.map(fn {cell, _} -> cell end)
cells_status(cells, data)
end
defp evaluating?(cell, data), do: data.cell_infos[cell.id].evaluation_status == :evaluating
defp stale?(cell, data), do: data.cell_infos[cell.id].validity_status == :stale
defp evaluated?(cell, data), do: data.cell_infos[cell.id].validity_status == :evaluated
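  # Builds the view data for each section, pairing it with an HTML id derived from the section name.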
defp section_views(sections, data) do
sections
|> Enum.map(& &1.name)
|> names_to_html_ids()
|> Enum.zip(sections)
|> Enum.map(fn {html_id, section} ->
%{
id: section.id,
html_id: html_id,
name: section.name,
parent: parent_section_view(section.parent_id, data),
has_children?: Notebook.child_sections(data.notebook, section.id) != [],
valid_parents:
for parent <- Notebook.valid_parents_for(data.notebook, section.id) do
%{id: parent.id, name: parent.name}
end,
cell_views: Enum.map(section.cells, &cell_to_view(&1, data))
}
end)
end
defp parent_section_view(nil, _data), do: nil
defp parent_section_view(parent_id, data) do
{:ok, section} = Notebook.fetch_section(data.notebook, parent_id)
%{id: section.id, name: section.name}
end
defp cell_to_view(%Cell.Elixir{} = cell, data) do
info = data.cell_infos[cell.id]
%{
id: cell.id,
type: :elixir,
# Note: we need this during initial loading,
# at which point we still have the source
empty?: cell.source == "",
outputs: cell.outputs,
validity_status: info.validity_status,
evaluation_status: info.evaluation_status,
evaluation_time_ms: info.evaluation_time_ms,
evaluation_start: info.evaluation_start,
number_of_evaluations: info.number_of_evaluations,
reevaluate_automatically: cell.reevaluate_automatically,
# Pass input values relevant to the given cell
input_values: input_values_for_cell(cell, data)
}
end
defp cell_to_view(%Cell.Markdown{} = cell, _data) do
%{
id: cell.id,
type: :markdown,
# Note: we need this during initial loading,
# at which point we still have the source
empty?: cell.source == ""
}
end
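  # Only the input values referenced by the cell's outputs are passed to the cell view.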
defp input_values_for_cell(cell, data) do
input_ids =
for output <- cell.outputs,
attrs <- Cell.Elixir.find_inputs_in_output(output),
do: attrs.id
Map.take(data.input_values, input_ids)
end
# Updates current data_view in response to an operation.
# In most cases we simply recompute data_view, but for the
# most common ones we only update the relevant parts.
defp update_data_view(data_view, data, operation) do
case operation do
{:report_cell_revision, _pid, _cell_id, _revision} ->
data_view
{:apply_cell_delta, _pid, cell_id, _delta, _revision} ->
data_view
|> update_cell_view(data, cell_id)
|> update_dirty_status(data)
_ ->
data_to_view(data)
end
end
defp update_cell_view(data_view, data, cell_id) do
{:ok, cell, section} = Notebook.fetch_cell_and_section(data.notebook, cell_id)
cell_view = cell_to_view(cell, data)
put_in(
data_view,
[:section_views, access_by_id(section.id), :cell_views, access_by_id(cell.id)],
cell_view
)
end
# Changes that affect only a single cell are still likely to
# have impact on dirtiness, so we need to always mirror it
defp update_dirty_status(data_view, data) do
put_in(data_view.dirty, data.dirty)
end
end
| 33.82416 | 185 | 0.615406 |
7920bb3eb05bfe180dbff8feaa87d033a4cb03e4 | 1,110 | ex | Elixir | clients/books/lib/google_api/books/v1/model/books_annotations_range.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | ["Apache-2.0"] | null | null | null | clients/books/lib/google_api/books/v1/model/books_annotations_range.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | ["Apache-2.0"] | null | null | null | clients/books/lib/google_api/books/v1/model/books_annotations_range.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | ["Apache-2.0"] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Books.V1.Model.BooksAnnotationsRange do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"endOffset",
:"endPosition",
:"startOffset",
:"startPosition"
]
end
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.BooksAnnotationsRange do
def decode(value, _options) do
value
end
end
| 27.75 | 78 | 0.737838 |
7920c3493fcf1be0d3d15f69bfb4d76ddb24d6f3 | 739 | ex | Elixir | web/views/stat_block_view.ex | Enkidatron/Dnd_Tracker | e373e5375cd78f26b4f5afc0c54eb6a91826ba60 | ["MIT"] | null | null | null | web/views/stat_block_view.ex | Enkidatron/Dnd_Tracker | e373e5375cd78f26b4f5afc0c54eb6a91826ba60 | ["MIT"] | 6 | 2016-10-13T15:00:18.000Z | 2016-10-17T18:40:38.000Z | web/views/stat_block_view.ex | Enkidatron/Dnd_Tracker | e373e5375cd78f26b4f5afc0c54eb6a91826ba60 | ["MIT"] | null | null | null |
defmodule DndTracker.StatBlockView do
use DndTracker.Web, :view
def render("index.json", %{stat_blocks: stat_blocks}) do
%{data: render_many(stat_blocks, DndTracker.StatBlockView, "stat_block.json")}
end
def render("show.json", %{stat_block: stat_block}) do
%{data: render_one(stat_block, DndTracker.StatBlockView, "stat_block.json")}
end
def render("stat_block.json", %{stat_block: stat_block}) do
%{id: stat_block.id,
user: stat_block.user,
name: stat_block.name,
initiative: stat_block.initiative,
health: stat_block.health,
numDie: stat_block.numDie,
dieFace: stat_block.dieFace,
bonusHealth: stat_block.bonusHealth,
useHitDie: stat_block.useHitDie}
end
end
| 30.791667 | 82 | 0.709066 |
792109a8c5f23e52182697017f49d1e1ab79d0a0 | 2,959 | exs | Elixir | test/ja_serializer/deserializer_test.exs | strzibny/ja_serializer | 9823ada739ec1f0db9f14bd29f62a701dbd3b094 | ["Apache-2.0"] | 322 | 2016-11-18T22:58:17.000Z | 2022-01-18T15:07:24.000Z | test/ja_serializer/deserializer_test.exs | strzibny/ja_serializer | 9823ada739ec1f0db9f14bd29f62a701dbd3b094 | ["Apache-2.0"] | 150 | 2016-11-17T20:14:51.000Z | 2021-03-12T03:56:57.000Z | test/ja_serializer/deserializer_test.exs | strzibny/ja_serializer | 9823ada739ec1f0db9f14bd29f62a701dbd3b094 | ["Apache-2.0"] | 104 | 2016-11-17T18:02:36.000Z | 2021-04-06T09:52:16.000Z |
defmodule JaSerializer.DeserializerTest do
use ExUnit.Case
use Plug.Test
defmodule ExamplePlug do
use Plug.Builder
plug(Plug.Parsers, parsers: [:json], json_decoder: Poison)
plug(JaSerializer.Deserializer)
plug(:return)
def return(conn, _opts) do
send_resp(conn, 200, "success")
end
end
setup do
on_exit(fn ->
Application.delete_env(:ja_serializer, :key_format)
end)
:ok
end
@ct "application/vnd.api+json"
test "Ignores bodyless requests" do
conn =
Plug.Test.conn("GET", "/")
|> put_req_header("content-type", @ct)
|> put_req_header("accept", @ct)
result = ExamplePlug.call(conn, [])
assert result.params == %{}
end
test "converts non-jsonapi.org format params" do
req_body = Poison.encode!(%{"some-nonsense" => "yup"})
conn =
Plug.Test.conn("POST", "/", req_body)
|> put_req_header("content-type", @ct)
|> put_req_header("accept", @ct)
result = ExamplePlug.call(conn, [])
assert result.params == %{"some_nonsense" => "yup"}
end
test "converts attribute key names" do
req_body =
Poison.encode!(%{
"data" => %{
"attributes" => %{
"some-nonsense" => true,
"foo-bar" => true,
"some-map" => %{
"nested-key" => "unaffected-values"
}
}
}
})
conn =
Plug.Test.conn("POST", "/", req_body)
|> put_req_header("content-type", @ct)
|> put_req_header("accept", @ct)
result = ExamplePlug.call(conn, [])
assert result.params["data"]["attributes"]["some_nonsense"]
assert result.params["data"]["attributes"]["foo_bar"]
assert result.params["data"]["attributes"]["some_map"]["nested_key"]
end
test "converts query param key names - dasherized" do
req_body = Poison.encode!(%{"data" => %{}})
conn =
Plug.Test.conn("POST", "/?page[page-size]=2", req_body)
|> put_req_header("content-type", @ct)
|> put_req_header("accept", @ct)
result = ExamplePlug.call(conn, [])
assert result.params["page"]["page_size"] == "2"
end
test "converts query param key names - underscored" do
Application.put_env(:ja_serializer, :key_format, :underscored)
req_body = Poison.encode!(%{"data" => %{}})
conn =
Plug.Test.conn("POST", "/?page[page_size]=2", req_body)
|> put_req_header("content-type", @ct)
|> put_req_header("accept", @ct)
result = ExamplePlug.call(conn, [])
assert result.query_params["page"]["page_size"] == "2"
end
test "retains payload type" do
req_body =
Poison.encode!(%{
"data" => %{
"type" => "foo"
}
})
conn =
Plug.Test.conn("POST", "/", req_body)
|> put_req_header("content-type", @ct)
|> put_req_header("accept", @ct)
result = ExamplePlug.call(conn, [])
assert result.params["data"]["type"] == "foo"
end
end
| 25.508621 | 72 | 0.582291 |
792109f3394023773085b2a2af6ebd8f15dab57c | 1,135 | exs | Elixir | config/config.exs | BoringButGreat/public_key_utils | 1fb28c1c58c3edf69e5e434af9140a50cb123f4f | ["BSD-3-Clause"] | 3 | 2017-04-18T19:30:56.000Z | 2020-07-23T09:49:49.000Z | config/config.exs | BoringButGreat/public_key_utils | 1fb28c1c58c3edf69e5e434af9140a50cb123f4f | ["BSD-3-Clause"] | null | null | null | config/config.exs | BoringButGreat/public_key_utils | 1fb28c1c58c3edf69e5e434af9140a50cb123f4f | ["BSD-3-Clause"] | 1 | 2021-11-09T20:36:06.000Z | 2021-11-09T20:36:06.000Z |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :public_key_utils, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:public_key_utils, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.612903 | 73 | 0.755066 |
7921251e987e8a906eed176142c3f99171034913 | 2,773 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1beta1/model/listen_response.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | ["Apache-2.0"] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/listen_response.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | ["Apache-2.0"] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/listen_response.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | ["Apache-2.0"] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Firestore.V1beta1.Model.ListenResponse do
@moduledoc """
The response for Firestore.Listen.
## Attributes
- documentChange (DocumentChange): A Document has changed. Defaults to: `null`.
- documentDelete (DocumentDelete): A Document has been deleted. Defaults to: `null`.
- documentRemove (DocumentRemove): A Document has been removed from a target (because it is no longer relevant to that target). Defaults to: `null`.
- filter (ExistenceFilter): A filter to apply to the set of documents previously returned for the given target. Returned when documents may have been removed from the given target, but the exact documents are unknown. Defaults to: `null`.
- targetChange (TargetChange): Targets have changed. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:documentChange => GoogleApi.Firestore.V1beta1.Model.DocumentChange.t(),
:documentDelete => GoogleApi.Firestore.V1beta1.Model.DocumentDelete.t(),
:documentRemove => GoogleApi.Firestore.V1beta1.Model.DocumentRemove.t(),
:filter => GoogleApi.Firestore.V1beta1.Model.ExistenceFilter.t(),
:targetChange => GoogleApi.Firestore.V1beta1.Model.TargetChange.t()
}
field(:documentChange, as: GoogleApi.Firestore.V1beta1.Model.DocumentChange)
field(:documentDelete, as: GoogleApi.Firestore.V1beta1.Model.DocumentDelete)
field(:documentRemove, as: GoogleApi.Firestore.V1beta1.Model.DocumentRemove)
field(:filter, as: GoogleApi.Firestore.V1beta1.Model.ExistenceFilter)
field(:targetChange, as: GoogleApi.Firestore.V1beta1.Model.TargetChange)
end
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1beta1.Model.ListenResponse do
def decode(value, options) do
GoogleApi.Firestore.V1beta1.Model.ListenResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Firestore.V1beta1.Model.ListenResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 46.216667 | 241 | 0.759827 |
792153461c548d42f9d617381bf3123103eab124 | 7,218 | ex | Elixir | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/api/express_route_cross_connection_peerings.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | ["Apache-2.0"] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/api/express_route_cross_connection_peerings.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | ["Apache-2.0"] | null | null | null | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/api/express_route_cross_connection_peerings.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | ["Apache-2.0"] | null | null | null |
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Network.Api.ExpressRouteCrossConnectionPeerings do
@moduledoc """
API calls for all endpoints tagged `ExpressRouteCrossConnectionPeerings`.
"""
alias Microsoft.Azure.Management.Network.Connection
import Microsoft.Azure.Management.Network.RequestBuilder
@doc """
Creates or updates a peering in the specified ExpressRouteCrossConnection.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- resource_group_name (String.t): The name of the resource group.
- cross_connection_name (String.t): The name of the ExpressRouteCrossConnection.
- peering_name (String.t): The name of the peering.
- peering_parameters (ExpressRouteCrossConnectionPeering): Parameters supplied to the create or update ExpressRouteCrossConnection peering operation.
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeering{}} on success
{:error, info} on failure
"""
@spec express_route_cross_connection_peerings_create_or_update(Tesla.Env.client, String.t, String.t, String.t, Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeering.t, String.t, String.t, keyword()) :: {:ok, Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeering.t} | {:error, Tesla.Env.t}
def express_route_cross_connection_peerings_create_or_update(connection, resource_group_name, cross_connection_name, peering_name, peering_parameters, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:put)
|> url("/subscriptions/#{subscription_id}/resourceGroups/#{resource_group_name}/providers/Microsoft.Network/expressRouteCrossConnections/#{cross_connection_name}/peerings/#{peering_name}")
|> add_param(:body, :body, peering_parameters)
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeering{})
end
@doc """
Deletes the specified peering from the ExpressRouteCrossConnection.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- resource_group_name (String.t): The name of the resource group.
- cross_connection_name (String.t): The name of the ExpressRouteCrossConnection.
- peering_name (String.t): The name of the peering.
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec express_route_cross_connection_peerings_delete(Tesla.Env.client, String.t, String.t, String.t, String.t, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def express_route_cross_connection_peerings_delete(connection, resource_group_name, cross_connection_name, peering_name, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:delete)
|> url("/subscriptions/#{subscription_id}/resourceGroups/#{resource_group_name}/providers/Microsoft.Network/expressRouteCrossConnections/#{cross_connection_name}/peerings/#{peering_name}")
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(false)
end
@doc """
Gets the specified peering for the ExpressRouteCrossConnection.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- resource_group_name (String.t): The name of the resource group.
- cross_connection_name (String.t): The name of the ExpressRouteCrossConnection.
- peering_name (String.t): The name of the peering.
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeering{}} on success
{:error, info} on failure
"""
@spec express_route_cross_connection_peerings_get(Tesla.Env.client, String.t, String.t, String.t, String.t, String.t, keyword()) :: {:ok, Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeering.t} | {:error, Tesla.Env.t}
def express_route_cross_connection_peerings_get(connection, resource_group_name, cross_connection_name, peering_name, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:get)
|> url("/subscriptions/#{subscription_id}/resourceGroups/#{resource_group_name}/providers/Microsoft.Network/expressRouteCrossConnections/#{cross_connection_name}/peerings/#{peering_name}")
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeering{})
end
@doc """
Gets all peerings in a specified ExpressRouteCrossConnection.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- resource_group_name (String.t): The name of the resource group.
- cross_connection_name (String.t): The name of the ExpressRouteCrossConnection.
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeeringList{}} on success
{:error, info} on failure
"""
@spec express_route_cross_connection_peerings_list(Tesla.Env.client, String.t, String.t, String.t, String.t, keyword()) :: {:ok, Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeeringList.t} | {:error, Tesla.Env.t}
def express_route_cross_connection_peerings_list(connection, resource_group_name, cross_connection_name, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:get)
|> url("/subscriptions/#{subscription_id}/resourceGroups/#{resource_group_name}/providers/Microsoft.Network/expressRouteCrossConnections/#{cross_connection_name}/peerings")
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%Microsoft.Azure.Management.Network.Model.ExpressRouteCrossConnectionPeeringList{})
end
end
| 55.099237 | 334 | 0.764616 |
79218d754ffd9a4f236cbf417bc1e2ae540e4140 | 9,266 | ex | Elixir | installer/lib/phx_new/generator.ex | sunaku/phoenix | f06696118f17af71b78571583fcfaf6e388b2bd9 | ["MIT"] | null | null | null | installer/lib/phx_new/generator.ex | sunaku/phoenix | f06696118f17af71b78571583fcfaf6e388b2bd9 | ["MIT"] | null | null | null | installer/lib/phx_new/generator.ex | sunaku/phoenix | f06696118f17af71b78571583fcfaf6e388b2bd9 | ["MIT"] | null | null | null |
defmodule Phx.New.Generator do
@moduledoc false
import Mix.Generator
alias Phx.New.{Project}
@phoenix Path.expand("../..", __DIR__)
@phoenix_version Version.parse!("1.4.0")
@callback prepare_project(Project.t) :: Project.t
@callback generate(Project.t) :: Project.t
defmacro __using__(_env) do
quote do
@behaviour unquote(__MODULE__)
import unquote(__MODULE__)
import Mix.Generator
Module.register_attribute(__MODULE__, :templates, accumulate: true)
@before_compile unquote(__MODULE__)
end
end
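  # At compile time each registered template is read from disk and embedded as a render/2 clause, so the installer does not need the template files at runtime.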
defmacro __before_compile__(env) do
root = Path.expand("../../templates", __DIR__)
templates_ast = for {name, mappings} <- Module.get_attribute(env.module, :templates) do
for {format, source, _, _} <- mappings, format != :keep do
path = Path.join(root, source)
quote do
@external_resource unquote(path)
def render(unquote(name), unquote(source)), do: unquote(File.read!(path))
end
end
end
quote do
unquote(templates_ast)
def template_files(name), do: Keyword.fetch!(@templates, name)
# Embed missing files from Phoenix static.
embed_text :phoenix_js, from_file: Path.expand("../../templates/phx_assets/phoenix.js", unquote(__DIR__))
embed_text :phoenix_png, from_file: Path.expand("../../templates/phx_assets/phoenix.png", unquote(__DIR__))
embed_text :phoenix_favicon, from_file: Path.expand("../../templates/phx_assets/favicon.ico", unquote(__DIR__))
end
end
defmacro template(name, mappings) do
quote do
@templates {unquote(name), unquote(mappings)}
end
end
def copy_from(%Project{} = project, mod, name) when is_atom(name) do
mapping = mod.template_files(name)
for {format, source, project_location, target_path} <- mapping do
target = Project.join_path(project, project_location, target_path)
case format do
:keep ->
File.mkdir_p!(target)
:text ->
create_file(target, mod.render(name, source))
:append ->
append_to(Path.dirname(target), Path.basename(target), mod.render(name, source))
:eex ->
contents = EEx.eval_string(mod.render(name, source), project.binding, file: source)
create_file(target, contents)
end
end
end
def append_to(path, file, contents) do
file = Path.join(path, file)
File.write!(file, File.read!(file) <> contents)
end
def in_umbrella?(app_path) do
umbrella = Path.expand(Path.join [app_path, "..", ".."])
mix_path = Path.join(umbrella, "mix.exs")
apps_path = Path.join(umbrella, "apps")
File.exists?(mix_path) && File.exists?(apps_path)
end
def put_binding(%Project{opts: opts} = project) do
db = Keyword.get(opts, :database, "postgres")
ecto = Keyword.get(opts, :ecto, true)
html = Keyword.get(opts, :html, true)
webpack = Keyword.get(opts, :webpack, true)
dev = Keyword.get(opts, :dev, false)
phoenix_path = phoenix_path(project, dev)
# We lowercase the database name because according to the
# SQL spec, they are case insensitive unless quoted, which
# means creating a database like FoO is the same as foo in
# some storages.
{adapter_app, adapter_module, adapter_config} =
get_ecto_adapter(db, String.downcase(project.app), project.app_mod)
pubsub_server = get_pubsub_server(project.app_mod)
adapter_config =
case Keyword.fetch(opts, :binary_id) do
{:ok, value} -> Keyword.put_new(adapter_config, :binary_id, value)
:error -> adapter_config
end
version = @phoenix_version
binding = [
elixir_version: elixir_version(),
app_name: project.app,
app_module: inspect(project.app_mod),
root_app_name: project.root_app,
root_app_module: inspect(project.root_mod),
lib_web_name: project.lib_web_name,
web_app_name: project.web_app,
endpoint_module: inspect(Module.concat(project.web_namespace, Endpoint)),
web_namespace: inspect(project.web_namespace),
phoenix_github_version_tag: "v#{version.major}.#{version.minor}.#{version.patch}",
phoenix_dep: phoenix_dep(phoenix_path),
phoenix_path: phoenix_path,
phoenix_webpack_path: phoenix_webpack_path(project, dev),
phoenix_html_webpack_path: phoenix_html_webpack_path(project),
phoenix_static_path: phoenix_static_path(phoenix_path),
pubsub_server: pubsub_server,
secret_key_base: random_string(64),
prod_secret_key_base: random_string(64),
signing_salt: random_string(8),
in_umbrella: project.in_umbrella?,
webpack: webpack,
ecto: ecto,
html: html,
adapter_app: adapter_app,
adapter_module: adapter_module,
adapter_config: adapter_config,
generators: nil_if_empty(project.generators ++ adapter_generators(adapter_config)),
namespaced?: namespaced?(project),
]
%Project{project | binding: binding}
end
defp elixir_version do
System.version()
end
defp namespaced?(project) do
Macro.camelize(project.app) != inspect(project.app_mod)
end
def gen_ecto_config(%Project{app_path: app_path, binding: binding}) do
adapter_config = binding[:adapter_config]
append_to app_path, "config/dev.exs", """
# Configure your database
config :#{binding[:app_name]}, #{binding[:app_module]}.Repo#{kw_to_config adapter_config[:dev]},
pool_size: 10
"""
append_to app_path, "config/test.exs", """
# Configure your database
config :#{binding[:app_name]}, #{binding[:app_module]}.Repo#{kw_to_config adapter_config[:test]}
"""
append_to app_path, "config/prod.secret.exs", """
# Configure your database
config :#{binding[:app_name]}, #{binding[:app_module]}.Repo#{kw_to_config adapter_config[:prod]},
pool_size: 15
"""
end
defp get_pubsub_server(module) do
module
|> Module.split()
|> hd()
|> Module.concat(PubSub)
end
defp get_ecto_adapter("mysql", app, module) do
{:mariaex, Ecto.Adapters.MySQL, db_config(app, module, "root", "")}
end
defp get_ecto_adapter("postgres", app, module) do
{:postgrex, Ecto.Adapters.Postgres, db_config(app, module, "postgres", "postgres")}
end
defp get_ecto_adapter("mssql", app, module) do
{:mssql_ecto, MssqlEcto, db_config(app, module, "sa", "")}
end
defp get_ecto_adapter(db, _app, _mod) do
Mix.raise "Unknown database #{inspect db}"
end
defp db_config(app, module, user, pass) do
[dev: [username: user, password: pass, database: "#{app}_dev", hostname: "localhost"],
test: [username: user, password: pass, database: "#{app}_test", hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox],
prod: [username: user, password: pass, database: "#{app}_prod"],
test_setup_all: "Ecto.Adapters.SQL.Sandbox.mode(#{inspect module}.Repo, :manual)",
test_setup: ":ok = Ecto.Adapters.SQL.Sandbox.checkout(#{inspect module}.Repo)",
test_async: "Ecto.Adapters.SQL.Sandbox.mode(#{inspect module}.Repo, {:shared, self()})"]
end
defp kw_to_config(kw) do
Enum.map(kw, fn {k, v} ->
",\n #{k}: #{inspect v}"
end)
end
defp adapter_generators(adapter_config) do
adapter_config
|> Keyword.take([:binary_id, :migration, :sample_binary_id])
|> Enum.filter(fn {_, value} -> not is_nil(value) end)
end
defp nil_if_empty([]), do: nil
defp nil_if_empty(other), do: other
defp phoenix_path(%Project{} = project, true) do
absolute = Path.expand(project.project_path)
relative = Path.relative_to(absolute, @phoenix)
if absolute == relative do
Mix.raise "--dev projects must be generated inside Phoenix directory"
end
project
|> phoenix_path_prefix()
|> Path.join(relative)
|> Path.split()
|> Enum.map(fn _ -> ".." end)
|> Path.join()
end
defp phoenix_path(%Project{}, false) do
"deps/phoenix"
end
defp phoenix_path_prefix(%Project{in_umbrella?: true}), do: "../../../"
defp phoenix_path_prefix(%Project{in_umbrella?: false}), do: ".."
defp phoenix_webpack_path(%Project{in_umbrella?: true}, true = _dev),
do: "../../../../../"
defp phoenix_webpack_path(%Project{in_umbrella?: true}, false = _dev),
do: "../../../deps/phoenix"
defp phoenix_webpack_path(%Project{in_umbrella?: false}, true = _dev),
do: "../../../"
defp phoenix_webpack_path(%Project{in_umbrella?: false}, false = _dev),
do: "../deps/phoenix"
defp phoenix_html_webpack_path(%Project{in_umbrella?: true}),
do: "../../../deps/phoenix_html"
defp phoenix_html_webpack_path(%Project{in_umbrella?: false}),
do: "../deps/phoenix_html"
defp phoenix_dep("deps/phoenix"), do: ~s[{:phoenix, "~> #{@phoenix_version}"}]
# defp phoenix_dep("deps/phoenix"), do: ~s[{:phoenix, github: "phoenixframework/phoenix", override: true}]
defp phoenix_dep(path), do: ~s[{:phoenix, path: #{inspect path}, override: true}]
defp phoenix_static_path("deps/phoenix"), do: "deps/phoenix"
defp phoenix_static_path(path), do: Path.join("..", path)
defp random_string(length) do
:crypto.strong_rand_bytes(length) |> Base.encode64 |> binary_part(0, length)
end
end
| 34.966038 | 117 | 0.669005 |
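To make the `gen_ecto_config/1`, `db_config/4`, and `kw_to_config/1` helpers above concrete, this is a sketch of the block they would append to `config/dev.exs` for a hypothetical `:demo` application generated with the `postgres` adapter (the app name and repo module are assumptions, not part of the original source):

```elixir
# Illustrative generator output only, assuming an app named :demo
# Configure your database
config :demo, Demo.Repo,
  username: "postgres",
  password: "postgres",
  database: "demo_dev",
  hostname: "localhost",
  pool_size: 10
```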
7921954a350ed905d2b0a4344b936e6f234a76d5 | 515 | ex | Elixir | phoenix/discuss/web/models/user.ex | pshaddel/turbo-Elixir | fb5fc900ad40c963f885b75d94f4fe360d69bcb8 | [
"MIT"
] | 1 | 2021-04-13T06:48:57.000Z | 2021-04-13T06:48:57.000Z | phoenix/discuss/web/models/user.ex | pshaddel/turbo-Elixir | fb5fc900ad40c963f885b75d94f4fe360d69bcb8 | [
"MIT"
] | null | null | null | phoenix/discuss/web/models/user.ex | pshaddel/turbo-Elixir | fb5fc900ad40c963f885b75d94f4fe360d69bcb8 | [
"MIT"
] | null | null | null | defmodule Discuss.User do
use Discuss.Web, :model
@derive {Poison.Encoder, only: [:name, :email]}
schema "users" do
field(:name, :string)
field(:provider, :string)
field(:token, :string)
field(:email, :string)
has_many(:topics, Discuss.Topic)
has_many(:comments, Discuss.Comment)
timestamps()
end
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:email, :provider, :token, :name])
|> validate_required([:email, :provider, :token, :name])
end
end
| 24.52381 | 60 | 0.638835 |
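A brief usage sketch of the `changeset/2` function above; the attribute values are invented for illustration:

```elixir
changeset =
  Discuss.User.changeset(%Discuss.User{}, %{
    "email" => "jane@example.com",
    "provider" => "github",
    "token" => "abc123",
    "name" => "Jane"
  })

# All four required fields are present, so the changeset is valid.
changeset.valid?
#=> true
```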
79219ad6678ab375efb866a8b61a2d8a69b86636 | 71 | exs | Elixir | test/views/layout_view_test.exs | bcoop713/rumbl | 831982b86f8f4e6540b6d481e36e2e3c3470b5b1 | [
"MIT"
] | 1 | 2016-09-19T01:31:35.000Z | 2016-09-19T01:31:35.000Z | test/views/layout_view_test.exs | bcoop713/rumbl | 831982b86f8f4e6540b6d481e36e2e3c3470b5b1 | [
"MIT"
] | null | null | null | test/views/layout_view_test.exs | bcoop713/rumbl | 831982b86f8f4e6540b6d481e36e2e3c3470b5b1 | [
"MIT"
] | null | null | null | defmodule Rumbl.LayoutViewTest do
use Rumbl.ConnCase, async: true
end | 23.666667 | 33 | 0.816901 |
79226a01f4ffc0447b716f1a2161b6bb7203cf8f | 184 | exs | Elixir | lib/mix/test/fixtures/umbrella_dep/mix.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | 4 | 2015-12-22T02:46:39.000Z | 2016-04-26T06:11:09.000Z | lib/mix/test/fixtures/umbrella_dep/mix.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/fixtures/umbrella_dep/mix.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | defmodule UmbrellaDep.Mixfile do
use Mix.Project
def project do
[app: :umbrella_dep,
deps: deps]
end
defp deps do
[{:umbrella, path: "deps/umbrella"}]
end
end
| 14.153846 | 40 | 0.652174 |
79229571989409b10d857d3b4b411357881bd2b1 | 5,938 | ex | Elixir | oeml-sdk/elixir/lib/oeml_restapi/api/orders.ex | scorninpc/coinapi-sdk | e4ff6b79a26bb412ab9ac5f2d1e4a7cef560b1e7 | [
"MIT"
] | null | null | null | oeml-sdk/elixir/lib/oeml_restapi/api/orders.ex | scorninpc/coinapi-sdk | e4ff6b79a26bb412ab9ac5f2d1e4a7cef560b1e7 | [
"MIT"
] | null | null | null | oeml-sdk/elixir/lib/oeml_restapi/api/orders.ex | scorninpc/coinapi-sdk | e4ff6b79a26bb412ab9ac5f2d1e4a7cef560b1e7 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule OEML-RESTAPI.Api.Orders do
@moduledoc """
API calls for all endpoints tagged `Orders`.
"""
alias OEML-RESTAPI.Connection
import OEML-RESTAPI.RequestBuilder
@doc """
Cancel all orders request
This request cancels all open orders on single specified exchange.
## Parameters
- connection (OEML-RESTAPI.Connection): Connection to server
- order_cancel_all_request (OrderCancelAllRequest): OrderCancelAllRequest object.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %OEML-RESTAPI.Model.Message{}} on success
{:error, info} on failure
"""
@spec v1_orders_cancel_all_post(Tesla.Env.client, OEML-RESTAPI.Model.OrderCancelAllRequest.t, keyword()) :: {:ok, OEML-RESTAPI.Model.Message.t} | {:ok, OEML-RESTAPI.Model.ValidationError.t} | {:error, Tesla.Env.t}
def v1_orders_cancel_all_post(connection, order_cancel_all_request, _opts \\ []) do
%{}
|> method(:post)
|> url("/v1/orders/cancel/all")
|> add_param(:body, :body, order_cancel_all_request)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %OEML-RESTAPI.Model.Message{}},
{ 400, %OEML-RESTAPI.Model.ValidationError{}},
{ 490, %OEML-RESTAPI.Model.Message{}}
])
end
@doc """
Cancel order request
Request cancel for an existing order. The order can be canceled using the `client_order_id` or `exchange_order_id`.
## Parameters
- connection (OEML-RESTAPI.Connection): Connection to server
- order_cancel_single_request (OrderCancelSingleRequest): OrderCancelSingleRequest object.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %OEML-RESTAPI.Model.OrderExecutionReport{}} on success
{:error, info} on failure
"""
@spec v1_orders_cancel_post(Tesla.Env.client, OEML-RESTAPI.Model.OrderCancelSingleRequest.t, keyword()) :: {:ok, OEML-RESTAPI.Model.OrderExecutionReport.t} | {:ok, OEML-RESTAPI.Model.Message.t} | {:ok, OEML-RESTAPI.Model.ValidationError.t} | {:error, Tesla.Env.t}
def v1_orders_cancel_post(connection, order_cancel_single_request, _opts \\ []) do
%{}
|> method(:post)
|> url("/v1/orders/cancel")
|> add_param(:body, :body, order_cancel_single_request)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %OEML-RESTAPI.Model.OrderExecutionReport{}},
{ 400, %OEML-RESTAPI.Model.ValidationError{}},
{ 490, %OEML-RESTAPI.Model.Message{}}
])
end
@doc """
Get open orders
Get last execution reports for open orders across all or single exchange.
## Parameters
- connection (OEML-RESTAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :exchange_id (String.t): Filter the open orders to the specific exchange.
## Returns
{:ok, [%OrderExecutionReport{}, ...]} on success
{:error, info} on failure
"""
@spec v1_orders_get(Tesla.Env.client, keyword()) :: {:ok, OEML-RESTAPI.Model.Message.t} | {:ok, list(OEML-RESTAPI.Model.OrderExecutionReport.t)} | {:error, Tesla.Env.t}
def v1_orders_get(connection, opts \\ []) do
optional_params = %{
:"exchange_id" => :query
}
%{}
|> method(:get)
|> url("/v1/orders")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, [%OEML-RESTAPI.Model.OrderExecutionReport{}]},
{ 490, %OEML-RESTAPI.Model.Message{}}
])
end
@doc """
Send new order
This request creating new order for the specific exchange.
## Parameters
- connection (OEML-RESTAPI.Connection): Connection to server
- order_new_single_request (OrderNewSingleRequest): OrderNewSingleRequest object.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %OEML-RESTAPI.Model.OrderExecutionReport{}} on success
{:error, info} on failure
"""
@spec v1_orders_post(Tesla.Env.client, OEML-RESTAPI.Model.OrderNewSingleRequest.t, keyword()) :: {:ok, OEML-RESTAPI.Model.OrderExecutionReport.t} | {:ok, OEML-RESTAPI.Model.Message.t} | {:ok, OEML-RESTAPI.Model.ValidationError.t} | {:error, Tesla.Env.t}
def v1_orders_post(connection, order_new_single_request, _opts \\ []) do
%{}
|> method(:post)
|> url("/v1/orders")
|> add_param(:body, :body, order_new_single_request)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %OEML-RESTAPI.Model.OrderExecutionReport{}},
{ 400, %OEML-RESTAPI.Model.ValidationError{}},
{ 490, %OEML-RESTAPI.Model.Message{}},
{ 504, %OEML-RESTAPI.Model.Message{}}
])
end
@doc """
Get order execution report
Get the last order execution report for the specified order. The requested order does not need to be active or opened.
## Parameters
- connection (OEML-RESTAPI.Connection): Connection to server
- client_order_id (String.t): The unique identifier of the order assigned by the client.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %OEML-RESTAPI.Model.OrderExecutionReport{}} on success
{:error, info} on failure
"""
@spec v1_orders_status_client_order_id_get(Tesla.Env.client, String.t, keyword()) :: {:ok, OEML-RESTAPI.Model.OrderExecutionReport.t} | {:ok, OEML-RESTAPI.Model.Message.t} | {:error, Tesla.Env.t}
def v1_orders_status_client_order_id_get(connection, client_order_id, _opts \\ []) do
%{}
|> method(:get)
|> url("/v1/orders/status/#{client_order_id}")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %OEML-RESTAPI.Model.OrderExecutionReport{}},
{ 404, %OEML-RESTAPI.Model.Message{}}
])
end
end
| 37.1125 | 265 | 0.685584 |
7922a02cd3ed073a24f29144fda6b093a3110d9c | 69 | ex | Elixir | lib/restfully/repo.ex | marcsugiyama/restfully | 8352bc5718c1298c836ed72fc9d7b5cd4a9695bb | [
"Apache-2.0"
] | 1 | 2018-03-14T23:48:57.000Z | 2018-03-14T23:48:57.000Z | lib/restfully/repo.ex | marcsugiyama/restfully | 8352bc5718c1298c836ed72fc9d7b5cd4a9695bb | [
"Apache-2.0"
] | null | null | null | lib/restfully/repo.ex | marcsugiyama/restfully | 8352bc5718c1298c836ed72fc9d7b5cd4a9695bb | [
"Apache-2.0"
] | 2 | 2018-06-04T12:37:32.000Z | 2021-06-15T11:45:05.000Z | defmodule Restfully.Repo do
use Ecto.Repo, otp_app: :restfully
end
| 17.25 | 36 | 0.782609 |
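Because the repo above reads its settings from the `:restfully` OTP app, it needs matching configuration; the sketch below uses placeholder values that are not taken from the original project:

```elixir
# config/config.exs (placeholder values, Ecto 2-style adapter option)
config :restfully, Restfully.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "restfully_dev",
  hostname: "localhost"
```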
7922a970743da9acb5c25e6d2faa4beaf15c9087 | 389 | exs | Elixir | priv/repo/migrations/20171008095044_create_input_types.exs | vahidabdi/escala_api | 84a4a3ef832180f12c6197683933d8cd0ab35ef4 | [
"MIT"
] | null | null | null | priv/repo/migrations/20171008095044_create_input_types.exs | vahidabdi/escala_api | 84a4a3ef832180f12c6197683933d8cd0ab35ef4 | [
"MIT"
] | null | null | null | priv/repo/migrations/20171008095044_create_input_types.exs | vahidabdi/escala_api | 84a4a3ef832180f12c6197683933d8cd0ab35ef4 | [
"MIT"
] | null | null | null | defmodule Escala.Repo.Migrations.CreateInputTypes do
use Ecto.Migration
def change do
create table(:input_types, primary_key: false) do
add :id, :uuid, primary_key: true, default: fragment("uuid_generate_v4()")
add :name, :text, null: false
add :has_option, :boolean, default: false, null: false
end
create unique_index(:input_types, [:name])
end
end
| 27.785714 | 80 | 0.699229 |
7922b6a79af9b8cab124ab991592ca12aa7422d1 | 2,421 | ex | Elixir | plugins/one_chat/lib/one_chat_web/views/client_view.ex | smpallen99/ucx_ucc | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 11 | 2017-05-15T18:35:05.000Z | 2018-02-05T18:27:40.000Z | plugins/one_chat/lib/one_chat_web/views/client_view.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 15 | 2017-11-27T10:38:05.000Z | 2018-02-09T20:42:08.000Z | plugins/one_chat/lib/one_chat_web/views/client_view.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 4 | 2017-09-13T11:34:16.000Z | 2018-02-26T13:37:06.000Z | defmodule OneChatWeb.ClientView do
use OneChatWeb, :view
def page_loading do
"""
<style>
#initial-page-loading .loading-animation {
background: linear-gradient(to top, #6c6c6c 0%, #aaaaaa 100%);
z-index: 1000;
}
.loading-animation {
top: 0;
right: 0;
left: 0;
display: flex;
align-items: center;
position: absolute;
justify-content: center;
text-align: center;
z-index: 100;
height: 100% !important;
}
.loading-animation > div {
width: 10px;
height: 10px;
margin: 2px;
border-radius: 100%;
display: inline-block;
background-color: rgba(255,255,255,0.6);
-webkit-animation: loading-bouncedelay 1.4s infinite ease-in-out both;
animation: loading-bouncedelay 1.4s infinite ease-in-out both;
}
.loading-animation .bounce1 {
-webkit-animation-delay: -0.32s;
animation-delay: -0.32s;
}
.loading-animation .bounce2 {
-webkit-animation-delay: -0.16s;
animation-delay: -0.16s;
}
@-webkit-keyframes loading-bouncedelay {
0%,
80%,
100% { -webkit-transform: scale(0) }
40% { -webkit-transform: scale(1.0) }
}
@keyframes loading-bouncedelay {
0%,
80%,
100% { transform: scale(0); }
40% { transform: scale(1.0); }
}
.page-loading-container {
position: absolute;
top: 0; right: 0; left: 0; bottom: 0;
z-index: 5000;
background: black;
opacity: 0.8;
}
.page-loading-container .loading-animation > div {
background-color: #eee !important;
}
.loading-animation.light_on_dark {
background-color: rgba(0,0,0,0.7);
position: fixed;
right: 40px;
left: unset;
bottom: 0;
height: 100%;
width: 400px;
}
.loading-animation.light_on_dark > div {
background-color: #eee !important;
}
</style>
"""
end
def loadmore do
~s(<li class="load-more"></li>)
end
def loading_animation(class \\ :default) do
"""
<div class="loading-animation #{class}">
<div class="bounce1"></div>
<div class="bounce2"></div>
<div class="bounce3"></div>
</div>
"""
|> String.replace("\n", "")
end
end
| 25.484211 | 78 | 0.537794 |
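As a usage sketch, the `loading_animation/1` helper above can be dropped into any template that renders raw HTML; the surrounding template line is hypothetical:

```elixir
# In a (hypothetical) .html.eex template:
<%= raw OneChatWeb.ClientView.loading_animation(:light_on_dark) %>
# Renders the bouncing-dot markup defined above, with newlines already
# stripped by the helper itself.
```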
7923095ab736978e76d59d31524960d4d27a7a41 | 982 | ex | Elixir | lib/weather_api/cache/location_cache.ex | vegashat/weather-api | d06ce0bf809bd98710367c459920bc5cef384f94 | [
"MIT"
] | null | null | null | lib/weather_api/cache/location_cache.ex | vegashat/weather-api | d06ce0bf809bd98710367c459920bc5cef384f94 | [
"MIT"
] | null | null | null | lib/weather_api/cache/location_cache.ex | vegashat/weather-api | d06ce0bf809bd98710367c459920bc5cef384f94 | [
"MIT"
] | null | null | null | defmodule WeatherApi.Cache.LocationCache do
use GenServer
alias WeatherApi.Models.Location
def start_link do
GenServer.start_link(__MODULE__, %{}, name: :location_cache)
end
def init(_) do
state = %{
location: %{}
}
{:ok, state}
end
def set_location(%Location{} = location) do
GenServer.cast(:location_cache, {:set_location, location})
end
def get_location(name) do
GenServer.call(:location_cache, {:get_location, name})
end
# Callbacks
def handle_call({:get_location, name}, _from, state) do
location_map = Map.get(state, :location)
location = Map.get(location_map, name)
{:reply, location, state}
end
def handle_cast({:set_location, %Location{} = location}, state) do
key = location.name
state = put_in(state, [:location, key], location)
{:noreply, state}
end
end | 25.842105 | 71 | 0.588595 |
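A quick usage sketch of the GenServer cache above; only the `:name` field of `%Location{}` is shown, since the rest of that struct is defined elsewhere in the project:

```elixir
{:ok, _pid} = WeatherApi.Cache.LocationCache.start_link()

location = %WeatherApi.Models.Location{name: "Las Vegas"}

# Asynchronous write, synchronous read keyed by location.name:
:ok = WeatherApi.Cache.LocationCache.set_location(location)
WeatherApi.Cache.LocationCache.get_location("Las Vegas")
#=> %WeatherApi.Models.Location{name: "Las Vegas"}
```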
79231e1158dab09cf6e08ed2130fdda46fa18293 | 6,118 | ex | Elixir | lib/glimesh_web/live/support_modal/sub_form.ex | mfaqiri/glimesh.tv | 59eeb56bb0a5574b7b6168d9487660f41a457fd4 | [
"MIT"
] | null | null | null | lib/glimesh_web/live/support_modal/sub_form.ex | mfaqiri/glimesh.tv | 59eeb56bb0a5574b7b6168d9487660f41a457fd4 | [
"MIT"
] | null | null | null | lib/glimesh_web/live/support_modal/sub_form.ex | mfaqiri/glimesh.tv | 59eeb56bb0a5574b7b6168d9487660f41a457fd4 | [
"MIT"
] | null | null | null | defmodule GlimeshWeb.SupportModal.SubForm do
use GlimeshWeb, :live_view
alias Glimesh.Accounts
alias Glimesh.Payments
@impl true
def render(assigns) do
~L"""
<div>
<%= if @subscribed do %>
<%= if @canceling do %>
<h5><%= gettext("Resubscribe") %></h5>
<%= if @stripe_error do %>
<div class="alert alert-danger" role="alert">
<%= @stripe_error %>
</div>
<% end %>
<p><%= gettext("Your subscription is currently set to automatically cancel on %{date}.", date: format_datetime(@subscription.ended_at)) %></p>
<p><%= gettext("You can resubscribe by clicking the button below, and your subscription will be renewed until you cancel it.") %></p>
<button class="btn btn-primary btn-block btn-lg" phx-click="resubscribe" phx-throttle="5000"><%= gettext("Resubscribe") %></button>
<% else %>
<h4 class="mt-2"><%= gettext("You're subscribed!") %></h4>
<p><%= gettext("Thanks for supporting %{streamer}. Your genuine support is appreciated.", streamer: @streamer.displayname) %></p>
<div class="text-center mt-4">
<h4>Channel Subscription<br>
<small><strong>$<%= format_price(@subscription.price) %></strong> /
<%= gettext("monthly") %></small></h4>
</div>
<button class="btn btn-danger btn-block mt-4" phx-click="unsubscribe" phx-throttle="1000"><%= gettext("Cancel Subscription") %></button>
<% end %>
<% else %>
<%= if @user do %>
<h5><%= gettext("Your Payment Information") %></h5>
<%= if @stripe_error do %>
<div class="alert alert-danger" role="alert">
<%= @stripe_error %>
</div>
<% end %>
<%= live_component GlimeshWeb.SubscriptionComponent, id: "subscription-component", type: :channel, user: @user, streamer: @streamer, product_id: @product_id, price_id: @price_id, price: @price %>
<% else %>
<h4 class="mt-4"><%= gettext("What is Glimesh?") %></h4>
<p class="">
<%= gettext("People first streaming, with discoverability as a primary feature. Let's build the next
generation of streaming.") %> <%= link gettext("Learn More"), to: Routes.about_path(@socket, :faq), target: "_blank" %>
</p>
<%= link gettext("Register"), class: "btn btn-primary btn-block mt-4", to: Routes.user_registration_path(@socket, :new), target: "_blank" %>
<p class="mt-2 text-center">or <%= link gettext("Log in"), class: "", to: Routes.user_session_path(@socket, :new), target: "_blank" %></p>
<% end %>
<% end %>
</div>
"""
end
@impl true
def mount(_params, %{"streamer" => streamer, "user" => nil} = session, socket) do
if session["locale"], do: Gettext.put_locale(session["locale"])
{:ok,
socket
|> assign(:streamer, streamer)
|> assign(:can_subscribe, false)
|> assign(:user, nil)
|> assign(:subscribed, false)
|> assign(:canceling, false)}
end
@impl true
def mount(_params, %{"streamer" => streamer, "user" => user} = session, socket) do
if session["locale"], do: Gettext.put_locale(session["locale"])
subscription = Glimesh.Payments.get_channel_subscription(user, streamer)
can_subscribe = if Accounts.can_use_payments?(user), do: user.id != streamer.id, else: false
can_receive_payments = Accounts.can_receive_payments?(streamer)
subscribed = !is_nil(subscription)
{:ok,
socket
|> assign(:stripe_public_key, Application.get_env(:stripity_stripe, :public_api_key))
|> assign(:stripe_customer_id, Accounts.get_stripe_customer_id(user))
|> assign(:stripe_payment_method, user.stripe_payment_method)
|> assign(:stripe_error, nil)
|> assign(:product_id, Payments.get_channel_sub_base_product_id())
|> assign(:price_id, Payments.get_channel_sub_base_price_id())
|> assign(:price, Payments.get_channel_sub_base_price())
|> assign(:streamer, streamer)
|> assign(:user, user)
|> assign(:can_subscribe, can_subscribe && can_receive_payments && Glimesh.has_launched?())
|> assign(:canceling, if(subscription, do: subscription.is_canceling, else: false))
|> assign(:subscription, subscription)
|> assign(:subscribed, subscribed)}
end
@impl true
def handle_event("subscriptions.subscribe", %{"paymentMethodId" => payment_method}, socket) do
streamer = socket.assigns.streamer
user = socket.assigns.user
with {:ok, user} <- Payments.set_payment_method(user, payment_method),
{:ok, subscription} <-
Payments.subscribe_to_channel(
user,
streamer,
socket.assigns.product_id,
socket.assigns.price_id
) do
{:reply, subscription,
socket
|> assign(:user, Accounts.get_user!(user.id))
|> assign(:show_subscription, false)
|> assign(
:subscribed,
Payments.has_channel_subscription?(socket.assigns.user, socket.assigns.streamer)
)}
else
{:error, error_msg} ->
{:noreply,
socket |> assign(:user, Accounts.get_user!(user.id)) |> assign(:stripe_error, error_msg)}
end
end
@impl true
def handle_event("unsubscribe", _value, socket) do
streamer = socket.assigns.streamer
user = socket.assigns.user
subscription = Payments.get_channel_subscription!(user, streamer)
case Payments.unsubscribe(subscription) do
{:ok, _} ->
{:noreply, socket |> assign(:canceling, true)}
{:error, error_msg} ->
{:noreply, socket |> assign(:stripe_error, error_msg)}
end
end
@impl true
def handle_event("resubscribe", _value, socket) do
streamer = socket.assigns.streamer
user = socket.assigns.user
subscription = Payments.get_channel_subscription!(user, streamer)
case Payments.resubscribe(subscription) do
{:ok, _} ->
{:noreply, socket |> assign(:show_resub_modal, false) |> assign(:canceling, false)}
{:error, error_msg} ->
{:noreply, socket |> assign(:stripe_error, error_msg)}
end
end
end
| 39.217949 | 203 | 0.628637 |
792323caa9d85f4926a63814a69de5fb521fcb07 | 450 | exs | Elixir | test/shared/cache_test.exs | alexandrubagu/nebulex | c8870a91644545f7a10a6af825ef6822a40fadf4 | [
"MIT"
] | 845 | 2017-02-14T14:16:11.000Z | 2022-03-30T04:13:08.000Z | test/shared/cache_test.exs | alexandrubagu/nebulex | c8870a91644545f7a10a6af825ef6822a40fadf4 | [
"MIT"
] | 146 | 2017-04-29T16:11:14.000Z | 2022-03-29T08:49:05.000Z | test/shared/cache_test.exs | alexandrubagu/nebulex | c8870a91644545f7a10a6af825ef6822a40fadf4 | [
"MIT"
] | 50 | 2017-08-17T13:44:06.000Z | 2022-03-30T11:29:59.000Z | defmodule Nebulex.CacheTest do
@moduledoc """
Shared Tests
"""
defmacro __using__(_opts) do
quote do
use Nebulex.Cache.EntryTest
use Nebulex.Cache.EntryExpirationTest
use Nebulex.Cache.EntryPropTest
use Nebulex.Cache.QueryableTest
use Nebulex.Cache.TransactionTest
use Nebulex.Cache.PersistenceTest
use Nebulex.Cache.PersistenceErrorTest
use Nebulex.Cache.DeprecatedTest
end
end
end
| 23.684211 | 44 | 0.722222 |
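A structural sketch of how an adapter's test module would pull in the shared suites through the `__using__/1` macro above; the module name is hypothetical, and the injected suites still rely on Nebulex's own test-support conventions (for example, a cache started in `setup`):

```elixir
defmodule MyApp.LocalCacheTest do
  use ExUnit.Case, async: true
  use Nebulex.CacheTest

  # Entry, expiration, queryable, transaction and persistence tests
  # from the shared modules are injected here by the macro above.
end
```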
79232e6ad5e3a51721978a07740ac99427ebbe4f | 1,451 | ex | Elixir | lib/lavapotion/stage/producer.ex | SamOphis/lavapotion | ccc5b144cc0eff554fc27dacf215e83842e13810 | [
"Apache-2.0"
] | 3 | 2018-11-25T00:57:43.000Z | 2019-05-16T13:13:26.000Z | lib/lavapotion/stage/producer.ex | SamOphis/lavapotion | ccc5b144cc0eff554fc27dacf215e83842e13810 | [
"Apache-2.0"
] | 2 | 2018-12-13T22:54:49.000Z | 2018-12-13T22:58:48.000Z | lib/lavapotion/stage/producer.ex | SamOphis/lavapotion | ccc5b144cc0eff554fc27dacf215e83842e13810 | [
"Apache-2.0"
] | 1 | 2019-01-16T23:16:37.000Z | 2019-01-16T23:16:37.000Z | # Copyright 2018 Sam Pritchard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule LavaPotion.Stage.Producer do
use GenStage
def start_link() do
GenStage.start_link(__MODULE__, [], name: __MODULE__)
end
def init(_state) do
{:producer, {:queue.new(), 0}}
end
def notify(event) do
GenStage.cast(__MODULE__, {:notify, event})
end
def handle_demand(new, {queue, demand}) do
queue_events({demand + new, []}, queue)
end
def handle_cast({:notify, event}, {queue, demand}) do
queue = :queue.in(event, queue)
queue_events({demand, []}, queue)
end
defp queue_events({0, current}, queue) do
{:noreply, Enum.reverse(current), {queue, 0}}
end
defp queue_events({demand, current}, queue) do
case :queue.out(queue) do
{{:value, val}, queue} ->
queue_events({demand - 1, [val | current]}, queue)
_ ->
{:noreply, Enum.reverse(current), {queue, demand}}
end
end
end | 29.02 | 74 | 0.684356 |
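A sketch of how the producer above is typically wired up; `MyConsumer` and the event payload are hypothetical:

```elixir
{:ok, producer} = LavaPotion.Stage.Producer.start_link()
{:ok, consumer} = GenStage.start_link(MyConsumer, :ok)
{:ok, _subscription} = GenStage.sync_subscribe(consumer, to: producer)

# Events are buffered in the queue until a consumer has outstanding demand:
LavaPotion.Stage.Producer.notify({:player_update, %{guild_id: 123}})
```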
79235247d5dd05a12816f19611e55138540d1054 | 1,102 | exs | Elixir | config/dev.exs | wsmoak/my_app_810758 | 2fe9e2ed05f9df4577676e2029910ff59d6fce89 | [
"MIT"
] | null | null | null | config/dev.exs | wsmoak/my_app_810758 | 2fe9e2ed05f9df4577676e2029910ff59d6fce89 | [
"MIT"
] | null | null | null | config/dev.exs | wsmoak/my_app_810758 | 2fe9e2ed05f9df4577676e2029910ff59d6fce89 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :my_app_810758, MyApp_810758.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
cache_static_lookup: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin"]]
# Watch static and templates for browser reloading.
config :my_app_810758, MyApp_810758.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Configure your database
config :my_app_810758, MyApp_810758.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "my_app_810758_dev",
size: 10 # The amount of database connections in the pool
| 30.611111 | 74 | 0.715971 |
79235d6d90fdcc750b05f261a4c48606f666dbff | 1,679 | ex | Elixir | clients/admin/lib/google_api/admin/directory_v1/model/chrome_os_device_recent_users.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/admin/lib/google_api/admin/directory_v1/model/chrome_os_device_recent_users.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/admin/lib/google_api/admin/directory_v1/model/chrome_os_device_recent_users.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceRecentUsers do
@moduledoc """
List of recent device users, in descending order, by last login time.
## Attributes
* `email` (*type:* `String.t`, *default:* `nil`) - The user's email address. This is only present if the user type is `USER_TYPE_MANAGED`.
* `type` (*type:* `String.t`, *default:* `nil`) - The type of the user.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:email => String.t() | nil,
:type => String.t() | nil
}
field(:email)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceRecentUsers do
def decode(value, options) do
GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceRecentUsers.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceRecentUsers do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.58 | 142 | 0.727219 |
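Given the `Poison.Decoder` implementation above, decoding works as in this sketch; the JSON payload is fabricated for illustration:

```elixir
json = ~s({"email": "student@example.edu", "type": "USER_TYPE_MANAGED"})

Poison.decode!(json,
  as: %GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceRecentUsers{}
)
#=> %GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceRecentUsers{
#     email: "student@example.edu",
#     type: "USER_TYPE_MANAGED"
#   }
```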
79236af00d40a26541b5b979c1c45459141d0f99 | 3,627 | exs | Elixir | test/rust/fn_test.exs | patefacio/kojin | 34f1959b7fa5fdc48e07ae6d81f8fa3ab31af982 | [
"MIT"
] | 3 | 2021-12-20T04:46:00.000Z | 2022-01-31T22:26:44.000Z | test/rust/fn_test.exs | patefacio/kojin | 34f1959b7fa5fdc48e07ae6d81f8fa3ab31af982 | [
"MIT"
] | 1 | 2021-12-20T14:46:50.000Z | 2021-12-20T14:46:50.000Z | test/rust/fn_test.exs | patefacio/kojin | 34f1959b7fa5fdc48e07ae6d81f8fa3ab31af982 | [
"MIT"
] | 1 | 2021-12-20T00:15:52.000Z | 2021-12-20T00:15:52.000Z | defmodule FnTest do
use ExUnit.Case
import Kojin
import Kojin.Rust.{Type, Fn, Parm, ToCode}
alias Kojin.Rust.Fn
import TestHelper
test "fn sigs" do
doc = "A simple function"
f = fun("f", doc, [])
assert f.name == :f
assert f.doc == doc
parm1 = parm(:a, :A, "An a")
f = fun(:f, doc, [parm1], :i32)
assert Enum.at(f.parms, 0) == parm1
assert f.return == type(:i32)
assert f.return_doc == ""
f = fun(:f, doc, [parm1], :i32, "calculated f")
assert f.return_doc == "calculated f"
mparm1 = parm(:a, :A, doc: "An a", mut: true)
f = fun(:f, doc, [mparm1])
assert String.contains?(Fn.code(f), "f(mut a: A)")
assert to_code(f) == "#{f}"
end
test "fn with no args" do
assert dark_matter(
fun(
:do_it,
"Foo with no return.",
[],
inline: true
)
) ==
dark_matter("""
/// Foo with no return.
#[inline]
fn do_it() {
// α <fn do_it>
// ω <fn do_it>
}
""")
end
test "fn with args and no return" do
assert dark_matter(
fun(
:do_it,
"Foo with no return.",
[
parm(:a, :A, doc: "Your basic A")
],
inline: true
)
) ==
dark_matter("""
/// Foo with no return.
///
/// * `a` - Your basic A
#[inline]
fn do_it(a: A) {
// α <fn do_it>
// ω <fn do_it>
}
""")
end
test "fn with args and return" do
dark_compare(
fun(
:do_it,
"Foo does your basic `foo` stuff.",
[
parm(:a, :A, doc: "Your basic A"),
parm(:b, :B, mut: true, doc: "The `b` to foo"),
parm(:c, :C, doc: "Required")
],
return: :i32,
inline: true
),
"""
/// Foo does your basic `foo` stuff.
///
/// * `a` - Your basic A
/// * `b` - The `b` to foo
/// * `c` - Required
/// * _return_ - TODO: document return
#[inline]
fn do_it(a: A, mut b: B, c: C) -> i32 {
// α <fn do_it>
// ω <fn do_it>
}
"""
)
end
test "fn no args, simplified return" do
dark_compare(
fun(:f, "An f.", [], return: {:i32, "Badabing"}),
"
/// An f.
///
/// * _return_ - Badabing
fn f() -> i32 {
// α <fn f>
// ω <fn f>
}
"
)
end
test "fn with generic" do
f =
fun(
:do_it,
"Magic do it function",
[
parm(:a, ref(:A, :a), doc: "Your basic A"),
parm(:b, mref(:B, :b), mut: true, doc: "The `b` to foo"),
[:c, :C, doc: "Required"],
[:d, :i32]
],
generic: [
type_parms: [:T1, [:T3, bounds: [:a, :b, "Infinite", "Collapsible", "Responsible"]]],
lifetimes: [:a, :b]
],
return: {:i32, "Foo"},
inline: true
)
dark_compare(f, """
/// Magic do it function
///
/// * `a` - Your basic A
/// * `b` - The `b` to foo
/// * `c` - Required
/// * `d` - TODO: Comment d
/// * _return_ - Foo
#[inline]
fn do_it<'a, 'b, T1, T3>(a: & 'a A, mut b: & 'b mut B, c: C, d: i32) -> i32
where
T3: 'a + 'b + Infinite + Collapsible + Responsible {
// α <fn do_it>
// ω <fn do_it>
}
""")
end
end
| 23.4 | 95 | 0.403364 |
7923e95a4b923d18042f9a4cc0617aec0ba6624a | 3,411 | exs | Elixir | test/igwet/admin/admin_test.exs | TheSwanFactory/igwet | 0a450686d1d222eb8e39e23ba5d2ea83657862d1 | [
"MIT"
] | null | null | null | test/igwet/admin/admin_test.exs | TheSwanFactory/igwet | 0a450686d1d222eb8e39e23ba5d2ea83657862d1 | [
"MIT"
] | 18 | 2018-02-25T11:13:46.000Z | 2022-03-28T03:43:38.000Z | test/igwet/admin/admin_test.exs | TheSwanFactory/igwet | 0a450686d1d222eb8e39e23ba5d2ea83657862d1 | [
"MIT"
] | 1 | 2019-01-04T12:16:47.000Z | 2019-01-04T12:16:47.000Z | defmodule Igwet.AdminTest do
use Igwet.DataCase
doctest Igwet.Admin
alias Igwet.Admin
describe "users" do
alias Igwet.Admin.User
@valid_attrs %{
authid: "some authid",
avatar: "some avatar",
last_login: ~N[2010-04-17 14:00:00],
name: "some name",
email: "[email protected]"
}
@update_attrs %{
authid: "some updated authid",
avatar: "some updated avatar",
last_login: ~N[2011-05-18 15:01:01],
name: "some updated name"
}
@invalid_attrs %{authid: nil, avatar: nil, last_login: nil, name: nil}
@next_attrs %{
authid: "next authid",
avatar: "next avatar",
last_login: ~N[2010-04-17 14:00:00],
name: "next name"
}
def user_fixture(attrs \\ %{}) do
{:ok, user} =
attrs
|> Enum.into(@valid_attrs)
|> Admin.create_user()
user
end
def first() do
user = user_fixture()
Admin.get_user!(user.id)
end
test "is_admin" do
user = user_fixture()
assert nil == Admin.is_admin(user)
assert true == Admin.is_admin(Admin.test_admin_user(true))
assert false == Admin.is_admin(Admin.test_admin_user(false))
end
test "list_users/0 returns all users" do
user = user_fixture()
assert Admin.list_users() == [user]
end
test "get_user!/1 returns the user with given id" do
user = user_fixture()
assert Admin.get_user!(user.id) == user
end
test "find_or_create_user/1 with existing data returns that user" do
first = first()
assert user = Admin.find_or_create_user(@valid_attrs)
assert %User{} = user
assert user.id == first.id
assert user.email == "[email protected]"
end
test "find_or_create_user/1 with new data creates user" do
first = first()
assert user = Admin.find_or_create_user(@next_attrs)
assert %User{} = user
assert user.id != first.id
end
test "create_user/1 with valid data creates a user" do
assert {:ok, %User{} = user} = Admin.create_user(@valid_attrs)
assert user.authid == "some authid"
assert user.avatar == "some avatar"
assert user.last_login == ~N[2010-04-17 14:00:00]
assert user.name == "some name"
end
test "create_user/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Admin.create_user(@invalid_attrs)
end
test "update_user/2 with valid data updates the user" do
user = user_fixture()
assert {:ok, user} = Admin.update_user(user, @update_attrs)
assert %User{} = user
assert user.authid == "some updated authid"
assert user.avatar == "some updated avatar"
assert user.last_login == ~N[2011-05-18 15:01:01]
assert user.name == "some updated name"
end
test "update_user/2 with invalid data returns error changeset" do
user = user_fixture()
assert {:error, %Ecto.Changeset{}} = Admin.update_user(user, @invalid_attrs)
assert user == Admin.get_user!(user.id)
end
test "delete_user/1 deletes the user" do
user = user_fixture()
assert {:ok, %User{}} = Admin.delete_user(user)
assert_raise Ecto.NoResultsError, fn -> Admin.get_user!(user.id) end
end
test "change_user/1 returns a user changeset" do
user = user_fixture()
assert %Ecto.Changeset{} = Admin.change_user(user)
end
end
end
| 29.405172 | 82 | 0.627382 |
7923eded6db36cc6b74c4330c5ad16126a03bf22 | 537 | exs | Elixir | config/config.exs | zacky1972/basic_math | 78f65a731fc57e2a45324fe55005151c29d45f54 | [
"Apache-2.0"
] | null | null | null | config/config.exs | zacky1972/basic_math | 78f65a731fc57e2a45324fe55005151c29d45f54 | [
"Apache-2.0"
] | null | null | null | config/config.exs | zacky1972/basic_math | 78f65a731fc57e2a45324fe55005151c29d45f54 | [
"Apache-2.0"
] | 1 | 2021-12-07T01:52:18.000Z | 2021-12-07T01:52:18.000Z | # `import Config` instead of `use Mix.Config` when Elixir 1.9+
use Mix.Config
if Mix.env() == :dev do
config :git_hooks,
auto_install: true,
verbose: true,
branches: [
whitelist: ["feature-.*"],
blacklist: ["main"]
],
hooks: [
pre_commit: [
tasks: [
{:cmd, "mix format --check-formatted"}
]
],
pre_push: [
tasks: [
{:cmd, "mix dialyzer"},
{:cmd, "mix test --color"},
{:cmd, "echo 'success!"}
]
]
]
end
| 19.888889 | 62 | 0.472998 |
7923f21e3901ce1ad70fa627ec2ded419ae07841 | 1,294 | exs | Elixir | mix.exs | Qqwy/elixir_currying | f673e2d24728ebe449a3f8752a2617f0d56dc7b7 | [
"MIT"
] | 29 | 2016-07-09T21:22:22.000Z | 2019-05-07T18:19:26.000Z | mix.exs | Qqwy/elixir-currying | f673e2d24728ebe449a3f8752a2617f0d56dc7b7 | [
"MIT"
] | 26 | 2019-08-30T05:26:02.000Z | 2022-03-24T07:14:22.000Z | mix.exs | Qqwy/elixir-currying | f673e2d24728ebe449a3f8752a2617f0d56dc7b7 | [
"MIT"
] | 2 | 2017-06-22T05:21:17.000Z | 2018-10-01T09:08:14.000Z | defmodule Currying.Mixfile do
use Mix.Project
def project do
[app: :currying,
version: "1.0.3",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
package: package(),
description: description()
]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:dialyxir, "~> 1.1", only: [:dev]},
{:ex_doc, ">= 0.0.0", only: :dev}
]
end
def package do
[
files: ["lib", "mix.exs", "README*", "LICENSE*"],
maintainers: ["Qqwy/Wiebe-Marten"],
licenses: ["MIT"],
links: %{Github: "https://github.com/qqwy/elixir_currying"}
]
end
defp description do
"""
The Currying library allows you to partially apply (or 'curry') any Elixir function, in a very transparent way.
It also optionally implements the infix operator `~>` as a synonym for currying.
"""
end
end
| 23.527273 | 115 | 0.595827 |
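To make the package description above concrete, here is a small sketch of the documented `curry/1` function and the optional `~>` operator (check the exact operator semantics against the library docs; this is only an illustration):

```elixir
import Currying

add = fn a, b, c -> a + b + c end

curry(add).(1).(2).(3)
#=> 6

# `~>` partially applies one argument at a time, per the description above:
(add ~> 1 ~> 2).(3)
#=> 6
```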
7923f5835946561fc7988beb117d4a6d3bfc17ec | 821 | ex | Elixir | lib/ipay88/signature.ex | thekirinlab/ipay88 | 9481f432a246a6e47cfd5c7954f200cdf4c88c0e | [
"Apache-2.0"
] | 6 | 2021-07-26T09:39:43.000Z | 2021-09-06T01:50:10.000Z | lib/ipay88/signature.ex | thekirinlab/ipay88 | 9481f432a246a6e47cfd5c7954f200cdf4c88c0e | [
"Apache-2.0"
] | null | null | null | lib/ipay88/signature.ex | thekirinlab/ipay88 | 9481f432a246a6e47cfd5c7954f200cdf4c88c0e | [
"Apache-2.0"
] | null | null | null | defmodule IPay88.Signature do
@moduledoc """
Generate signatures for iPay88
"""
alias IPay88.Config
@doc """
Generate signature using SHA-256
## Examples
iex> IPay88.Signature.generate_payment_request_signature("A00000001", "1.00", "MYR")
"110f0be755ccfa9373aa38104bafbc5c6e5462344e44bcfbb70439c82b4b07fa"
"""
def generate_payment_request_signature(ref_no, amount, currency \\ "MYR") do
[
Config.merchant_key(),
Config.merchant_code(),
ref_no,
strip_non_number(amount),
String.upcase(currency)
]
|> Enum.join("")
|> encode_string()
end
defp strip_non_number(amount) do
String.replace("#{amount}", ~r/\D/, "")
end
defp encode_string(string) do
:crypto.hash(:sha256, string) |> Base.encode16() |> String.downcase()
end
end
| 22.805556 | 90 | 0.671133 |
792419bb85ad61be197e56a6537cc26611b35a52 | 19,521 | exs | Elixir | test/basic/accounts_test.exs | ysaito8015/communitex | d469447a62029d59883d95df4df3c9b09e0022e2 | [
"Apache-2.0"
] | 7 | 2021-07-14T15:45:55.000Z | 2022-01-25T11:13:01.000Z | test/basic/accounts_test.exs | ysaito8015/communitex | d469447a62029d59883d95df4df3c9b09e0022e2 | [
"Apache-2.0"
] | 10 | 2021-08-09T15:54:05.000Z | 2022-02-17T04:18:38.000Z | test/basic/accounts_test.exs | ysaito8015/communitex | d469447a62029d59883d95df4df3c9b09e0022e2 | [
"Apache-2.0"
] | 5 | 2021-07-23T05:54:35.000Z | 2022-01-28T04:14:51.000Z | defmodule Basic.AccountsTest do
use Basic.DataCase
alias Basic.Accounts
import Basic.AccountsFixtures
alias Basic.Accounts.{User, UserToken}
describe "get_user_by_email/1" do
test "does not return the user if the email does not exist" do
refute Accounts.get_user_by_email("[email protected]")
end
test "returns the user if the email exists" do
%{id: id} = user = user_fixture()
assert %User{id: ^id} = Accounts.get_user_by_email(user.email)
end
end
describe "get_user_by_email_and_password/2" do
test "does not return the user if the email does not exist" do
refute Accounts.get_user_by_email_and_password("[email protected]", "hello world!")
end
test "does not return the user if the password is not valid" do
user = user_fixture()
refute Accounts.get_user_by_email_and_password(user.email, "invalid")
end
test "returns the user if the email and password are valid" do
%{id: id} = user = user_fixture()
assert %User{id: ^id} =
Accounts.get_user_by_email_and_password(user.email, valid_user_password())
end
end
describe "get_user!/1" do
test "raises if id is invalid" do
assert_raise Ecto.NoResultsError, fn ->
Accounts.get_user!(-1)
end
end
test "returns the user with the given id" do
%{id: id} = user = user_fixture()
assert %User{id: ^id} = Accounts.get_user!(user.id)
end
end
describe "register_user/1" do
test "requires email and password to be set" do
{:error, changeset} = Accounts.register_user(%{})
assert %{
password: ["can't be blank"],
email: ["can't be blank"]
} = errors_on(changeset)
end
test "validates email and password when given" do
{:error, changeset} = Accounts.register_user(%{email: "not valid", password: "not valid"})
assert %{
email: ["must have the @ sign and no spaces"],
password: ["should be at least 12 character(s)"]
} = errors_on(changeset)
end
test "validates maximum values for email and password for security" do
too_long = String.duplicate("db", 100)
{:error, changeset} = Accounts.register_user(%{email: too_long, password: too_long})
assert "should be at most 160 character(s)" in errors_on(changeset).email
assert "should be at most 80 character(s)" in errors_on(changeset).password
end
test "validates email uniqueness" do
%{email: email} = user_fixture()
{:error, changeset} = Accounts.register_user(%{email: email})
assert "has already been taken" in errors_on(changeset).email
# Now try with the upper cased email too, to check that email case is ignored.
{:error, changeset} = Accounts.register_user(%{email: String.upcase(email)})
assert "has already been taken" in errors_on(changeset).email
end
test "registers users with a hashed password" do
email = unique_user_email()
{:ok, user} = Accounts.register_user(valid_user_attributes(email: email))
assert user.email == email
assert is_binary(user.hashed_password)
assert is_nil(user.confirmed_at)
assert is_nil(user.password)
end
end
describe "change_user_registration/2" do
test "returns a changeset" do
assert %Ecto.Changeset{} = changeset = Accounts.change_user_registration(%User{})
assert changeset.required == [:password, :email]
end
test "allows fields to be set" do
email = unique_user_email()
password = valid_user_password()
changeset =
Accounts.change_user_registration(
%User{},
valid_user_attributes(email: email, password: password)
)
assert changeset.valid?
assert get_change(changeset, :email) == email
assert get_change(changeset, :password) == password
assert is_nil(get_change(changeset, :hashed_password))
end
end
describe "change_user_email/2" do
test "returns a user changeset" do
assert %Ecto.Changeset{} = changeset = Accounts.change_user_email(%User{})
assert changeset.required == [:email]
end
end
describe "apply_user_email/3" do
setup do
%{user: user_fixture()}
end
test "requires email to change", %{user: user} do
{:error, changeset} = Accounts.apply_user_email(user, valid_user_password(), %{})
assert %{email: ["did not change"]} = errors_on(changeset)
end
test "validates email", %{user: user} do
{:error, changeset} =
Accounts.apply_user_email(user, valid_user_password(), %{email: "not valid"})
assert %{email: ["must have the @ sign and no spaces"]} = errors_on(changeset)
end
test "validates maximum value for email for security", %{user: user} do
too_long = String.duplicate("db", 100)
{:error, changeset} =
Accounts.apply_user_email(user, valid_user_password(), %{email: too_long})
assert "should be at most 160 character(s)" in errors_on(changeset).email
end
test "validates email uniqueness", %{user: user} do
%{email: email} = user_fixture()
{:error, changeset} =
Accounts.apply_user_email(user, valid_user_password(), %{email: email})
assert "has already been taken" in errors_on(changeset).email
end
test "validates current password", %{user: user} do
{:error, changeset} =
Accounts.apply_user_email(user, "invalid", %{email: unique_user_email()})
assert %{current_password: ["is not valid"]} = errors_on(changeset)
end
test "applies the email without persisting it", %{user: user} do
email = unique_user_email()
{:ok, user} = Accounts.apply_user_email(user, valid_user_password(), %{email: email})
assert user.email == email
assert Accounts.get_user!(user.id).email != email
end
end
describe "deliver_update_email_instructions/3" do
setup do
%{user: user_fixture()}
end
test "sends token through notification", %{user: user} do
token =
extract_user_token(fn url ->
Accounts.deliver_update_email_instructions(user, "[email protected]", url)
end)
{:ok, token} = Base.url_decode64(token, padding: false)
assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
assert user_token.user_id == user.id
assert user_token.sent_to == user.email
assert user_token.context == "change:[email protected]"
end
end
describe "update_user_email/2" do
setup do
user = user_fixture()
email = unique_user_email()
token =
extract_user_token(fn url ->
Accounts.deliver_update_email_instructions(%{user | email: email}, user.email, url)
end)
%{user: user, token: token, email: email}
end
test "updates the email with a valid token", %{user: user, token: token, email: email} do
assert Accounts.update_user_email(user, token) == :ok
changed_user = Repo.get!(User, user.id)
assert changed_user.email != user.email
assert changed_user.email == email
assert changed_user.confirmed_at
assert changed_user.confirmed_at != user.confirmed_at
refute Repo.get_by(UserToken, user_id: user.id)
end
test "does not update email with invalid token", %{user: user} do
assert Accounts.update_user_email(user, "oops") == :error
assert Repo.get!(User, user.id).email == user.email
assert Repo.get_by(UserToken, user_id: user.id)
end
test "does not update email if user email changed", %{user: user, token: token} do
assert Accounts.update_user_email(%{user | email: "[email protected]"}, token) == :error
assert Repo.get!(User, user.id).email == user.email
assert Repo.get_by(UserToken, user_id: user.id)
end
test "does not update email if token expired", %{user: user, token: token} do
{1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
assert Accounts.update_user_email(user, token) == :error
assert Repo.get!(User, user.id).email == user.email
assert Repo.get_by(UserToken, user_id: user.id)
end
end
describe "change_user_password/2" do
test "returns a user changeset" do
assert %Ecto.Changeset{} = changeset = Accounts.change_user_password(%User{})
assert changeset.required == [:password]
end
test "allows fields to be set" do
changeset =
Accounts.change_user_password(%User{}, %{
"password" => "new valid password"
})
assert changeset.valid?
assert get_change(changeset, :password) == "new valid password"
assert is_nil(get_change(changeset, :hashed_password))
end
end
describe "update_user_password/3" do
setup do
%{user: user_fixture()}
end
test "validates password", %{user: user} do
{:error, changeset} =
Accounts.update_user_password(user, valid_user_password(), %{
password: "not valid",
password_confirmation: "another"
})
assert %{
password: ["should be at least 12 character(s)"],
password_confirmation: ["does not match password"]
} = errors_on(changeset)
end
test "validates maximum values for password for security", %{user: user} do
too_long = String.duplicate("db", 100)
{:error, changeset} =
Accounts.update_user_password(user, valid_user_password(), %{password: too_long})
assert "should be at most 80 character(s)" in errors_on(changeset).password
end
test "validates current password", %{user: user} do
{:error, changeset} =
Accounts.update_user_password(user, "invalid", %{password: valid_user_password()})
assert %{current_password: ["is not valid"]} = errors_on(changeset)
end
test "updates the password", %{user: user} do
{:ok, user} =
Accounts.update_user_password(user, valid_user_password(), %{
password: "new valid password"
})
assert is_nil(user.password)
assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
end
test "deletes all tokens for the given user", %{user: user} do
_ = Accounts.generate_user_session_token(user)
{:ok, _} =
Accounts.update_user_password(user, valid_user_password(), %{
password: "new valid password"
})
refute Repo.get_by(UserToken, user_id: user.id)
end
end
describe "generate_user_session_token/1" do
setup do
%{user: user_fixture()}
end
test "generates a token", %{user: user} do
token = Accounts.generate_user_session_token(user)
assert user_token = Repo.get_by(UserToken, token: token)
assert user_token.context == "session"
# Creating the same token for another user should fail
assert_raise Ecto.ConstraintError, fn ->
Repo.insert!(%UserToken{
token: user_token.token,
user_id: user_fixture().id,
context: "session"
})
end
end
end
describe "get_user_by_session_token/1" do
setup do
user = user_fixture()
token = Accounts.generate_user_session_token(user)
%{user: user, token: token}
end
test "returns user by token", %{user: user, token: token} do
assert session_user = Accounts.get_user_by_session_token(token)
assert session_user.id == user.id
end
test "does not return user for invalid token" do
refute Accounts.get_user_by_session_token("oops")
end
test "does not return user for expired token", %{token: token} do
{1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
refute Accounts.get_user_by_session_token(token)
end
end
describe "delete_session_token/1" do
test "deletes the token" do
user = user_fixture()
token = Accounts.generate_user_session_token(user)
assert Accounts.delete_session_token(token) == :ok
refute Accounts.get_user_by_session_token(token)
end
end
describe "deliver_user_confirmation_instructions/2" do
setup do
%{user: user_fixture()}
end
test "sends token through notification", %{user: user} do
token =
extract_user_token(fn url ->
Accounts.deliver_user_confirmation_instructions(user, url)
end)
{:ok, token} = Base.url_decode64(token, padding: false)
assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
assert user_token.user_id == user.id
assert user_token.sent_to == user.email
assert user_token.context == "confirm"
end
end
describe "confirm_user/1" do
setup do
user = user_fixture()
token =
extract_user_token(fn url ->
Accounts.deliver_user_confirmation_instructions(user, url)
end)
%{user: user, token: token}
end
test "confirms the email with a valid token", %{user: user, token: token} do
assert {:ok, confirmed_user} = Accounts.confirm_user(token)
assert confirmed_user.confirmed_at
assert confirmed_user.confirmed_at != user.confirmed_at
assert Repo.get!(User, user.id).confirmed_at
refute Repo.get_by(UserToken, user_id: user.id)
end
test "does not confirm with invalid token", %{user: user} do
assert Accounts.confirm_user("oops") == :error
refute Repo.get!(User, user.id).confirmed_at
assert Repo.get_by(UserToken, user_id: user.id)
end
test "does not confirm email if token expired", %{user: user, token: token} do
{1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
assert Accounts.confirm_user(token) == :error
refute Repo.get!(User, user.id).confirmed_at
assert Repo.get_by(UserToken, user_id: user.id)
end
end
describe "deliver_user_reset_password_instructions/2" do
setup do
%{user: user_fixture()}
end
test "sends token through notification", %{user: user} do
token =
extract_user_token(fn url ->
Accounts.deliver_user_reset_password_instructions(user, url)
end)
{:ok, token} = Base.url_decode64(token, padding: false)
assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
assert user_token.user_id == user.id
assert user_token.sent_to == user.email
assert user_token.context == "reset_password"
end
end
describe "get_user_by_reset_password_token/1" do
setup do
user = user_fixture()
token =
extract_user_token(fn url ->
Accounts.deliver_user_reset_password_instructions(user, url)
end)
%{user: user, token: token}
end
test "returns the user with valid token", %{user: %{id: id}, token: token} do
assert %User{id: ^id} = Accounts.get_user_by_reset_password_token(token)
assert Repo.get_by(UserToken, user_id: id)
end
test "does not return the user with invalid token", %{user: user} do
refute Accounts.get_user_by_reset_password_token("oops")
assert Repo.get_by(UserToken, user_id: user.id)
end
test "does not return the user if token expired", %{user: user, token: token} do
{1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
refute Accounts.get_user_by_reset_password_token(token)
assert Repo.get_by(UserToken, user_id: user.id)
end
end
describe "reset_user_password/2" do
setup do
%{user: user_fixture()}
end
test "validates password", %{user: user} do
{:error, changeset} =
Accounts.reset_user_password(user, %{
password: "not valid",
password_confirmation: "another"
})
assert %{
password: ["should be at least 12 character(s)"],
password_confirmation: ["does not match password"]
} = errors_on(changeset)
end
test "validates maximum values for password for security", %{user: user} do
too_long = String.duplicate("db", 100)
{:error, changeset} = Accounts.reset_user_password(user, %{password: too_long})
assert "should be at most 80 character(s)" in errors_on(changeset).password
end
test "updates the password", %{user: user} do
{:ok, updated_user} = Accounts.reset_user_password(user, %{password: "new valid password"})
assert is_nil(updated_user.password)
assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
end
test "deletes all tokens for the given user", %{user: user} do
_ = Accounts.generate_user_session_token(user)
{:ok, _} = Accounts.reset_user_password(user, %{password: "new valid password"})
refute Repo.get_by(UserToken, user_id: user.id)
end
end
describe "inspect/2" do
test "does not include password" do
refute inspect(%User{password: "123456"}) =~ "password: \"123456\""
end
end
describe "users" do
alias Basic.Accounts.User
@valid_attrs %{confirmed_at: ~N[2010-04-17 14:00:00], deleted_at: ~N[2010-04-17 14:00:00], email: "some email", hashed_password: "some hashed_password"}
@update_attrs %{confirmed_at: ~N[2011-05-18 15:01:01], deleted_at: ~N[2011-05-18 15:01:01], email: "some updated email", hashed_password: "some updated hashed_password"}
@invalid_attrs %{confirmed_at: nil, deleted_at: nil, email: nil, hashed_password: nil}
def user_fixture(attrs \\ %{}) do
{:ok, user} =
attrs
|> Enum.into(@valid_attrs)
|> Accounts.create_user()
user
end
test "list_users/0 returns all users" do
user = user_fixture()
assert Accounts.list_users() == [user]
end
test "get_user!/1 returns the user with given id" do
user = user_fixture()
assert Accounts.get_user!(user.id) == user
end
test "create_user/1 with valid data creates a user" do
assert {:ok, %User{} = user} = Accounts.create_user(@valid_attrs)
assert user.confirmed_at == ~N[2010-04-17 14:00:00]
assert user.deleted_at == ~N[2010-04-17 14:00:00]
assert user.email == "some email"
assert user.hashed_password == "some hashed_password"
end
test "create_user/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Accounts.create_user(@invalid_attrs)
end
test "update_user/2 with valid data updates the user" do
user = user_fixture()
assert {:ok, %User{} = user} = Accounts.update_user(user, @update_attrs)
assert user.confirmed_at == ~N[2011-05-18 15:01:01]
assert user.deleted_at == ~N[2011-05-18 15:01:01]
assert user.email == "some updated email"
assert user.hashed_password == "some updated hashed_password"
end
test "update_user/2 with invalid data returns error changeset" do
user = user_fixture()
assert {:error, %Ecto.Changeset{}} = Accounts.update_user(user, @invalid_attrs)
assert user == Accounts.get_user!(user.id)
end
test "delete_user/1 deletes the user" do
user = user_fixture()
assert {:ok, %User{}} = Accounts.delete_user(user)
assert_raise Ecto.NoResultsError, fn -> Accounts.get_user!(user.id) end
end
test "change_user/1 returns a user changeset" do
user = user_fixture()
assert %Ecto.Changeset{} = Accounts.change_user(user)
end
end
end
| 34.068063 | 173 | 0.659239 |
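The test file above exercises a `phx.gen.auth`-style accounts context; as a usage sketch grounded in those tests (the literal values are invented):

```elixir
{:ok, user} =
  Basic.Accounts.register_user(%{
    email: "jane@example.com",
    password: "a long valid password"
  })

token = Basic.Accounts.generate_user_session_token(user)
Basic.Accounts.get_user_by_session_token(token)
#=> the %Basic.Accounts.User{} registered above
```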
7924a415db98b7d7ceac097c1514e810ea4c62e2 | 1,089 | ex | Elixir | apps/reaper/lib/reaper/decoder/csv.ex | PillarTechnology/smartcitiesdata | 9420a26820e38267513cd1bfa82c7f5583222bb1 | [
"Apache-2.0"
] | 1 | 2021-04-05T19:17:18.000Z | 2021-04-05T19:17:18.000Z | apps/reaper/lib/reaper/decoder/csv.ex | AWHServiceAccount/smartcitiesdata | 6957afac12809288640b6ba6b576c3016e6033d7 | [
"Apache-2.0"
] | 11 | 2020-01-07T15:43:42.000Z | 2020-12-22T15:23:25.000Z | apps/reaper/lib/reaper/decoder/csv.ex | jakeprem/smartcitiesdata | da309ac0d2261527278951cbae88604455207589 | [
"Apache-2.0"
] | null | null | null | NimbleCSV.define(CsvParser, [])
defmodule Reaper.Decoder.Csv do
@moduledoc """
Reaper.Decoder implementation to decode a csv file into a stream of records
"""
defmodule CsvError do
defexception [:message]
end
@behaviour Reaper.Decoder
@impl Reaper.Decoder
def decode({:file, filename}, %SmartCity.Dataset{technical: %{schema: schema}} = dataset) do
try do
keys = Enum.map(schema, fn el -> el.name end)
stream =
filename
|> File.stream!()
|> Stream.reject(fn line -> String.trim(line) == "" end)
|> CsvParser.parse_stream(skip_headers: false)
|> Stream.reject(&header?(&1, keys))
|> Stream.map(fn row -> keys |> Enum.zip(row) |> Map.new() end)
{:ok, stream}
rescue
error ->
{:error, "DatasetId: #{dataset.id}", error}
end
end
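  # Illustrative only (the schema and file below are hypothetical): for a dataset
  # whose schema is [%{name: "id"}, %{name: "name"}] and a file containing
  # "1,bob\n2,sue\n", decode/2 returns {:ok, stream} where the stream yields
  # %{"id" => "1", "name" => "bob"} and %{"id" => "2", "name" => "sue"}.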
defp header?(row, keys) do
keys
|> Enum.zip(row)
|> Enum.all?(fn {key, val} -> key == String.trim(val) end)
end
@impl Reaper.Decoder
def handle?(source_format) do
String.downcase(source_format) == "text/csv"
end
end
| 24.2 | 94 | 0.61157 |
7924b9e361bf5313d7b97710482d5609c261d82e | 981 | exs | Elixir | test/models/appointment_test.exs | simwms/apiv4 | c3da7407eaf3580b759f49726028439b4b8ea9d0 | [
"MIT"
] | 2 | 2016-02-25T20:12:35.000Z | 2018-01-03T00:03:12.000Z | test/models/appointment_test.exs | simwms/apiv4 | c3da7407eaf3580b759f49726028439b4b8ea9d0 | [
"MIT"
] | 1 | 2016-01-11T04:50:39.000Z | 2016-01-12T05:00:08.000Z | test/models/appointment_test.exs | simwms/apiv4 | c3da7407eaf3580b759f49726028439b4b8ea9d0 | [
"MIT"
] | null | null | null | defmodule Apiv4.AppointmentTest do
use Apiv4.ModelCase
alias Apiv4.Appointment
test "creation should work" do
account = build_account
company = build_company account
appointment_params = %{
"external_reference" => "fajskdf234",
"company_id" => company.id,
"golive_at" => Ecto.DateTime.utc
}
account
|> build(:appointments)
|> Appointment.create_changeset(appointment_params)
|> Repo.insert
|> case do
{:ok, appointment} ->
assert appointment.company_name == company.name
appointment = appointment
|> Repo.preload([:truck, :weighticket])
assert appointment.truck.appointment_id == appointment.id
assert appointment.truck.golive_at == appointment.golive_at
assert appointment.weighticket.appointment_id == appointment.id
assert appointment.weighticket.golive_at == appointment.golive_at
{:error, changeset} ->
refute changeset
end
end
end | 31.645161 | 73 | 0.678899 |
7924ccecaab802dd6c5e078220ac3a6ec0703be1 | 3,560 | ex | Elixir | lib/vintage_net_bridge.ex | nerves-networking/vintage_net_bridge | 6c937d1cb7dc11e5b5537feaba144303dac3d3d4 | [
"Apache-2.0"
] | 3 | 2020-04-23T22:44:04.000Z | 2020-08-03T22:02:19.000Z | lib/vintage_net_bridge.ex | nerves-networking/vintage_net_bridge | 6c937d1cb7dc11e5b5537feaba144303dac3d3d4 | [
"Apache-2.0"
] | 9 | 2020-04-23T22:36:49.000Z | 2022-01-24T10:18:25.000Z | lib/vintage_net_bridge.ex | nerves-networking/vintage_net_bridge | 6c937d1cb7dc11e5b5537feaba144303dac3d3d4 | [
"Apache-2.0"
] | 1 | 2020-06-24T09:37:31.000Z | 2020-06-24T09:37:31.000Z | defmodule VintageNetBridge do
@moduledoc """
Configure network bridges with VintageNet
Configurations for this technology are maps with a `:type` field set to
`VintageNetBridge`. The following additional fields are supported:
* `:vintage_net_bridge` - Bridge options
* `:interfaces` - Set to a list of interface names to add to the bridge.
This option is required for the bridge to be useful.
* `:forward_delay`
* `:priority`
* `:hello_time`
* `:max_age`
* `:path_cost`
* `:path_priority`
* `:hairpin`
* `:stp`
Here's an example configuration for setting up a bridge:
```elixir
%{
type: VintageNetBridge,
    vintage_net_bridge: %{
      interfaces: ["eth0", "wlan0"]
    }
  }
```
See [brctl(8)](https://www.man7.org/linux/man-pages/man8/brctl.8.html) for
more information on individual options.
"""
@behaviour VintageNet.Technology
alias VintageNet.Interface.RawConfig
alias VintageNet.IP.{IPv4Config, DhcpdConfig, DnsdConfig}
alias VintageNetBridge.Server
@impl true
def normalize(config), do: config
@impl true
def to_raw_config(ifname, config, opts) do
normalized_config = normalize(config)
bridge_config = normalized_config[:vintage_net_bridge]
interfaces = Map.fetch!(bridge_config, :interfaces)
up_cmds = [
{:run, "brctl", ["addbr", ifname]}
]
down_cmds = [
{:run, "brctl", ["delbr", ifname]}
]
bridge_up_cmds = Enum.flat_map(bridge_config, &config_to_cmd(&1, "brctl", ifname))
addif_up_cmds =
Map.get(bridge_config, :interfaces, [])
|> Enum.map(fn addif ->
{:run_ignore_errors, "brctl", ["addif", ifname, addif]}
end)
%RawConfig{
ifname: ifname,
type: __MODULE__,
source_config: normalized_config,
up_cmds: up_cmds ++ bridge_up_cmds ++ addif_up_cmds,
down_cmds: down_cmds,
required_ifnames: [],
child_specs: [{Server, %{brctl: "brctl", bridge_ifname: ifname, interfaces: interfaces}}]
}
|> IPv4Config.add_config(normalized_config, opts)
|> DhcpdConfig.add_config(normalized_config, opts)
|> DnsdConfig.add_config(normalized_config, opts)
end
@impl true
def ioctl(_ifname, _command, _args) do
{:error, :unsupported}
end
@impl true
def check_system(_opts) do
{:error, "unimplemented"}
end
defp config_to_cmd({:forward_delay, value}, brctl, ifname) do
[{:run, brctl, ["setfd", ifname, to_string(value)]}]
end
defp config_to_cmd({:priority, value}, brctl, ifname) do
[{:run, brctl, ["setbridgeprio", ifname, to_string(value)]}]
end
defp config_to_cmd({:hello_time, value}, brctl, ifname) do
[{:run, brctl, ["sethello", ifname, to_string(value)]}]
end
defp config_to_cmd({:max_age, value}, brctl, ifname) do
[{:run, brctl, ["setmaxage", ifname, to_string(value)]}]
end
defp config_to_cmd({:path_cost, value}, brctl, ifname) do
[{:run, brctl, ["setpathcost", ifname, to_string(value)]}]
end
defp config_to_cmd({:path_priority, value}, brctl, ifname) do
[{:run, brctl, ["setportprio", ifname, to_string(value)]}]
end
defp config_to_cmd({:hairpin, {port, value}}, brctl, ifname) do
[{:run, brctl, ["hairpin", ifname, to_string(port), bool_to_yn(value)]}]
end
defp config_to_cmd({:stp, value}, brctl, ifname) do
[{:run, brctl, ["stp", ifname, bool_to_yn(value)]}]
end
defp config_to_cmd(_other, _brctl, _ifname), do: []
defp bool_to_yn(true), do: "yes"
defp bool_to_yn(false), do: "no"
end
| 27.8125 | 95 | 0.662079 |
7924d92cacdbc91fa22a4c046f2beba8e9f2bf37 | 2,100 | ex | Elixir | clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/model/find_devices_by_owner_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/model/find_devices_by_owner_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/model/find_devices_by_owner_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AndroidDeviceProvisioning.V1.Model.FindDevicesByOwnerResponse do
@moduledoc """
Response containing found devices.
## Attributes
* `devices` (*type:* `list(GoogleApi.AndroidDeviceProvisioning.V1.Model.Device.t)`, *default:* `nil`) - The customer's devices.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - A token used to access the next page of results.
Omitted if no further results are available.
* `totalSize` (*type:* `integer()`, *default:* `nil`) - The total count of items in the list irrespective of pagination.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:devices => list(GoogleApi.AndroidDeviceProvisioning.V1.Model.Device.t()),
:nextPageToken => String.t(),
:totalSize => integer()
}
field(:devices, as: GoogleApi.AndroidDeviceProvisioning.V1.Model.Device, type: :list)
field(:nextPageToken)
field(:totalSize)
end
defimpl Poison.Decoder,
for: GoogleApi.AndroidDeviceProvisioning.V1.Model.FindDevicesByOwnerResponse do
def decode(value, options) do
GoogleApi.AndroidDeviceProvisioning.V1.Model.FindDevicesByOwnerResponse.decode(value, options)
end
end
defimpl Poison.Encoder,
for: GoogleApi.AndroidDeviceProvisioning.V1.Model.FindDevicesByOwnerResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.5 | 131 | 0.739524 |
7924e074bb5fa1b72ebc8b04570d9aa6c04c8c7b | 4,372 | ex | Elixir | lib/barna.ex | fartek/barna | cdcc7a89fa3e66459568863cf7713651abb0c688 | [
"MIT"
] | null | null | null | lib/barna.ex | fartek/barna | cdcc7a89fa3e66459568863cf7713651abb0c688 | [
"MIT"
] | null | null | null | lib/barna.ex | fartek/barna | cdcc7a89fa3e66459568863cf7713651abb0c688 | [
"MIT"
] | null | null | null | defmodule Barna do
@moduledoc """
This module contains all of the macro magic that ultimately generates the additional functions
in the schemas that `use` it.
Functions that are added to schemas:
- fetch/1
- list/1
"""
import Ecto.Query
defmacro __using__(_) do
quote do
use Ecto.Schema
import Ecto.Query
@before_compile Barna
end
end
defmacro __before_compile__(env) do
properties =
env.module
|> Module.get_attribute(:ecto_fields)
|> Enum.map(fn {key, _} -> key end)
fallback_case =
quote do
{key, val}, _dynamic ->
raise "Trying to match on property '#{key}' with val '#{val}' on '#{__MODULE__}' but the property doesn't exist"
end
where_cases =
Enum.flat_map(properties, fn property ->
quote do
{unquote(property), value}, dynamic ->
dynamic(
[schema],
^dynamic and
unquote({{:., [], [{:schema, [], Elixir}, property]}, [no_parens: true], []}) ==
^value
)
end
end) ++ fallback_case
reducer = {:fn, [], quote(do: unquote(where_cases))}
quote do
@type fetch_opt ::
{:by, term} | {:include, [atom]} | {:include!, [atom]} | {:result_as_tuple, boolean}
@spec fetch([fetch_opt]) :: struct | nil | {:ok, struct} | {:error, :not_found}
def fetch(opts) do
#######################
# Prepare the opts #
#######################
by = Barna.Options.parse_opt_required!(opts, :by) |> Barna.Options.opt_to_list(:id)
result_as_tuple = Barna.Options.parse_with_default(opts, :result_as_tuple, true)
include = opts[:include]
include! = opts[:include!]
############################
# Generate the queries #
############################
where_params = Enum.reduce(by, dynamic(true), unquote(reducer))
query =
__MODULE__
|> where(^where_params)
|> Barna.Query.parse_include(include)
|> Barna.Query.parse_include!(include!)
####################################
# Fetch and return the results #
####################################
if result_as_tuple do
Barna.fetch_as_tuple(query)
else
Barna.fetch(query)
end
end
@typep order_by_opt :: atom | {:asc, atom} | {:desc, atom}
@type list_opt ::
{:by, term}
| {:include, [atom]}
| {:include!, [atom]}
| {:order_by, order_by_opt}
| {:limit, non_neg_integer}
@spec list([list_opt]) :: [struct]
def list(opts \\ []) do
#######################
# Prepare the opts #
#######################
by = Barna.Options.parse_with_default(opts, :by, nil)
by = if by, do: Barna.Options.opt_to_list(by, :id), else: nil
order_by = opts[:order_by] || :inserted_at
order_by = Barna.Options.opt_to_list(order_by, :asc)
include = opts[:include]
include! = opts[:include!]
limit = opts[:limit]
############################
# Generate the queries #
############################
where_params = if by, do: Enum.reduce(by, dynamic(true), unquote(reducer)), else: true
query =
__MODULE__
|> where(^where_params)
|> order_by(^order_by)
|> Barna.Query.parse_include(include)
|> Barna.Query.parse_include!(include!)
|> Barna.Query.parse_limit(limit)
####################################
# Fetch and return the results #
####################################
Barna.list(query)
end
end
end
@spec fetch(Ecto.Queryable.t()) :: nil | struct
def fetch(query) do
repo_module = Application.get_env(:barna, Barna)[:repo]
repo_module.one(query)
end
@spec fetch_as_tuple(Ecto.Queryable.t()) :: {:ok, struct} | {:error, :not_found}
def fetch_as_tuple(query) do
case fetch(query) do
nil -> {:error, :not_found}
result -> {:ok, result}
end
end
@spec list(Ecto.Queryable.t()) :: [struct]
def list(query) do
repo_module = Application.get_env(:barna, Barna)[:repo]
repo_module.all(query)
end
end
| 29.146667 | 122 | 0.509149 |
7924e118095b1693a427edf198efaae4c1245593 | 26,718 | ex | Elixir | lib/sweet_xml.ex | J3RN/sweet_xml | 18f3b299b14e0d33f30b9e873c2ca83231a1ef55 | [
"MIT"
] | null | null | null | lib/sweet_xml.ex | J3RN/sweet_xml | 18f3b299b14e0d33f30b9e873c2ca83231a1ef55 | [
"MIT"
] | null | null | null | lib/sweet_xml.ex | J3RN/sweet_xml | 18f3b299b14e0d33f30b9e873c2ca83231a1ef55 | [
"MIT"
] | null | null | null | defmodule SweetXpath do
@moduledoc false
defmodule Priv do
@moduledoc false
@doc false
def self_val(val), do: val
end
defstruct path: ".",
is_value: true,
is_list: false,
is_keyword: false,
is_optional: false,
cast_to: false,
transform_fun: &(Priv.self_val/1),
namespaces: []
end
defmodule SweetXml do
@moduledoc ~S"""
`SweetXml` is a thin wrapper around `:xmerl`. It allows you to convert a
string or xmlElement record as defined in `:xmerl` to an elixir value such
as `map`, `list`, `char_list`, or any combination of these.
For normal sized documents, `SweetXml` primarily exposes 3 functions
* `SweetXml.xpath/2` - return a value based on the xpath expression
* `SweetXml.xpath/3` - similar to above but allowing nesting of mapping
* `SweetXml.xmap/2` - return a map with keywords mapped to values returned
from xpath
For something larger, `SweetXml` mainly exposes 1 function
* `SweetXml.stream_tags/3` - stream a given tag or a list of tags, and
optionally "discard" some dom elements in order to free memory during
streaming for big files which cannot fit entirely in memory
## Examples
Simple Xpath:
iex> import SweetXml
iex> doc = "<h1><a>Some linked title</a></h1>"
iex> doc |> xpath(~x"//a/text()")
'Some linked title'
Nested Mapping:
iex> import SweetXml
iex> doc = "<body><header><p>Message</p><ul><li>One</li><li><a>Two</a></li></ul></header></body>"
iex> doc |> xpath(~x"//header", message: ~x"./p/text()", a_in_li: ~x".//li/a/text()"l)
%{a_in_li: ['Two'], message: 'Message'}
Streaming:
iex> import SweetXml
iex> doc = ["<ul><li>l1</li><li>l2", "</li><li>l3</li></ul>"]
iex> SweetXml.stream_tags(doc, :li)
...> |> Stream.map(fn {:li, doc} ->
...> doc |> SweetXml.xpath(~x"./text()")
...> end)
...> |> Enum.to_list
['l1', 'l2', 'l3']
For more examples please see help for each individual functions
## The ~x Sigil
Notice in the above examples, we used the expression `~x"//a/text()"` to
  define the path. The reason is that it allows us to more precisely specify what
is being returned.
* `~x"//some/path"`
without any modifiers, `xpath/2` will return the value of the entity if
the entity is of type `xmlText`, `xmlAttribute`, `xmlPI`, `xmlComment`
as defined in `:xmerl`
* `~x"//some/path"e`
`e` stands for (e)ntity. This forces `xpath/2` to return the entity with
which you can further chain your `xpath/2` call
* `~x"//some/path"l`
'l' stands for (l)ist. This forces `xpath/2` to return a list. Without
`l`, `xpath/2` will only return the first element of the match
* `~x"//some/path"el` - mix of the above
* `~x"//some/path"k`
'k' stands for (K)eyword. This forces `xpath/2` to return a Keyword instead of a Map.
* `~x"//some/path"s`
's' stands for (s)tring. This forces `xpath/2` to return the value as
string instead of a char list.
* `x"//some/path"o`
'o' stands for (O)ptional. This allows the path to not exist, and will return nil.
* `~x"//some/path"sl` - string list.
Notice also in the examples section, we always import SweetXml first. This
  makes the `~x` sigil available in the current scope. Without it, instead of using
`~x`, you can do the following
iex> doc = "<h1><a>Some linked title</a></h1>"
iex> doc |> SweetXml.xpath(%SweetXpath{path: '//a/text()', is_value: true, cast_to: false, is_list: false, is_keyword: false})
'Some linked title'
Note the use of char_list in the path definition.
"""
require Record
@doc false
Record.defrecord :xmlDecl, Record.extract(:xmlDecl, from_lib: "xmerl/include/xmerl.hrl")
@doc false
Record.defrecord :xmlAttribute, Record.extract(:xmlAttribute, from_lib: "xmerl/include/xmerl.hrl")
@doc false
Record.defrecord :xmlNamespace, Record.extract(:xmlNamespace, from_lib: "xmerl/include/xmerl.hrl")
@doc false
Record.defrecord :xmlNsNode, Record.extract(:xmlNsNode, from_lib: "xmerl/include/xmerl.hrl")
@doc false
Record.defrecord :xmlElement, Record.extract(:xmlElement, from_lib: "xmerl/include/xmerl.hrl")
@doc false
Record.defrecord :xmlText, Record.extract(:xmlText, from_lib: "xmerl/include/xmerl.hrl")
@doc false
Record.defrecord :xmlComment, Record.extract(:xmlComment, from_lib: "xmerl/include/xmerl.hrl")
@doc false
Record.defrecord :xmlPI, Record.extract(:xmlPI, from_lib: "xmerl/include/xmerl.hrl")
@doc false
Record.defrecord :xmlDocument, Record.extract(:xmlDocument, from_lib: "xmerl/include/xmerl.hrl")
@doc false
Record.defrecord :xmlObj, Record.extract(:xmlObj, from_lib: "xmerl/include/xmerl.hrl")
@type doc :: (iodata | String.t | Enum.t)
@type spec :: %SweetXpath{}
@opaque xmlElement :: record(:xmlElement)
@doc ~s"""
`sigil_x/2` simply returns a `%SweetXpath{}` struct, with modifiers converted to
boolean fields:
iex> SweetXml.sigil_x("//some/path", 'e')
%SweetXpath{path: '//some/path', is_value: false, cast_to: false, is_list: false, is_keyword: false}
Or you can simply import and use the `~x` expression:
iex> import SweetXml
iex> ~x"//some/path"e
%SweetXpath{path: '//some/path', is_value: false, cast_to: false, is_list: false, is_keyword: false}
  Valid modifiers are `e`, `s`, `l`, `k`, `o`, `i` and `f`. Below is the full explanation
* `~x"//some/path"`
without any modifiers, `xpath/2` will return the value of the entity if
the entity is of type `xmlText`, `xmlAttribute`, `xmlPI`, `xmlComment`
as defined in `:xmerl`
* `~x"//some/path"e`
`e` stands for (e)ntity. This forces `xpath/2` to return the entity with
which you can further chain your `xpath/2` call
* `~x"//some/path"l`
'l' stands for (l)ist. This forces `xpath/2` to return a list. Without
`l`, `xpath/2` will only return the first element of the match
* `~x"//some/path"el` - mix of the above
* `~x"//some/path"k`
'k' stands for (K)eyword. This forces `xpath/2` to return a Keyword instead of a Map.
* `~x"//some/path"s`
's' stands for (s)tring. This forces `xpath/2` to return the value as
string instead of a char list.
* `x"//some/path"o`
'o' stands for (O)ptional. This allows the path to not exist, and will return nil.
* `~x"//some/path"sl` - string list.
* `~x"//some/path"i`
'i' stands for (i)nteger. This forces `xpath/2` to return the value as
integer instead of a char list.
* `~x"//some/path"f`
'f' stands for (f)loat. This forces `xpath/2` to return the value as
float instead of a char list.
* `~x"//some/path"il` - integer list
"""
def sigil_x(path, modifiers \\ '') do
%SweetXpath{
path: String.to_charlist(path),
is_value: not(?e in modifiers),
is_list: ?l in modifiers,
is_keyword: ?k in modifiers,
is_optional: ?o in modifiers,
cast_to: cond do
?s in modifiers -> :string
?S in modifiers -> :soft_string
?i in modifiers -> :integer
?I in modifiers -> :soft_integer
?f in modifiers -> :float
?F in modifiers -> :soft_float
:otherwise -> false
end
}
end
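  @doc """
  Adds a namespace prefix/URI pair to a `%SweetXpath{}` so that prefixed paths can
  be resolved during the query. Illustrative sketch (the prefix and URI below are
  example values):

      spec = ~x"//ns:title/text()" |> add_namespace("ns", "http://example.com/ns")
      doc |> xpath(spec)
  """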
def add_namespace(xpath, prefix, uri) do
%SweetXpath{xpath | namespaces: [{to_charlist(prefix), to_charlist(uri)}
| xpath.namespaces]}
end
@doc """
Parse a document into a form ready to be used by `xpath/3` and `xmap/2`.
`doc` can be
- a byte list (iodata)
- a binary
- any enumerable of binaries (for instance `File.stream!/3` result)
`options` can be both:
* `xmerl`'s options as described on the [xmerl_scan](http://www.erlang.org/doc/man/xmerl_scan.html) documentation page,
see [the erlang tutorial](http://www.erlang.org/doc/apps/xmerl/xmerl_examples.html) for some advanced usage.
For example: `parse(doc, quiet: true)`
* `:dtd` to prevent DTD parsing or fetching, with the following possibilities:
* `:none`, will prevent both internal and external entities, it is the recommended options on untrusted XML;
* `:all`, the default, for backward compatibility, allows all DTDs;
* `:internal_only`, will block all attempt at external fetching;
* `[only: entities]` where `entities` is either an atom for a single entity, or a list of atoms.
If any other entity is defined in the XML, `parse` will raise on them.
When `doc` is an enumerable, the `:cont_fun` option cannot be given.
Returns an `xmlElement` record.
"""
@spec parse(doc, opts :: list) :: xmlElement
def parse(doc, opts \\ []) do
ets = :ets.new(nil, [])
dtd_arg = :proplists.get_value(:dtd, opts, :all)
opts = :proplists.delete(:dtd, opts)
opts = SweetXml.Options.handle_dtd(dtd_arg).(ets) ++ opts
try do
do_parse(doc, opts)
after
_ = :ets.delete(ets)
end
end
@doc false
def do_parse(doc, options) when is_binary(doc) do
doc |> :erlang.binary_to_list |> do_parse(options)
end
def do_parse([c | _] = doc, options) when is_integer(c) do
{parsed_doc, _} = :xmerl_scan.string(doc, options)
parsed_doc
end
def do_parse(doc_enum, options) do
{parsed_doc, _} = :xmerl_scan.string('', options ++ continuation_opts(doc_enum))
parsed_doc
end
@doc """
Most common usage of streaming: stream a given tag or a list of tags, and
optionally "discard" some DOM elements in order to free memory during streaming
for big files which cannot fit entirely in memory.
  Note that each matched tag produces its own tree. If a given tag appears in
the discarded options, it is ignored.
- `doc` is an enumerable, data will be pulled during the result stream
enumeration. e.g. `File.stream!("some_file.xml")`
- `tags` is an atom or a list of atoms you want to extract. Each stream element
will be `{:tagname, xmlelem}`. e.g. :li, :header
- `options[:discard]` is the list of tag which will be discarded:
not added to its parent DOM.
- More options details are available with `parse/2`.
## Examples
iex> import SweetXml
iex> doc = ["<ul><li>l1</li><li>l2", "</li><li>l3</li></ul>"]
iex> SweetXml.stream_tags(doc, :li, discard: [:li])
...> |> Stream.map(fn {:li, doc} -> doc |> SweetXml.xpath(~x"./text()") end)
...> |> Enum.to_list
['l1', 'l2', 'l3']
iex> SweetXml.stream_tags(doc, [:ul, :li])
...> |> Stream.map(fn {_, doc} -> doc |> SweetXml.xpath(~x"./text()") end)
...> |> Enum.to_list
['l1', 'l2', 'l3', nil]
Be careful if you set `options[:discard]`. If any of the discarded tags is nested
inside a kept tag, you will not be able to access them.
## Examples
iex> import SweetXml
iex> doc = ["<header>", "<title>XML</title", "><header><title>Nested</title></header></header>"]
iex> SweetXml.stream_tags(doc, :header)
...> |> Stream.map(fn {_, doc} -> SweetXml.xpath(doc, ~x".//title/text()") end)
...> |> Enum.to_list
['Nested', 'XML']
iex> SweetXml.stream_tags(doc, :header, discard: [:title])
...> |> Stream.map(fn {_, doc} -> SweetXml.xpath(doc, ~x"./title/text()") end)
...> |> Enum.to_list
[nil, nil]
"""
def stream_tags(doc, tags, options \\ []) do
tags = if is_atom(tags), do: [tags], else: tags
{discard_tags, xmerl_options} = case :proplists.lookup(:discard, options) do
{:discard, tags} -> {tags, :proplists.delete(:discard, options)}
:none -> {[], options}
end
doc |> stream(fn emit ->
[
hook_fun: fn
entity, xstate when Record.is_record(entity, :xmlElement) ->
name = xmlElement(entity, :name)
if length(tags) == 0 or name in tags do
emit.({name, entity})
end
{entity, xstate}
entity, xstate ->
{entity, xstate}
end,
acc_fun: fn
entity, acc, xstate when Record.is_record(entity, :xmlElement) ->
if xmlElement(entity, :name) in discard_tags do
{acc, xstate}
else
{[entity | acc], xstate}
end
entity, acc, xstate ->
{[entity | acc], xstate}
end
] ++ xmerl_options
end)
end
@doc """
  Create an element stream from an XML `doc`.
  This is a lower-level API compared to `SweetXml.stream_tags`. You can use
  the `options_callback` argument to get fine-grained control over what data is streamed.
- `doc` is an enumerable, data will be pulled during the result stream
enumeration. e.g. `File.stream!("some_file.xml")`
- `options_callback` is an anonymous function `fn emit -> (xmerl_opts | opts)` use it to
define your :xmerl callbacks and put data into the stream using
`emit.(elem)` in the callbacks. More details are available with `parse/2`.
For example, here you define a stream of all `xmlElement` :
iex> import Record
iex> doc = ["<h1", "><a>Som", "e linked title</a><a>other</a></h1>"]
iex> SweetXml.stream(doc, fn emit ->
...> [
...> hook_fun: fn
...> entity, xstate when is_record(entity, :xmlElement)->
...> emit.(entity)
...> {entity, xstate}
...> entity, xstate ->
...> {entity,xstate}
...> end
...> ]
...> end) |> Enum.count
3
"""
def stream(doc, options_callback) when is_binary(doc) do
stream([doc], options_callback)
end
def stream([c | _] = doc, options_callback) when is_integer(c) do
stream([IO.iodata_to_binary(doc)], options_callback)
end
def stream(doc, options_callback) do
Stream.resource fn ->
{parent, ref} = waiter = {self(), make_ref()}
opts = options_callback.(fn e -> send(parent, {:event, ref, e}) end)
ets = :ets.new(nil, [:public])
dtd_arg = :proplists.get_value(:dtd, opts, :all)
opts = :proplists.delete(:dtd, opts)
opts = SweetXml.Options.handle_dtd(dtd_arg).(ets) ++ opts
pid = spawn_link fn -> :xmerl_scan.string('', opts ++ continuation_opts(doc, waiter)) end
{ref, pid, Process.monitor(pid), ets}
end, fn {ref, pid, monref, ets} = acc ->
receive do
{:DOWN, ^monref, _, _, _} ->
{:halt, {:parse_ended, ets}} ## !!! maybe do something when reason !== :normal
{:event, ^ref, event} ->
{[event], acc}
{:wait, ^ref} ->
send(pid, {:continue, ref})
{[], acc}
end
end, fn
{:parse_ended, ets} ->
_ = :ets.delete(ets)
:ok
{ref, pid, monref, ets} ->
Process.demonitor(monref)
_ = :ets.delete(ets)
flush_halt(pid, ref)
end
end
@doc ~S"""
`xpath` allows you to query an XML document with XPath.
The second argument to xpath is a `%SweetXpath{}` struct. The optional third
argument is a keyword list, such that the value of each keyword is also
either a `%SweetXpath{}` or a list with head being a `%SweetXpath{}` and tail being
another keyword list exactly like before. Please see the examples below for better
understanding.
## Examples
Simple:
iex> import SweetXml
iex> doc = "<h1><a>Some linked title</a></h1>"
iex> doc |> xpath(~x"//a/text()")
'Some linked title'
With optional mapping:
iex> import SweetXml
iex> doc = "<body><header><p>Message</p><ul><li>One</li><li><a>Two</a></li></ul></header></body>"
iex> doc |> xpath(~x"//header", message: ~x"./p/text()", a_in_li: ~x".//li/a/text()"l)
%{a_in_li: ['Two'], message: 'Message'}
With optional mapping and nesting:
iex> import SweetXml
iex> doc = "<body><header><p>Message</p><ul><li>One</li><li><a>Two</a></li></ul></header></body>"
iex> doc
...> |> xpath(
...> ~x"//header",
...> ul: [
...> ~x"./ul",
...> a: ~x"./li/a/text()"
...> ]
...> )
%{ul: %{a: 'Two'}}
## Security
  Whenever you are working with XML that was not generated by your system,
  it is highly recommended that you restrict certain XML features during
  parsing. In particular, SweetXml lets you prevent DTD parsing and fetching.
Unless you know exactly what kind of DTD you want to permit in your xml,
it is recommended that you use the following code example to prevent possible attacks:
```
doc
|> parse(dtd: :none)
|> xpath(spec, subspec)
```
For more details, see `parse/2`.
"""
@spec xpath(parent :: (doc | xmlElement), spec, subspec) :: any
when subspec: keyword(spec | subspec)
def xpath(parent, spec, subspec \\ [])
def xpath(parent, spec, []) when not is_tuple(parent) do
parent |> parse |> xpath(spec)
end
def xpath(parent, %SweetXpath{is_list: true, is_value: true, cast_to: cast, is_optional: is_opt?} = spec, []) do
get_current_entities(parent, spec) |> Enum.map(&(_value(&1)) |> to_cast(cast,is_opt?)) |> spec.transform_fun.()
end
def xpath(parent, %SweetXpath{is_list: true, is_value: false} = spec, []) do
get_current_entities(parent, spec) |> spec.transform_fun.()
end
def xpath(parent, %SweetXpath{is_list: false, is_value: true, cast_to: string_type, is_optional: is_opt?} = spec, []) when string_type in [:string,:soft_string] do
spec = %SweetXpath{spec | is_list: true}
get_current_entities(parent, spec)
|> Enum.map(&(_value(&1) |> to_cast(string_type, is_opt?)))
|> Enum.join
|> spec.transform_fun.()
end
def xpath(parent, %SweetXpath{is_list: false, is_value: true, cast_to: cast, is_optional: is_opt?} = spec, []) do
get_current_entities(parent, spec) |> _value |> to_cast(cast, is_opt?) |> spec.transform_fun.()
end
def xpath(parent, %SweetXpath{is_list: false, is_value: false} = spec, []) do
get_current_entities(parent, spec) |> spec.transform_fun.()
end
def xpath(parent, sweet_xpath, subspec) do
if sweet_xpath.is_list do
current_entities = xpath(parent, sweet_xpath)
Enum.map(current_entities, fn (entity) -> xmap(entity, subspec, sweet_xpath) end)
else
current_entity = xpath(parent, sweet_xpath)
xmap(current_entity, subspec, sweet_xpath)
end
end
@doc ~S"""
`xmap` returns a mapping with each value being the result of `xpath`.
Just as `xpath`, you can nest the mapping structure. Please see `xpath/3` for
more detail.
You can give the option `true` to get the result as a keyword list instead of a map.
## Examples
Simple:
iex> import SweetXml
iex> doc = "<h1><a>Some linked title</a></h1>"
iex> doc |> xmap(a: ~x"//a/text()")
%{a: 'Some linked title'}
With optional mapping:
iex> import SweetXml
iex> doc = "<body><header><p>Message</p><ul><li>One</li><li><a>Two</a></li></ul></header></body>"
iex> doc |> xmap(message: ~x"//p/text()", a_in_li: ~x".//li/a/text()"l)
%{a_in_li: ['Two'], message: 'Message'}
With optional mapping and nesting:
iex> import SweetXml
iex> doc = "<body><header><p>Message</p><ul><li>One</li><li><a>Two</a></li></ul></header></body>"
iex> doc
...> |> xmap(
...> message: ~x"//p/text()",
...> ul: [
...> ~x"//ul",
...> a: ~x"./li/a/text()"
...> ]
...> )
%{message: 'Message', ul: %{a: 'Two'}}
iex> doc
...> |> xmap(
...> message: ~x"//p/text()",
...> ul: [
...> ~x"//ul"k,
...> a: ~x"./li/a/text()"
...> ]
...> )
%{message: 'Message', ul: [a: 'Two']}
iex> doc
...> |> xmap([
...> message: ~x"//p/text()",
...> ul: [
...> ~x"//ul",
...> a: ~x"./li/a/text()"
...> ]
...> ], true)
[message: 'Message', ul: %{a: 'Two'}]
## Security
  Whenever you are working with XML that was not generated by your system,
  it is highly recommended that you restrict certain XML features during
  parsing. In particular, SweetXml lets you prevent DTD parsing and fetching.
Unless you know exactly what kind of DTD you want to permit in your xml,
it is recommended that you use the following code example to prevent possible attacks:
```
doc
|> parse(dtd: :none)
|> xmap(specs, options)
```
For more details, see `parse/2`.
"""
@spec xmap(parent :: (doc | xmlElement), mapping :: specs, options :: (boolean | map)) :: (map | keyword)
when specs: keyword(spec | specs)
def xmap(parent, mapping, options \\ false)
def xmap(nil, _, %{is_optional: true}), do: nil
def xmap(parent, [], atom) when is_atom(atom), do: xmap(parent, [], %{is_keyword: atom})
def xmap(_, [], %{is_keyword: false}), do: %{}
def xmap(_, [], %{is_keyword: true}), do: []
def xmap(parent, [{label, spec} | tail], is_keyword) when is_list(spec) do
[sweet_xpath | subspec] = spec
result = xmap(parent, tail, is_keyword)
put_in result[label], xpath(parent, sweet_xpath, subspec)
end
def xmap(parent, [{label, sweet_xpath} | tail], is_keyword) do
result = xmap(parent, tail, is_keyword)
put_in result[label], xpath(parent, sweet_xpath)
end
@doc """
Tags `%SweetXpath{}` with `fun` to be applied at the end of `xpath` query.
## Examples
iex> import SweetXml
iex> string_to_range = fn str ->
...> [first, last] = str |> String.split("-", trim: true) |> Enum.map(&String.to_integer/1)
...> first..last
...> end
iex> doc = "<weather><zone><name>north</name><wind-speed>5-15</wind-speed></zone></weather>"
iex> doc
...> |> xpath(
...> ~x"//weather/zone"l,
...> name: ~x"//name/text()"s |> transform_by(&String.capitalize/1),
...> wind_speed: ~x"./wind-speed/text()"s |> transform_by(string_to_range)
...> )
[%{name: "North", wind_speed: 5..15}]
"""
def transform_by(%SweetXpath{}=sweet_xpath, fun) when is_function(fun) do
%{sweet_xpath | transform_fun: fun}
end
defp _value(entity) do
cond do
is_record? entity, :xmlText ->
xmlText(entity, :value)
is_record? entity, :xmlComment ->
xmlComment(entity, :value)
is_record? entity, :xmlPI ->
xmlPI(entity, :value)
is_record? entity, :xmlAttribute ->
xmlAttribute(entity, :value)
is_record? entity, :xmlObj ->
xmlObj(entity, :value)
true ->
entity
end
end
defp is_record?(data, kind) do
is_tuple(data) and tuple_size(data) > 0 and :erlang.element(1, data) == kind
end
defp continuation_opts(enum, waiter \\ nil) do
[{
:continuation_fun,
fn xcont, xexc, xstate ->
case :xmerl_scan.cont_state(xstate).({:cont, []}) do
{:halted, _acc} ->
xexc.(xstate)
{:suspended, bin, cont}->
case waiter do
nil -> :ok
{parent, ref} ->
send(parent, {:wait, ref}) # continuation behaviour, pause and wait stream decision
receive do
{:continue, ^ref} -> # stream continuation fun has been called: parse to find more elements
:ok
{:halt, ^ref} -> # stream halted: halt the underlying stream and exit parsing process
cont.({:halt, []})
exit(:normal)
end
end
xcont.(bin, :xmerl_scan.cont_state(cont, xstate))
{:done, _} -> xexc.(xstate)
end
end,
&Enumerable.reduce(split_by_whitespace(enum), &1, fn bin, _ -> {:suspend, bin} end)
},
{
:close_fun,
fn xstate -> # make sure the XML end halts the binary stream (if more bytes are available after XML)
:xmerl_scan.cont_state(xstate).({:halt,[]})
xstate
end
}]
end
defp split_by_whitespace(enum) do
reducer = fn
:last, prev ->
{[:erlang.binary_to_list(prev)], :done}
bin, prev ->
bin = if (prev === ""), do: bin, else: IO.iodata_to_binary([prev, bin])
case split_last_whitespace(bin) do
:white_bin -> {[], bin}
{head, tail} -> {[:erlang.binary_to_list(head)], tail}
end
end
Stream.concat(enum, [:last]) |> Stream.transform("", reducer)
end
defp split_last_whitespace(bin), do: split_last_whitespace(byte_size(bin) - 1, bin)
defp split_last_whitespace(0, _), do: :white_bin
defp split_last_whitespace(size, bin) do
case bin do
<<_::binary - size(size), h>> <> tail when h == ?\s or h == ?\n or h == ?\r or h == ?\t ->
{head, _} = :erlang.split_binary(bin, size + 1)
{head, tail}
_ ->
split_last_whitespace(size - 1, bin)
end
end
defp flush_halt(pid, ref) do
receive do
{:event, ^ref, _} ->
flush_halt(pid, ref) # flush all emitted elems after :halt
{:wait, ^ref} ->
send(pid, {:halt, ref}) # tell the continuation function to halt the underlying stream
end
end
defp get_current_entities(parent, %SweetXpath{path: path, is_list: true, namespaces: namespaces}) do
:xmerl_xpath.string(path, parent, [namespace: namespaces]) |> List.wrap
end
defp get_current_entities(parent, %SweetXpath{path: path, is_list: false, namespaces: namespaces}) do
ret = :xmerl_xpath.string(path, parent, [namespace: namespaces])
if is_record?(ret, :xmlObj) do
ret
else
List.first(ret)
end
end
defp to_cast(value, false, _is_opt?), do: value
defp to_cast(nil, _, true), do: nil
defp to_cast(value, :string, _is_opt?), do: to_string(value)
defp to_cast(value, :integer, _is_opt?), do: String.to_integer(to_string(value))
defp to_cast(value, :float, _is_opt?) do
{float,_} = Float.parse(to_string(value))
float
end
defp to_cast(value, :soft_string, is_opt?) do
if String.Chars.impl_for(value) do
to_string(value)
else
if is_opt?, do: nil, else: ""
end
end
defp to_cast(value, :soft_integer, is_opt?) do
if String.Chars.impl_for(value) do
case Integer.parse(to_string(value)) do
:error-> if is_opt?, do: nil, else: 0
{int,_}-> int
end
else
if is_opt?, do: nil, else: 0
end
end
defp to_cast(value, :soft_float, is_opt?) do
if String.Chars.impl_for(value) do
case Float.parse(to_string(value)) do
:error-> if is_opt?, do: nil, else: 0.0
{float,_}->float
end
else
if is_opt?, do: nil, else: 0.0
end
end
end
| 34.209987 | 165 | 0.605622 |
7925000c9174b2f646cd6f6ebbe7ce1e3d2138d8 | 1,154 | exs | Elixir | test/100/p7_test.exs | penqen/yukicoder-elixir | 4f3e9e4694a14434cc3700280e9205226434733b | [
"MIT"
] | null | null | null | test/100/p7_test.exs | penqen/yukicoder-elixir | 4f3e9e4694a14434cc3700280e9205226434733b | [
"MIT"
] | null | null | null | test/100/p7_test.exs | penqen/yukicoder-elixir | 4f3e9e4694a14434cc3700280e9205226434733b | [
"MIT"
] | null | null | null | defmodule P7Test do
use ExUnit.Case
doctest P7
alias P7
import TestHelper, only: [mesure: 3]
test "01.txt", do: assert P7.solve(5) == true
test "02.txt", do: assert P7.solve(12) == false
test "03.txt", do: assert P7.solve(100_000) == true
test "04.txt", do: assert P7.solve(2_252) == true
test "05.txt", do: assert P7.solve(1_075) == true
test "06.txt", do: assert P7.solve(1_059) == false
test "07.txt", do: assert P7.solve(5_143) == true
test "08.txt", do: assert P7.solve(4_145) == true
test "09.txt", do: assert P7.solve(2_675) == true
test "10.txt", do: assert P7.solve(6_447) == false
test "99sys01.txt", do: assert P7.solve(4_171) == true
test "99sys02.txt", do: assert P7.solve(8_359) == true
test "99sys03.txt", do: assert P7.solve(8_643) == true
test "sys01.txt", do: assert P7.solve(9_981) == false
test "sys02.txt", do: assert P7.solve(9_711) == false
test "sys03.txt", do: assert P7.solve(9_299) == true
@tag timeout: :infinity
test "time" do
algs = [
[&P7.solve/1, [10_000], "dp"],
[&P7.solve_light/1, [10_000], "dp(L)"],
[&P7.solve_full/1, [10_000], "dp(FL)"]
]
mesure("10_000", 10, algs)
end
end | 32.055556 | 55 | 0.643847 |
792547d271ba018a33d5fb9dc324fe3a120d0330 | 190 | ex | Elixir | lib/live_view/sales.ex | eduardo-rdguez/phoenix-liveview | 66ced7a3cd3c8f57c1e5e8c3646dcd7c837d202c | [
"MIT"
] | null | null | null | lib/live_view/sales.ex | eduardo-rdguez/phoenix-liveview | 66ced7a3cd3c8f57c1e5e8c3646dcd7c837d202c | [
"MIT"
] | 4 | 2021-07-19T22:03:03.000Z | 2021-07-22T16:38:34.000Z | lib/live_view/sales.ex | eduardo-rdguez/phoenix-liveview | 66ced7a3cd3c8f57c1e5e8c3646dcd7c837d202c | [
"MIT"
] | null | null | null | defmodule LiveView.Sales do
def new_orders do
Enum.random(5..20)
end
def sales_amount do
Enum.random(100..1000)
end
def satisfaction do
Enum.random(90..95)
end
end
| 13.571429 | 27 | 0.678947 |
79256312e3824cfbfb6e6dd2ea3e1acdde1933ec | 8,797 | ex | Elixir | clients/you_tube_analytics/lib/google_api/you_tube_analytics/v1/api/group_items.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube_analytics/lib/google_api/you_tube_analytics/v1/api/group_items.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube_analytics/lib/google_api/you_tube_analytics/v1/api/group_items.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTubeAnalytics.V1.Api.GroupItems do
@moduledoc """
API calls for all endpoints tagged `GroupItems`.
"""
alias GoogleApi.YouTubeAnalytics.V1.Connection
alias GoogleApi.Gax.{Request, Response}
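  # Illustrative usage (the connection setup and group id below are assumptions,
  # not part of this generated module):
  #
  #     conn = GoogleApi.YouTubeAnalytics.V1.Connection.new(oauth_token)
  #     {:ok, response} = youtube_analytics_group_items_list(conn, group_id)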
@doc """
Removes an item from a group.
## Parameters
- connection (GoogleApi.YouTubeAnalytics.V1.Connection): Connection to server
- id (String.t): The id parameter specifies the YouTube group item ID for the group that is being deleted.
- opts (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
- :onBehalfOfContentOwner (String.t): Note: This parameter is intended exclusively for YouTube content partners. The onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec youtube_analytics_group_items_delete(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, nil} | {:error, Tesla.Env.t()}
def youtube_analytics_group_items_delete(connection, id, opts \\ []) do
optional_params = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:onBehalfOfContentOwner => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/groupItems")
|> Request.add_param(:query, :id, id)
|> Request.add_optional_params(optional_params, opts)
connection
|> Connection.execute(request)
|> Response.decode(decode: false)
end
@doc """
Creates a group item.
## Parameters
- connection (GoogleApi.YouTubeAnalytics.V1.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
- :onBehalfOfContentOwner (String.t): Note: This parameter is intended exclusively for YouTube content partners. The onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.
- :body (GroupItem):
## Returns
{:ok, %GoogleApi.YouTubeAnalytics.V1.Model.GroupItem{}} on success
{:error, info} on failure
"""
@spec youtube_analytics_group_items_insert(Tesla.Env.client(), keyword()) ::
{:ok, GoogleApi.YouTubeAnalytics.V1.Model.GroupItem.t()} | {:error, Tesla.Env.t()}
def youtube_analytics_group_items_insert(connection, opts \\ []) do
optional_params = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:onBehalfOfContentOwner => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/groupItems")
|> Request.add_optional_params(optional_params, opts)
connection
|> Connection.execute(request)
|> Response.decode(struct: %GoogleApi.YouTubeAnalytics.V1.Model.GroupItem{})
end
@doc """
Returns a collection of group items that match the API request parameters.
## Parameters
- connection (GoogleApi.YouTubeAnalytics.V1.Connection): Connection to server
- group_id (String.t): The id parameter specifies the unique ID of the group for which you want to retrieve group items.
- opts (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
- :onBehalfOfContentOwner (String.t): Note: This parameter is intended exclusively for YouTube content partners. The onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.
## Returns
{:ok, %GoogleApi.YouTubeAnalytics.V1.Model.GroupItemListResponse{}} on success
{:error, info} on failure
"""
@spec youtube_analytics_group_items_list(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.YouTubeAnalytics.V1.Model.GroupItemListResponse.t()}
| {:error, Tesla.Env.t()}
def youtube_analytics_group_items_list(connection, group_id, opts \\ []) do
optional_params = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:onBehalfOfContentOwner => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/groupItems")
|> Request.add_param(:query, :groupId, group_id)
|> Request.add_optional_params(optional_params, opts)
connection
|> Connection.execute(request)
|> Response.decode(struct: %GoogleApi.YouTubeAnalytics.V1.Model.GroupItemListResponse{})
end
end
| 51.145349 | 713 | 0.721041 |
7925794e3cdfd8ac3db1be78921751cf5fd9361c | 1,085 | ex | Elixir | lib/hologram/compiler/transformers/alias_directive_transformer.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 40 | 2022-01-19T20:27:36.000Z | 2022-03-31T18:17:41.000Z | lib/hologram/compiler/transformers/alias_directive_transformer.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 42 | 2022-02-03T22:52:43.000Z | 2022-03-26T20:57:32.000Z | lib/hologram/compiler/transformers/alias_directive_transformer.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 3 | 2022-02-10T04:00:37.000Z | 2022-03-08T22:07:45.000Z | defmodule Hologram.Compiler.AliasDirectiveTransformer do
alias Hologram.Compiler.Helpers
alias Hologram.Compiler.IR.AliasDirective
def transform({:alias, _, [{{_, _, [{_, _, module_segs}, _]}, _, aliases}, _]}) do
transform_multi_alias(module_segs, aliases)
end
def transform({:alias, _, [{{_, _, [{_, _, module_segs}, _]}, _, aliases}]}) do
transform_multi_alias(module_segs, aliases)
end
def transform({:alias, _, [{_, _, module_segs}]}) do
module = Helpers.module(module_segs)
%AliasDirective{module: module, as: [List.last(module_segs)]}
end
def transform({:alias, _, [{_, _, module_segs}, opts]}) do
module = Helpers.module(module_segs)
as =
if Keyword.has_key?(opts, :as) do
elem(opts[:as], 2)
else
[List.last(module_segs)]
end
%AliasDirective{module: module, as: as}
end
defp transform_multi_alias(module_segs, aliases) do
Enum.map(aliases, fn {:__aliases__, _, as} ->
module = Helpers.module(module_segs ++ as)
%AliasDirective{module: module, as: as}
end)
end
end
| 28.552632 | 84 | 0.656221 |
792583a9cbad107528033692582ce89442379a3e | 1,033 | exs | Elixir | exercism.io/elixir/bob/bob.exs | metabrain/elixir-playground | 0c114ee8a8cb2d610f54b9cca83cbe6917226c33 | [
"MIT"
] | null | null | null | exercism.io/elixir/bob/bob.exs | metabrain/elixir-playground | 0c114ee8a8cb2d610f54b9cca83cbe6917226c33 | [
"MIT"
] | null | null | null | exercism.io/elixir/bob/bob.exs | metabrain/elixir-playground | 0c114ee8a8cb2d610f54b9cca83cbe6917226c33 | [
"MIT"
] | null | null | null | # Bob
# Bob is a lackadaisical teenager. In conversation, his responses are very limited.
# Bob answers 'Sure.' if you ask him a question.
# He answers 'Whoa, chill out!' if you yell at him.
# He says 'Fine. Be that way!' if you address him without actually saying anything.
# He answers 'Whatever.' to anything else.
defmodule Bob do
def hey(input) do
case input do
"" -> "Fine. Be that way!"
" " <> tail -> hey(tail)
_ -> heyRec(String.codepoints(input), true, false)
end
end
  def heyRec([], false, _), do: "Whatever."
  def heyRec([], true, true), do: "Whoa, chill out!"
  def heyRec([], true, false), do: "Whatever."
  def heyRec(["?"], _, _), do: "Sure."
  def heyRec(l, yell, letters) do
    head = hd(l)
    tail = tl(l)
    # A codepoint counts towards "has letters" only if it is alphabetic.
    headletter = letters or head =~ ~r/[[:alpha:]]/u
    case String.upcase(head) do
      ^head -> heyRec(tail, yell, headletter)
      _ -> heyRec(tail, false, false)
    end
  end
end
| 27.918919 | 83 | 0.596321 |
79258e08fb9414648bed9b2f07a85bfa4961a5df | 1,119 | exs | Elixir | config/config.exs | ramortegui/pcal | 6a5a3554f6cd1f3fe2e8458de1d8edf5861cddd1 | [
"MIT"
] | null | null | null | config/config.exs | ramortegui/pcal | 6a5a3554f6cd1f3fe2e8458de1d8edf5861cddd1 | [
"MIT"
] | null | null | null | config/config.exs | ramortegui/pcal | 6a5a3554f6cd1f3fe2e8458de1d8edf5861cddd1 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :pcal, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:pcal, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.096774 | 73 | 0.748883 |
7925954b4b2d89bdb41d5074fe022e88cb179f8d | 479 | ex | Elixir | lib/ex_admin/themes/active_admin/page.ex | andriybohdan/ex_admin | e31c725078ac4e7390204a87d96360a21ffe7b90 | [
"MIT"
] | 1 | 2018-08-30T20:20:56.000Z | 2018-08-30T20:20:56.000Z | lib/ex_admin/themes/active_admin/page.ex | 8thlight/ex_admin | 314d4068270c47799ec54f719073a565222bcfad | [
"MIT"
] | null | null | null | lib/ex_admin/themes/active_admin/page.ex | 8thlight/ex_admin | 314d4068270c47799ec54f719073a565222bcfad | [
"MIT"
] | 2 | 2018-07-12T07:44:50.000Z | 2018-07-19T11:45:09.000Z | defmodule ExAdmin.Theme.ActiveAdmin.Page do
@moduledoc false
use Xain
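  # Lays out a list of already-rendered fragments as equal-width columns on a
  # 12-column grid. Illustrative only: `columns([left, right])` emits two `.column`
  # divs of roughly 48% width separated by a 2% right margin.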
def columns(cols) do
col_count = Enum.count(cols)
count = Kernel.div 12, col_count
div ".columns" do
for {html, inx} <- Enum.with_index(cols) do
style = "width: #{100 / (12 / count) - 2}%;" <>
if inx < (col_count - 1), do: " margin-right: 2%;", else: ""
div html, class: "column", style: style
end
div("", style: "clear:both;")
end
end
end
| 26.611111 | 70 | 0.574113 |
79259d4c8b7eafdaf1d20a059aee2daaadb3ed5e | 475 | exs | Elixir | mix.exs | skunkwerks/ucl-elixir | 059183f107db4997958e8cd57ae5a2e1557835a7 | [
"MIT"
] | null | null | null | mix.exs | skunkwerks/ucl-elixir | 059183f107db4997958e8cd57ae5a2e1557835a7 | [
"MIT"
] | null | null | null | mix.exs | skunkwerks/ucl-elixir | 059183f107db4997958e8cd57ae5a2e1557835a7 | [
"MIT"
] | null | null | null | defmodule UCL.MixProject do
use Mix.Project
def project do
[
app: :ucl,
version: "0.1.0",
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:benchee, "~> 1.0", only: :dev}
]
end
end
| 16.964286 | 59 | 0.56 |
7925be5b1b767a0468681b25d3ec7da4027c22c1 | 1,775 | ex | Elixir | lib/eth_event/transport.ex | etherharvest/eth_event | 488f526b382ca1c0ed5235d3a12ec059b0229fc4 | [
"MIT"
] | 1 | 2022-01-05T16:28:18.000Z | 2022-01-05T16:28:18.000Z | lib/eth_event/transport.ex | etherharvest/eth_event | 488f526b382ca1c0ed5235d3a12ec059b0229fc4 | [
"MIT"
] | null | null | null | lib/eth_event/transport.ex | etherharvest/eth_event | 488f526b382ca1c0ed5235d3a12ec059b0229fc4 | [
"MIT"
] | null | null | null | defmodule EthEvent.Transport do
@moduledoc """
Transport layer for JSON RPC from Ethereum nodes.
"""
use Tesla, docs: false, only: [:post]
alias EthEvent.Settings
############
# Public API
@doc """
Execute remote `method` with some optional `parameters`.
"""
@spec rpc(binary()) :: {:ok, term()} | {:error, term()}
@spec rpc(binary(), term()) :: {:ok, term()} | {:error, term()}
def rpc(method, parameters \\ []) do
make_ref()
|> :erlang.phash2()
|> rpc(method, parameters)
end
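  # Illustrative only (depends on the node configured in `EthEvent.Settings`):
  # `rpc("eth_blockNumber")` returns `{:ok, "0x..."}` on success or
  # `{:error, reason}` on failure.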
@doc """
Sends a JSON RPC request to a Ethereum node. Receives an `id`, a `method`
and a list for method `parameters`.
"""
@spec rpc(integer(), binary(), term()) :: {:ok, term()} | {:error, term()}
def rpc(id, method, params) do
key = Settings.eth_event_node_key()
data =
id
|> base_data(method)
|> Map.put_new("params", params)
client = build_client()
with {:ok, response} <- post(client, key, data) do
build_response(response)
end
end
#########
# Helpers
@doc false
def base_data(id, method) do
%{
"jsonrpc" => "2.0",
"id" => id,
"method" => method,
}
end
@doc false
def build_response(
%Tesla.Env{status: 200, body: body}
) when is_binary(body) do
with {:ok, decoded} <- Jason.decode(body) do
build_response(decoded)
end
end
def build_response(%{"result" => result}) do
{:ok, result}
end
def build_response(%{"error" => %{"message" => reason}}) do
{:error, reason}
end
def build_response(_) do
{:error, "Malformed response"}
end
@doc false
def build_client do
Tesla.build_client([
{Tesla.Middleware.BaseUrl, Settings.eth_event_node_url()},
Tesla.Middleware.JSON
])
end
end
| 21.91358 | 76 | 0.589859 |
7925d017c20f618c2efcc25d6467ca7593cdaadf | 2,625 | ex | Elixir | clients/security_center/lib/google_api/security_center/v1/model/google_cloud_securitycenter_v1p1beta1_security_marks.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | clients/security_center/lib/google_api/security_center/v1/model/google_cloud_securitycenter_v1p1beta1_security_marks.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | clients/security_center/lib/google_api/security_center/v1/model/google_cloud_securitycenter_v1p1beta1_security_marks.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SecurityCenter.V1.Model.GoogleCloudSecuritycenterV1p1beta1SecurityMarks do
@moduledoc """
User specified security marks that are attached to the parent Cloud Security
Command Center (Cloud SCC) resource. Security marks are scoped within a Cloud
SCC organization -- they can be modified and viewed by all users who have
proper permissions on the organization.
## Attributes
* `marks` (*type:* `map()`, *default:* `nil`) - Mutable user specified security marks belonging to the parent resource.
Constraints are as follows:
* Keys and values are treated as case insensitive
* Keys must be between 1 - 256 characters (inclusive)
* Keys must be letters, numbers, underscores, or dashes
* Values have leading and trailing whitespace trimmed, remaining
characters must be between 1 - 4096 characters (inclusive)
* `name` (*type:* `String.t`, *default:* `nil`) - The relative resource name of the SecurityMarks. See:
https://cloud.google.com/apis/design/resource_names#relative_resource_name
Examples:
"organizations/{organization_id}/assets/{asset_id}/securityMarks"
"organizations/{organization_id}/sources/{source_id}/findings/{finding_id}/securityMarks".
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:marks => map(),
:name => String.t()
}
field(:marks, type: :map)
field(:name)
end
defimpl Poison.Decoder,
for: GoogleApi.SecurityCenter.V1.Model.GoogleCloudSecuritycenterV1p1beta1SecurityMarks do
def decode(value, options) do
GoogleApi.SecurityCenter.V1.Model.GoogleCloudSecuritycenterV1p1beta1SecurityMarks.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.SecurityCenter.V1.Model.GoogleCloudSecuritycenterV1p1beta1SecurityMarks do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.043478 | 123 | 0.736381 |
7925f7cde262274cb88c846e814b68c231fdff19 | 314 | ex | Elixir | lib/ext/map.ex | remerle/asciinema-server | 895bf5a7ffb7db1b418c97cfd2ac9136c46dfb57 | [
"Apache-2.0"
] | null | null | null | lib/ext/map.ex | remerle/asciinema-server | 895bf5a7ffb7db1b418c97cfd2ac9136c46dfb57 | [
"Apache-2.0"
] | null | null | null | lib/ext/map.ex | remerle/asciinema-server | 895bf5a7ffb7db1b418c97cfd2ac9136c46dfb57 | [
"Apache-2.0"
] | null | null | null | defmodule Ext.Map do
def rename(map, old, new) do
case Map.pop(map, old, :not_found) do
{:not_found, _} -> map
{value, map} -> Map.put(map, new, value)
end
end
def rename(map, mapping) do
Enum.reduce(mapping, map, fn {old, new}, map ->
rename(map, old, new)
end)
end
end
| 20.933333 | 51 | 0.585987 |
79263310e2e5dd0b68a835a1e50122e16fcecde7 | 2,973 | ex | Elixir | lib/grovepi/board.ex | Tony1928/rpi-grovepi | 35fcf31607904aaff4b766a87e1328ff13866585 | [
"Apache-2.0"
] | 2 | 2018-09-06T21:30:01.000Z | 2018-09-06T21:30:08.000Z | lib/grovepi/board.ex | schainks/grovepi | 2de21f12a2ab28f9788a2add4c6409871e098479 | [
"Apache-2.0"
] | null | null | null | lib/grovepi/board.ex | schainks/grovepi | 2de21f12a2ab28f9788a2add4c6409871e098479 | [
"Apache-2.0"
] | null | null | null | defmodule GrovePi.Board do
@moduledoc """
Low-level interface for sending raw requests and receiving responses from a
GrovePi hat. Create one of these first and then use one of the other GrovePi
modules for interacting with a connected sensor, light, or actuator.
To check that your GrovePi hardware is working, try this:
```elixir
iex> GrovePi.Board.firmware_version()
"1.2.2"
```
"""
use GrovePi.I2C
@i2c_retry_count 2
@doc """
Start a connection process to the GrovePi board on I2C bus "i2c-1" at the
given `address`, registered under a name derived from `prefix`.
"""
@spec start_link(byte, atom) :: {:ok, pid} | {:error, any}
def start_link(address, prefix, opts \\ []) when is_integer(address) do
opts = Keyword.put_new(opts, :name, i2c_name(prefix))
@i2c.start_link("i2c-1", address, opts)
end
def i2c_name(prefix) do
String.to_atom("#{prefix}.#{__MODULE__}")
end
@doc """
Get the version of firmware running on the GrovePi's microcontroller.
"""
@spec firmware_version(atom) :: binary | {:error, term}
def firmware_version(prefix \\ Default) do
with :ok <- send_request(prefix, <<8, 0, 0, 0>>),
<<_, major, minor, patch>> <- get_response(prefix, 4),
do: "#{major}.#{minor}.#{patch}"
end
@doc """
Send a request to the GrovePi. This is not normally called directly
except when interacting with an unsupported sensor.
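For example, the firmware version query issued by `firmware_version/1` is the
4-byte command `<<8, 0, 0, 0>>`:

```elixir
GrovePi.Board.send_request(<<8, 0, 0, 0>>)
```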
"""
@spec send_request(GenServer.server, binary) :: :ok | {:error, term}
def send_request(prefix, message) when byte_size(message) == 4 do
send_request_with_retry(i2c_name(prefix), message, @i2c_retry_count)
end
def send_request(message) do
send_request(Default, message)
end
@doc """
Get a response to a previously sent request to the GrovePi. This is
not normally called directly.
"""
@spec get_response(atom, integer) :: binary | {:error, term}
def get_response(prefix, len) do
get_response_with_retry(i2c_name(prefix), len, @i2c_retry_count)
end
@spec get_response(integer) :: binary | {:error, term}
def get_response(len) do
get_response(Default, len)
end
@doc """
Write directly to a device on the I2C bus. This is used for sensors
that are not controlled by the GrovePi's microcontroller.
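For example (the device address and payload here are hypothetical):

```elixir
GrovePi.Board.i2c_write_device(0x40, <<0x00, 0x01>>)
```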
"""
def i2c_write_device(address, buffer) do
@i2c.write_device(i2c_name(Default), address, buffer)
end
# The GrovePi has intermittent I2C communication failures. These
# are usually harmless, so automatically retry.
defp send_request_with_retry(_board, _message, 0), do: {:error, :too_many_retries}
defp send_request_with_retry(board, message, retries_left) do
case @i2c.write(board, message) do
{:error, _} -> send_request_with_retry(board, message, retries_left - 1)
response -> response
end
end
defp get_response_with_retry(_board, _len, 0), do: {:error, :too_many_retries}
defp get_response_with_retry(board, len, retries_left) do
case @i2c.read(board, len) do
{:error, _} -> get_response_with_retry(board, len, retries_left - 1)
response -> response
end
end
end
| 31.62766 | 84 | 0.695257 |
79264ed7b5893ff4d370a31ad66cd06a2aee804f | 1,892 | ex | Elixir | clients/dataproc/lib/google_api/dataproc/v1/model/list_clusters_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dataproc/lib/google_api/dataproc/v1/model/list_clusters_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/list_clusters_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Dataproc.V1.Model.ListClustersResponse do
@moduledoc """
The list of all clusters in a project.
## Attributes
* `clusters` (*type:* `list(GoogleApi.Dataproc.V1.Model.Cluster.t)`, *default:* `nil`) - Output only. The clusters in the project.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Output only. This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent ListClustersRequest.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:clusters => list(GoogleApi.Dataproc.V1.Model.Cluster.t()),
:nextPageToken => String.t()
}
field(:clusters, as: GoogleApi.Dataproc.V1.Model.Cluster, type: :list)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.Dataproc.V1.Model.ListClustersResponse do
def decode(value, options) do
GoogleApi.Dataproc.V1.Model.ListClustersResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Dataproc.V1.Model.ListClustersResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.84 | 253 | 0.739429 |
792683e6de0f9bf64e4c70f38e392246ebd17648 | 2,633 | ex | Elixir | lib/string_pinyin.ex | zhangsoledad/alchemic_pinyin | 5426c62b1a37e6b134d285c8f5ace9a52b3c36cb | [
"MIT"
] | 22 | 2016-03-29T07:50:15.000Z | 2018-08-23T09:49:41.000Z | lib/string_pinyin.ex | zhangsoledad/alchemic_pinyin | 5426c62b1a37e6b134d285c8f5ace9a52b3c36cb | [
"MIT"
] | 2 | 2016-11-02T08:06:47.000Z | 2021-12-23T13:22:38.000Z | lib/string_pinyin.ex | zhangsoledad/alchemic_pinyin | 5426c62b1a37e6b134d285c8f5ace9a52b3c36cb | [
"MIT"
] | 4 | 2016-03-31T05:24:30.000Z | 2021-10-07T02:06:49.000Z | defmodule String.Pinyin do
@moduledoc ~S"""
A Pinyin Library.
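A usage sketch (the exact strings returned depend on the bundled
`data/pinyin.dat`):

    String.Pinyin.to_pinyin("你好")                            # e.g. "ni hao"
    String.Pinyin.to_pinyin("你好", tone: true, splitter: "-") # e.g. "nǐ-hǎo"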
"""
@external_resource data_path = Path.join(__DIR__, "data/pinyin.dat")
file_stream = File.stream!(data_path) |> Stream.with_index
pinyin_codes = Enum.reduce(file_stream, [], fn({line, index}, acc) ->
[han, pinyin, tone, pinyin_with_tone ] = line
|> String.rstrip(?\n)
|> :binary.split(";", [:global])
[ { han, pinyin, tone, pinyin_with_tone , index} | acc ]
end) |> Enum.reverse
line_count = length(pinyin_codes)
def to_pinyin(string) do
to_pinyin(string, [])
end
def to_pinyin(string, opts) do
splitter = Keyword.get(opts, :splitter, " ")
tone = Keyword.get(opts, :tone, false)
if tone do
do_to_pinyin_tone(string, "", splitter)
else
do_to_pinyin(string, "", splitter)
end
end
def to_pinyin(string, opts, fun) when is_function(fun) do
fun.(to_pinyin(string, opts))
end
#do_to_pinyin
for {han, pinyin, _tone, pinyin_with_tone, index} <- pinyin_codes do
ProgressBar.render(index , line_count)
defp do_to_pinyin(unquote(han) <> rest, acc, splitter) do
do_to_pinyin(rest, do_acc(acc, splitter, unquote(pinyin), rest), splitter)
end
defp do_to_pinyin_tone(unquote(han) <> rest, acc, splitter) do
do_to_pinyin_tone(rest, do_acc(acc, splitter, unquote(pinyin_with_tone), rest), splitter)
end
end
defp do_to_pinyin(<<char, rest::binary>>, acc, splitter) do
case rest |> String.first |> add_splitter? do
false ->
do_to_pinyin(rest, <<acc::binary, char>>, splitter)
true ->
char_splitter = <<char, splitter::binary>>
do_to_pinyin(rest, <<acc::binary, char_splitter::binary>>, splitter)
end
end
defp do_to_pinyin("", acc, _splitter), do: acc
defp do_to_pinyin_tone(<<char, rest::binary>>, acc, splitter) do
case rest |> String.first |> add_splitter? do
false ->
do_to_pinyin_tone(rest, <<acc::binary, char>>, splitter)
true ->
char_splitter = <<char, splitter::binary>>
do_to_pinyin_tone(rest, <<acc::binary, char_splitter::binary>>, splitter)
end
end
defp do_to_pinyin_tone("", acc, _splitter), do: acc
#do_to_pinyin
defp do_acc(acc, _splitter, pinyin, "") do
acc <> pinyin
end
defp do_acc(acc, splitter, pinyin, _rest) do
acc <> pinyin <> splitter
end
defp add_splitter?(nil) do
false
end
defp add_splitter?(<<char::utf8>>) do
!(( char >= 97 and char <= 122 ) or # a-z
( char >= 65 and char <= 90 ) or # A-Z
( char >= 48 and char <= 57 )) # 0-9
end
defp add_splitter?(_) do
false
end
end
| 27.715789 | 95 | 0.640334 |
79268720b3c839203799c5093b32db3fc34cc57e | 12,830 | ex | Elixir | lib/changelog/schema/episode/episode.ex | snyk-omar/changelog.com | 66a8cff17ed8a237e439976aa7fb96b58ef276a3 | [
"MIT"
] | null | null | null | lib/changelog/schema/episode/episode.ex | snyk-omar/changelog.com | 66a8cff17ed8a237e439976aa7fb96b58ef276a3 | [
"MIT"
] | null | null | null | lib/changelog/schema/episode/episode.ex | snyk-omar/changelog.com | 66a8cff17ed8a237e439976aa7fb96b58ef276a3 | [
"MIT"
] | null | null | null | defmodule Changelog.Episode do
use Changelog.Schema, default_sort: :published_at
alias Changelog.{
EpisodeHost,
EpisodeGuest,
EpisodeRequest,
EpisodeTopic,
EpisodeStat,
EpisodeSponsor,
Files,
Github,
NewsItem,
Notifier,
Podcast,
Regexp,
Search,
Transcripts
}
alias ChangelogWeb.{EpisodeView, TimeView}
defenum(Type, full: 0, bonus: 1, trailer: 2)
schema "episodes" do
field :slug, :string
field :guid, :string
field :title, :string
field :subtitle, :string
field :type, Type
field :featured, :boolean, default: false
field :highlight, :string
field :subhighlight, :string
field :summary, :string
field :notes, :string
field :doc_url, :string
field :published, :boolean, default: false
field :published_at, :utc_datetime
field :recorded_at, :utc_datetime
field :recorded_live, :boolean, default: false
field :youtube_id, :string
field :audio_file, Files.Audio.Type
field :audio_bytes, :integer
field :audio_duration, :integer
field :plusplus_file, Files.PlusPlus.Type
field :plusplus_bytes, :integer
field :plusplus_duration, :integer
field :download_count, :float
field :import_count, :float
field :reach_count, :integer
field :transcript, {:array, :map}
# this exists merely to satisfy the compiler
# see load_news_item/1 and get_news_item/1 for actual use
field :news_item, :map, virtual: true
belongs_to :podcast, Podcast
belongs_to :episode_request, EpisodeRequest, foreign_key: :request_id
has_many :episode_hosts, EpisodeHost, on_delete: :delete_all
has_many :hosts, through: [:episode_hosts, :person]
has_many :episode_guests, EpisodeGuest, on_delete: :delete_all
has_many :guests, through: [:episode_guests, :person]
has_many :episode_topics, EpisodeTopic, on_delete: :delete_all
has_many :topics, through: [:episode_topics, :topic]
has_many :episode_sponsors, EpisodeSponsor, on_delete: :delete_all
has_many :sponsors, through: [:episode_sponsors, :sponsor]
has_many :episode_stats, EpisodeStat, on_delete: :delete_all
timestamps()
end
def distinct_podcast(query), do: from(q in query, distinct: q.podcast_id)
def featured(query \\ __MODULE__), do: from(q in query, where: q.featured == true)
def next_after(query \\ __MODULE__, episode),
do: from(q in query, where: q.published_at > ^episode.published_at)
def previous_to(query \\ __MODULE__, episode),
do: from(q in query, where: q.published_at < ^episode.published_at)
def published(query \\ __MODULE__), do: published(query, Timex.now())
def published(query, as_of),
do: from(q in query, where: q.published, where: q.published_at <= ^Timex.to_datetime(as_of))
def recorded_between(query, start_time, end_time),
do: from(q in query, where: q.recorded_at >= ^start_time, where: q.recorded_at < ^end_time)
def recorded_future_to(query, time), do: from(q in query, where: q.recorded_at > ^time)
def recorded_live(query \\ __MODULE__),
do: from(q in query, where: q.recorded_live == true)
def scheduled(query \\ __MODULE__),
do: from(q in query, where: q.published, where: q.published_at > ^Timex.now())
def search(query, term),
do: from(q in query, where: fragment("search_vector @@ plainto_tsquery('english', ?)", ^term))
def unpublished(query \\ __MODULE__), do: from(q in query, where: not q.published)
def top_reach_first(query \\ __MODULE__),
do: from(q in query, order_by: [desc: :reach_count])
def with_ids(query \\ __MODULE__, ids), do: from(q in query, where: q.id in ^ids)
def with_numbered_slug(query \\ __MODULE__),
do: from(q in query, where: fragment("slug ~ E'^\\\\d+$'"))
def with_slug(query \\ __MODULE__, slug), do: from(q in query, where: q.slug == ^slug)
def with_podcast_slug(query \\ __MODULE__, slug)
def with_podcast_slug(query, nil), do: query
def with_podcast_slug(query, slug),
do: from(q in query, join: p in Podcast, where: q.podcast_id == p.id, where: p.slug == ^slug)
def full(query \\ __MODULE__), do: from(q in query, where: q.type == ^:full)
def bonus(query \\ __MODULE__), do: from(q in query, where: q.type == ^:bonus)
def trailer(query \\ __MODULE__), do: from(q in query, where: q.type == ^:trailer)
def exclude_transcript(query \\ __MODULE__) do
fields = __MODULE__.__schema__(:fields) |> Enum.reject(&(&1 == :transcript))
from(q in query, select: ^fields)
end
def has_transcript(%{transcript: nil}), do: false
def has_transcript(%{transcript: []}), do: false
def has_transcript(_), do: true
def is_public(episode, as_of \\ Timex.now()) do
is_published(episode) && Timex.before?(episode.published_at, as_of)
end
def is_published(episode), do: episode.published
def is_publishable(episode) do
validated =
episode
|> change(%{})
|> validate_required([:slug, :title, :published_at, :summary, :audio_file])
|> validate_format(:slug, Regexp.slug(), message: Regexp.slug_message())
|> unique_constraint(:slug, name: :episodes_slug_podcast_id_index)
|> cast_assoc(:episode_hosts)
validated.valid? && !is_published(episode)
end
def admin_changeset(struct, params \\ %{}) do
struct
|> cast(params, [
:slug,
:title,
:subtitle,
:published,
:featured,
:request_id,
:highlight,
:subhighlight,
:summary,
:notes,
:doc_url,
:published_at,
:recorded_at,
:recorded_live,
:youtube_id,
:guid,
:type
])
|> cast_attachments(params, [:audio_file, :plusplus_file])
|> validate_required([:slug, :title, :published, :featured])
|> validate_format(:slug, Regexp.slug(), message: Regexp.slug_message())
|> validate_published_has_published_at()
|> unique_constraint(:slug, name: :episodes_slug_podcast_id_index)
|> cast_assoc(:episode_hosts)
|> cast_assoc(:episode_guests)
|> cast_assoc(:episode_sponsors)
|> cast_assoc(:episode_topics)
|> derive_audio_bytes_and_duration()
|> derive_plusplus_bytes_and_duration()
end
def get_news_item(episode) do
episode
|> NewsItem.with_episode()
|> Repo.one()
end
def has_news_item(episode) do
episode
|> NewsItem.with_episode()
|> Repo.exists?()
end
def load_news_item(episode) do
item = episode |> get_news_item() |> NewsItem.load_object(episode)
Map.put(episode, :news_item, item)
end
def object_id(episode), do: "#{episode.podcast_id}:#{episode.id}"
def participants(episode) do
episode =
if Ecto.assoc_loaded?(episode.guests) and Ecto.assoc_loaded?(episode.hosts) do
episode
else
episode
|> preload_guests()
|> preload_hosts()
end
episode.guests ++ episode.hosts
end
def preload_all(episode) do
episode
|> preload_podcast()
|> preload_topics()
|> preload_guests()
|> preload_hosts()
|> preload_sponsors()
|> preload_episode_request()
end
def preload_episode_request(query = %Ecto.Query{}),
do: Ecto.Query.preload(query, :episode_request)
def preload_episode_request(episode), do: Repo.preload(episode, :episode_request)
def preload_hosts(query = %Ecto.Query{}) do
query
|> Ecto.Query.preload(episode_hosts: ^EpisodeHost.by_position())
|> Ecto.Query.preload(:hosts)
end
def preload_hosts(episode) do
episode
|> Repo.preload(episode_hosts: {EpisodeHost.by_position(), :person})
|> Repo.preload(:hosts)
end
def preload_guests(query = %Ecto.Query{}) do
query
|> Ecto.Query.preload(episode_guests: ^EpisodeGuest.by_position())
|> Ecto.Query.preload(:guests)
end
def preload_guests(episode) do
episode
|> Repo.preload(episode_guests: {EpisodeGuest.by_position(), :person})
|> Repo.preload(:guests)
end
def preload_podcast(nil), do: nil
def preload_podcast(query = %Ecto.Query{}), do: Ecto.Query.preload(query, :podcast)
def preload_podcast(episode), do: Repo.preload(episode, :podcast)
def preload_sponsors(query = %Ecto.Query{}) do
query
|> Ecto.Query.preload(episode_sponsors: ^EpisodeSponsor.by_position())
|> Ecto.Query.preload(:sponsors)
end
def preload_sponsors(episode) do
episode
|> Repo.preload(episode_sponsors: {EpisodeSponsor.by_position(), :sponsor})
|> Repo.preload(:sponsors)
end
def preload_topics(query = %Ecto.Query{}) do
query
|> Ecto.Query.preload(episode_topics: ^EpisodeTopic.by_position())
|> Ecto.Query.preload(:topics)
end
def preload_topics(episode) do
episode
|> Repo.preload(episode_topics: {EpisodeTopic.by_position(), :topic})
|> Repo.preload(:topics)
end
def update_stat_counts(episode) do
stats = Repo.all(assoc(episode, :episode_stats))
new_downloads =
stats
|> Enum.map(& &1.downloads)
|> Enum.sum()
|> Kernel.+(episode.import_count)
|> Kernel./(1)
|> Float.round(2)
new_reach =
stats
|> Enum.map(& &1.uniques)
|> Enum.sum()
episode
|> change(%{download_count: new_downloads, reach_count: new_reach})
|> Repo.update!()
end
def update_notes(episode, text) do
episode
|> change(notes: text)
|> Repo.update!()
end
def update_transcript(episode, text) do
case Transcripts.Parser.parse_text(text, participants(episode)) do
{:ok, parsed} ->
updated =
episode
|> change(transcript: parsed)
|> Repo.update!()
if !has_transcript(episode) && has_transcript(updated) do
Task.start_link(fn -> Notifier.notify(updated) end)
end
Task.start_link(fn -> Search.save_item(updated) end)
updated
{:error, e} ->
source = Github.Source.new("transcripts", episode)
Github.Issuer.create(source, e)
end
end
def flatten_for_filtering(query \\ __MODULE__) do
query =
from episode in query,
left_join: podcast in assoc(episode, :podcast),
left_join: hosts in assoc(episode, :hosts),
left_join: guests in assoc(episode, :guests),
left_join: topics in assoc(episode, :topics),
preload: [podcast: podcast, hosts: hosts, guests: guests, topics: topics]
result =
query
|> published()
|> exclude_transcript()
|> Repo.all()
|> Enum.map(fn episode ->
extract_episode_fields(episode)
end)
{:ok, result}
end
def flatten_episode_for_filtering(episode) do
episode
|> Repo.preload([:podcast, :hosts, :guests, :topics])
|> extract_episode_fields()
end
defp extract_episode_fields(episode) do
%{
id: episode.id,
slug: episode.slug,
title: episode.title,
type: episode.type,
published_at: episode.published_at,
podcast: episode.podcast.slug,
host: Enum.map(episode.hosts, fn host -> host.name end),
guest: Enum.map(episode.guests, fn guest -> guest.name end),
topic: Enum.map(episode.topics, fn topic -> topic.slug end)
}
end
defp derive_audio_bytes_and_duration(changeset = %{changes: %{audio_file: _}}) do
new_file = get_change(changeset, :audio_file)
tagged_file = EpisodeView.audio_local_path(%{changeset.data | audio_file: new_file})
case File.stat(tagged_file) do
{:ok, stats} ->
seconds = extract_duration_seconds(tagged_file)
change(changeset, audio_bytes: stats.size, audio_duration: seconds)
{:error, _} ->
changeset
end
end
defp derive_audio_bytes_and_duration(changeset), do: changeset
defp derive_plusplus_bytes_and_duration(changeset = %{changes: %{plusplus_file: _}}) do
new_file = get_change(changeset, :plusplus_file)
tagged_file = EpisodeView.plusplus_local_path(%{changeset.data | plusplus_file: new_file})
case File.stat(tagged_file) do
{:ok, stats} ->
seconds = extract_duration_seconds(tagged_file)
change(changeset, plusplus_bytes: stats.size, plusplus_duration: seconds)
{:error, _} ->
changeset
end
end
defp derive_plusplus_bytes_and_duration(changeset), do: changeset
defp extract_duration_seconds(path) do
try do
{info, _exit_code} = System.cmd("ffmpeg", ["-i", path], stderr_to_stdout: true)
[_match, duration] = Regex.run(~r/Duration: (.*?),/, info)
TimeView.seconds(duration)
catch
_all -> 0
end
end
defp validate_published_has_published_at(changeset) do
published = get_field(changeset, :published)
published_at = get_field(changeset, :published_at)
if published && is_nil(published_at) do
add_error(changeset, :published_at, "can't be blank when published")
else
changeset
end
end
end
| 29.562212 | 98 | 0.669447 |
79269e20ddfdf6049e1c6cf65b5d576f6b12e28d | 1,992 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/activity_content_details_playlist_item.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/activity_content_details_playlist_item.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/activity_content_details_playlist_item.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.ActivityContentDetailsPlaylistItem do
@moduledoc """
Information about a new playlist item.
## Attributes
- playlistId (String.t): The value that YouTube uses to uniquely identify the playlist. Defaults to: `null`.
- playlistItemId (String.t): ID of the item within the playlist. Defaults to: `null`.
- resourceId (ResourceId): The resourceId object contains information about the resource that was added to the playlist. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:playlistId => any(),
:playlistItemId => any(),
:resourceId => GoogleApi.YouTube.V3.Model.ResourceId.t()
}
field(:playlistId)
field(:playlistItemId)
field(:resourceId, as: GoogleApi.YouTube.V3.Model.ResourceId)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ActivityContentDetailsPlaylistItem do
def decode(value, options) do
GoogleApi.YouTube.V3.Model.ActivityContentDetailsPlaylistItem.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.ActivityContentDetailsPlaylistItem do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.888889 | 143 | 0.75 |
7926b85d744e501bc1bc7262b9ed484e5ebeea9b | 106 | ex | Elixir | lib/cai_data/repo.ex | Bentheburrito/caidata | 31ee94691293b926de77194585714d75c80fea0d | [
"MIT"
] | null | null | null | lib/cai_data/repo.ex | Bentheburrito/caidata | 31ee94691293b926de77194585714d75c80fea0d | [
"MIT"
] | null | null | null | lib/cai_data/repo.ex | Bentheburrito/caidata | 31ee94691293b926de77194585714d75c80fea0d | [
"MIT"
] | null | null | null | defmodule CAIData.Repo do
use Ecto.Repo,
otp_app: :caidata,
adapter: Ecto.Adapters.Postgres
end
| 17.666667 | 35 | 0.726415 |
7926bcab77d18feb0391162bbf5e5d5387f57bbe | 2,447 | ex | Elixir | test/support/test_client.ex | ashneyderman/barracuda | ecce6a1b4a217c0b676d7bfc711897817f4bc12e | [
"MIT"
] | 6 | 2016-08-24T17:53:31.000Z | 2021-02-05T11:18:16.000Z | test/support/test_client.ex | ashneyderman/barracuda | ecce6a1b4a217c0b676d7bfc711897817f4bc12e | [
"MIT"
] | 7 | 2016-12-28T13:09:30.000Z | 2017-03-06T11:11:10.000Z | test/support/test_client.ex | ashneyderman/barracuda | ecce6a1b4a217c0b676d7bfc711897817f4bc12e | [
"MIT"
] | null | null | null | defmodule Barracuda.TestClient.Hello1 do
@behaviour Barracuda.Interceptor
import Barracuda.Call
def init(opts), do: opts
def link(next, %Barracuda.Call{assigns: current} = params, _opts \\ []) do
params
|> assign(:chain, record_chain(Map.get(current, :chain, [])))
|> next.()
end
defp record_chain(current), do: current ++ ["Barracuda.TestClient.Hello1"]
end
defmodule Barracuda.TestClient.Hello2 do
@behaviour Barracuda.Interceptor
import Barracuda.Call
def init(opts), do: opts
def link(next, %Barracuda.Call{assigns: current} = params, _opts \\ []) do
params
|> assign(:chain, record_chain(Map.get(current, :chain, [])))
|> next.()
end
defp record_chain(current), do: current ++ ["Barracuda.TestClient.Hello2"]
end
defmodule Barracuda.TestClient.Adapter do
import Barracuda.Call
def docs(_, _), do: "Adapter: No docs."
def call(%Barracuda.Call{assigns: current}=params, action) do
params
|> assign(:chain, record_chain(Map.get(current, :chain, []), action))
end
defp record_chain(current, action), do: current ++ ["Barracuda.TestClient.Adapter.#{action}"]
end
defmodule Barracuda.TestClient.Adapter1 do
import Barracuda.Call
def docs(_, _), do: "Adapter1: No docs."
def call(%Barracuda.Call{assigns: current}=params, action) do
params
|> assign(:chain, record_chain(Map.get(current, :chain, []), action))
end
defp record_chain(current, action), do: current ++ ["Barracuda.TestClient.Adapter1.#{action}"]
end
defmodule Barracuda.TestClient do
use Barracuda.Client, adapter: Barracuda.TestClient.Adapter,
otp_app: :barracuda
import Barracuda.Call
interceptor :hello
interceptor Barracuda.TestClient.Hello1
interceptor Barracuda.TestClient.Hello2
call :user_repos,
path: "/users/{:username}/repos",
verb: :get,
required: [:username],
required_headers: ["accept"],
doc: ~S"""
Lists all repos for the user
""",
expect: 200
call :no_required, Barracuda.TestClient.Adapter1,
path: "/users/ashneyderman/repos",
verb: :get,
required_headers: ["accept"],
doc: ~S"""
Lists all repos for ashneyderman
""",
expect: 200
defp record_chain(current), do: current ++ ["hello"]
def hello(next, %Barracuda.Call{assigns: current} = params, _opts \\ []) do
params
|> assign(:chain, record_chain(Map.get(current, :chain, [])))
|> next.()
end
end
| 26.89011 | 96 | 0.67593 |
7926c2218184ecd87054585184b9ba45b6004249 | 1,277 | exs | Elixir | test/benchmark.exs | karlseguin/pbuf | 744dd119f6331b8fd74239387ba9ff97acf6b385 | [
"0BSD"
] | 4 | 2018-10-28T16:35:31.000Z | 2021-08-17T05:49:55.000Z | test/benchmark.exs | karlseguin/pbuf | 744dd119f6331b8fd74239387ba9ff97acf6b385 | [
"0BSD"
] | null | null | null | test/benchmark.exs | karlseguin/pbuf | 744dd119f6331b8fd74239387ba9ff97acf6b385 | [
"0BSD"
] | null | null | null | alias Pbuf.Tests.{Everything, Child, Sub}
everything = %Everything{
bool: true, int32: -21, int64: -9922232, uint32: 82882, uint64: 199922332321984,
sint32: -221331, sint64: -29, fixed32: 4294967295, sfixed32: -2147483647,
fixed64: 1844674407370955161, sfixed64: -9223372036854775807,
float: 2.5, double: -3.551, string: "over", bytes: <<9, 0, 0, 0>>,
struct: %Child{id: 9001, name: "goku"}, type: :EVERYTHING_TYPE_SAND, corpus: :web,
choice: {:choice_int32, 299}, user: %Sub.User{id: 1, status: 1}, user_status: :USER_STATUS_NORMAL,
bools: [true], int32s: [-21], int64s: [-9922232], uint32s: [82882], uint64s: [199922332321984],
sint32s: [-221331], sint64s: [-29], fixed32s: [4294967295], sfixed32s: [-2147483647],
fixed64s: [1844674407370955161], sfixed64s: [-9223372036854775807],
floats: [2.5], doubles: [-3.551], strings: ["over"],
bytess: [<<9, 0, 0, 0>>], map1: %{"over" => 9000}, map2: %{999999999999999 => 2.5},
structs: [%Child{id: 18, name: "tea"}],
types: [:EVERYTHING_TYPE_SAND], corpuss: [:products],
map3: %{9001 => %Child{id: 9001, name: "gohan"}}
}
binary = Pbuf.encode_to_iodata!(everything)
Benchee.run(%{
"encode" => fn -> Pbuf.encode_to_iodata!(everything) end,
"decode" => fn -> Pbuf.decode!(Everything, binary) end
})
| 47.296296 | 100 | 0.665623 |
7926c6f91d170f6412c052d95b71f91ff2ea2d16 | 211 | exs | Elixir | priv/repo/migrations/20190325091759_add_project_id_foreign_key.exs | mekongit/evercam_models | 7c36d52f2b47ffc89a3eaca65c2e353716fafacc | [
"MIT"
] | null | null | null | priv/repo/migrations/20190325091759_add_project_id_foreign_key.exs | mekongit/evercam_models | 7c36d52f2b47ffc89a3eaca65c2e353716fafacc | [
"MIT"
] | null | null | null | priv/repo/migrations/20190325091759_add_project_id_foreign_key.exs | mekongit/evercam_models | 7c36d52f2b47ffc89a3eaca65c2e353716fafacc | [
"MIT"
] | null | null | null | defmodule Evercam.Repo.Migrations.AddProjectIdForeignKey do
use Ecto.Migration
def change do
alter table(:cameras) do
add :project_id, references(:projects, on_delete: :nothing)
end
end
end
| 21.1 | 65 | 0.739336 |
7926c745744097f135c23ca474d5ec964f0caf9b | 461 | ex | Elixir | web/models/ticket.ex | Pianist038801/SprintPoker | ae14f79b8cd4254a1c5f5fef698db1cf2d20cf9c | [
"MIT"
] | null | null | null | web/models/ticket.ex | Pianist038801/SprintPoker | ae14f79b8cd4254a1c5f5fef698db1cf2d20cf9c | [
"MIT"
] | null | null | null | web/models/ticket.ex | Pianist038801/SprintPoker | ae14f79b8cd4254a1c5f5fef698db1cf2d20cf9c | [
"MIT"
] | null | null | null | defmodule SprintPoker.Ticket do
use SprintPoker.Web, :model
schema "tickets" do
field :name, :string
field :points, :string
belongs_to :game, SprintPoker.Game, type: :binary_id
timestamps
end
@required_fields ~w(name game_id)
@optional_fields ~w(points)
def changeset(model, params \\ :empty) do
model
|> cast(params, @required_fields, @optional_fields)
|> update_change(:name, &(String.slice(&1, 0..254)))
end
end
| 21.952381 | 56 | 0.683297 |
7926d3247389c85c639c4fcdc657beab35134ff8 | 2,598 | ex | Elixir | lib/exredis/sub.ex | aforward-oss/exredis | cf9e72e971231eaf1e99c422bd84bbb40215c84b | [
"MIT"
] | null | null | null | lib/exredis/sub.ex | aforward-oss/exredis | cf9e72e971231eaf1e99c422bd84bbb40215c84b | [
"MIT"
] | null | null | null | lib/exredis/sub.ex | aforward-oss/exredis | cf9e72e971231eaf1e99c422bd84bbb40215c84b | [
"MIT"
] | null | null | null | defmodule Exredis.Sub do
@moduledoc """
Subscribe functions for exredis
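A minimal usage sketch (messages arrive in whatever shape `:eredis_sub`
delivers them, e.g. subscription acknowledgements followed by published
payloads):

    client = Exredis.Sub.start
    Exredis.Sub.subscribe(client, "some_channel", fn msg ->
      IO.inspect(msg)
    end)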
"""
defmacro __using__(_opts) do
quote do
import Exredis.Sub
end
end
@type reconnect :: :no_reconnect | integer
@type max_queue :: :infinity | integer
@type behaviour :: :drop | :exit
@type start_link :: { :ok, pid } | { :error, term }
@doc """
Connect to the Redis server to subscribe to a channel
* `start_link`
* `start_link('127.0.0.1', 6379)`
* `start_link('127.0.0.1', 6379, 'with_password')`
"""
@spec start_link(list, integer, list, reconnect, max_queue, behaviour) :: start_link
def start_link(host \\ '127.0.0.1', port \\ 6379, password \\ '',
reconnect \\ :no_reconnect, max_queue \\ :infinity,
behaviour \\ :drop), do:
:eredis_sub.start_link(host, port, password, reconnect, max_queue, behaviour)
@doc """
Connect to the Redis server to subscribe to a channel
* `start`
* `start('127.0.0.1', 6379)`
* `start('127.0.0.1', 6379, 'with_password')`
"""
@spec start(list, integer, list, reconnect, max_queue, behaviour) :: pid
def start(host \\ '127.0.0.1', port \\ 6379, password \\ '',
reconnect \\ :no_reconnect, max_queue \\ :infinity,
behaviour \\ :drop), do:
:eredis_sub.start_link(host, port, password, reconnect, max_queue, behaviour)
|> elem 1
@doc """
Disconnect from the Redis server:
* `stop(client)`
Client is a pid obtained from the `start` command
"""
@spec stop(pid) :: :ok
def stop(client), do:
:eredis_sub.stop(client)
@doc """
Subscribe to a channel
* `subscribe(client, "some_channel", fn(msg) -> ... end)`
"""
@spec subscribe(pid, binary, term) :: any
def subscribe(client, channel, term) do
spawn_link fn ->
:eredis_sub.controlling_process client
:eredis_sub.subscribe client, [channel]
receiver(client, term)
end
end
@doc """
Subscribe to a channel by pattern
* `psubscribe(client, "some_channel_*", fn(msg) -> ... end)`
"""
@spec psubscribe(pid, binary, term) :: any
def psubscribe(client, channel, term) do
spawn_link fn ->
:eredis_sub.controlling_process client
:eredis_sub.psubscribe client, [channel]
receiver(client, term)
end
end
@spec ack_message(pid) :: any
defp ack_message(client) when is_pid(client), do:
:eredis_sub.ack_message(client)
@spec receiver(pid, term) :: any
defp receiver(pid, callback) do
receive do
msg ->
ack_message(pid)
callback.(msg)
receiver(pid, callback)
end
end
end
| 26.510204 | 86 | 0.628945 |
7926d85ebd23682c8252821c6c37bef482e0d577 | 5,483 | ex | Elixir | lib/float_pp/round.ex | juliandicks/elixir-float_pp | 129ff8f71e90b69d70be2d1fa4f7fb3d7a96945e | [
"Apache-2.0"
] | 2 | 2019-04-29T17:51:56.000Z | 2020-01-02T18:25:45.000Z | lib/float_pp/round.ex | juliandicks/elixir-float_pp | 129ff8f71e90b69d70be2d1fa4f7fb3d7a96945e | [
"Apache-2.0"
] | 2 | 2016-06-10T18:59:39.000Z | 2020-04-10T10:22:37.000Z | lib/float_pp/round.ex | ewildgoose/elixir-float_pp | 303421cddbb97e9877edb1d79cc33c5ede236ed3 | [
"Apache-2.0"
] | 2 | 2016-08-16T08:38:56.000Z | 2019-04-29T17:52:05.000Z | defmodule FloatPP.Round do
@moduledoc """
Implement rounding of a list of decimal digits to an arbitrary precision
using one of several rounding algorithms.
Rounding algorithms are based on the definitions given in IEEE 754, but also
include 2 additional options (effectively the complementary versions):
## Rounding algorithms
Directed roundings:
* `:down` - Round towards 0 (truncate), eg 10.9 rounds to 10.0
* `:up` - Round away from 0, eg 10.1 rounds to 11.0. (Non IEEE algorithm)
* `:ceiling` - Round toward +∞ - Also known as rounding up or ceiling
* `:floor` - Round toward -∞ - Also known as rounding down or floor
Round to nearest:
* `:half_even` - Round to nearest value, but in a tiebreak, round towards the
nearest value with an even (zero) least significant bit, which occurs 50%
of the time. This is the default for IEEE binary floating-point and the recommended
value for decimal.
* `:half_up` - Round to nearest value, but in a tiebreak, round away from 0.
This is the default algorithm for Elixir's `Kernel.round/1`
* `:half_down` - Round to nearest value, but in a tiebreak, round towards 0
(Non IEEE algorithm)
"""
require Integer
@type rounding :: :down |
:half_up |
:half_even |
:ceiling |
:floor |
:half_down |
:up
@doc """
Round a digit using a specified rounding.
Given a list of decimal digits (without trailing zeros) in the form
sign [sig_digits] | least_sig | tie | [rest]
There are a number of rounding options which may be conditional on for example
- sign of the orignal number
- even-ness of the least_sig digit
- whether there is a non-zero tie break digit
- if the tie break digit is 5, whether there are further non zero digits
The various rounding rules are based on IEEE 754 and documented in the moduledoc
Takes input digits_t = {digits, place, positive}
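An illustrative sketch (digit output flattened to a number for readability):
rounding 1.25, represented as `{[1, 2, 5], 1, true}`, to one decimal place
gives 1.2 under `:half_even` (the tie goes to the even digit) and 1.3 under
`:half_up`:

    FloatPP.Round.round({[1, 2, 5], 1, true}, %{decimals: 1, rounding: :half_even})
    FloatPP.Round.round({[1, 2, 5], 1, true}, %{decimals: 1, rounding: :half_up})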
"""
def round(digits_t, options)
# Passing true for decimal places avoids rounding and uses whatever is necessary
def round(digits_t, %{scientific: true}), do: digits_t
def round(digits_t, %{decimals: true}), do: digits_t
# rounded away all the decimals... return 0
def round(_, %{scientific: dp}) when dp <= 0,
do: {[0], 1, true}
def round({_, place, _}, %{decimals: dp}) when dp + place <= 0,
do: {[0], 1, true}
# scientific/decimal rounding are the same, we are just varying which
# digit we start counting from to find our rounding point
def round(digits_t, options = %{scientific: dp}),
do: do_round(digits_t, dp, options)
def round(digits_t = {_, place, _}, options = %{decimals: dp}),
do: do_round(digits_t, dp + place - 1, options)
defp do_round({digits, place, positive}, round_at, %{rounding: rounding}) do
case Enum.split(digits, round_at) do
{l, [least_sig | [tie | rest]]} ->
case do_incr(l, least_sig, increment?(positive, least_sig, tie, rest, rounding)) do
[:rollover | digits] -> {digits, place + 1, positive}
digits -> {digits, place, positive}
end
{l, [least_sig | []]} -> {[l, least_sig], place, positive}
{l, []} -> {l, place, positive}
end
end
defp do_incr(l, least_sig, false), do: [l, least_sig]
defp do_incr(l, least_sig, true) when least_sig < 9, do: [l, least_sig + 1]
# else need to cascade the increment
defp do_incr(l, 9, true) do
l
|> Enum.reverse
|> cascade_incr
|> Enum.reverse([0])
end
# cascade an increment of decimal digits which could be rolling over 9 -> 0
defp cascade_incr([9 | rest]), do: [0 | cascade_incr(rest)]
defp cascade_incr([d | rest]), do: [d+1 | rest]
defp cascade_incr([]), do: [1, :rollover]
@spec increment?(boolean, non_neg_integer | nil, non_neg_integer | nil, list, FloatPP.rounding) :: boolean
defp increment?(positive, least_sig, tie, rest, round)
# Directed rounding towards 0 (truncate)
defp increment?(_, _ls, _tie, _, :down), do: false
# Directed rounding away from 0 (non IEEE option)
defp increment?(_, _ls, nil, _, :up), do: false
defp increment?(_, _ls, _tie, _, :up), do: true
# Directed rounding towards +∞ (rounding up / ceiling)
defp increment?(true, _ls, tie, _, :ceiling) when tie != nil, do: true
defp increment?(_, _ls, _tie, _, :ceiling), do: false
# Directed rounding towards -∞ (rounding down / floor)
defp increment?(false, _ls, tie, _, :floor) when tie != nil, do: true
defp increment?(_, _ls, _tie, _, :floor), do: false
# Round to nearest - tiebreaks by rounding to even
# Default IEEE rounding, recommended default for decimal
defp increment?(_, ls, 5, [], :half_even) when Integer.is_even(ls), do: false
defp increment?(_, _ls, tie, _rest, :half_even) when tie >= 5, do: true
defp increment?(_, _ls, _tie, _rest, :half_even), do: false
# Round to nearest - tiebreaks by rounding away from zero (same as Elixir Kernel.round)
defp increment?(_, _ls, tie, _rest, :half_up) when tie >= 5, do: true
defp increment?(_, _ls, _tie, _rest, :half_up), do: false
# Round to nearest - tiebreaks by rounding towards zero (non IEEE option)
defp increment?(_, _ls, 5, [], :half_down), do: false
defp increment?(_, _ls, tie, _rest, :half_down) when tie >= 5, do: true
defp increment?(_, _ls, _tie, _rest, :half_down), do: false
end
| 40.614815 | 116 | 0.654569 |
7926dab55a6291ad73c352fb6b6fe4eb3b059a20 | 6,989 | ex | Elixir | lib/scanner/lib/utils.ex | Ajwah/ex-gherkin | 401ee551312b72aa059c7f4621f014b111049acf | [
"Apache-2.0"
] | 8 | 2020-07-27T09:11:24.000Z | 2020-09-21T20:57:45.000Z | lib/scanner/lib/utils.ex | Ajwah/ex-gherkin | 401ee551312b72aa059c7f4621f014b111049acf | [
"Apache-2.0"
] | 1 | 2021-05-14T13:30:27.000Z | 2021-05-14T13:30:27.000Z | lib/scanner/lib/utils.ex | Ajwah/ex-gherkin | 401ee551312b72aa059c7f4621f014b111049acf | [
"Apache-2.0"
] | null | null | null | defmodule ExGherkin.Scanner.Utils do
@moduledoc false
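# Illustrative sketch of the pipe splitter's output: each pipe-delimited
# segment (including the empty ones before the first and after the last pipe)
# becomes a `{column, cell}` pair, where `column` is the 1-based column at
# which the cell's text starts and trailing spaces inside cells are kept, e.g.
#
#   data_table_pipe_splitter("| a | b |")
#   #=> [{1, ""}, {3, "a "}, {7, "b "}, {10, ""}]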
def data_table_pipe_splitter(line, offset_count \\ 0)
def data_table_pipe_splitter(line, offset_count) when is_integer(offset_count) do
data_table_pipe_splitter(line, {true, false, offset_count, offset_count, "", []})
end
def data_table_pipe_splitter("", {_, _, prev_count, _, cell, cells}) do
if cell == "" do
cells ++ [{prev_count + 1, cell}]
else
cells ++ [{prev_count, cell}]
end
end
def data_table_pipe_splitter(
<<"|", rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{true, false, count + 1, count + 1, "", cells ++ [{prev_count + 1, cell}]}
)
else
data_table_pipe_splitter(
rest,
{true, false, count + 1, count + 1, "", cells ++ [{prev_count, cell}]}
)
end
end
def data_table_pipe_splitter(
<<"\\\\", rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{false, false, prev_count + 1, count + 2, cell <> "\\", cells}
)
else
data_table_pipe_splitter(rest, {false, false, prev_count, count + 2, cell <> "\\", cells})
end
end
def data_table_pipe_splitter(
<<"\\|", rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{false, false, prev_count + 1, count + 2, cell <> "|", cells}
)
else
data_table_pipe_splitter(rest, {false, false, prev_count, count + 2, cell <> "|", cells})
end
end
def data_table_pipe_splitter(
<<"\\n", rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{false, false, prev_count + 1, count + 2, cell <> "\n", cells}
)
else
data_table_pipe_splitter(rest, {false, false, prev_count, count + 2, cell <> "\n", cells})
end
end
def data_table_pipe_splitter(
<<"\\", rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{false, false, prev_count + 1, count + 2, cell <> "\\", cells}
)
else
data_table_pipe_splitter(rest, {false, false, prev_count, count + 2, cell <> "\\", cells})
end
end
def data_table_pipe_splitter(
<<"\n", rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{false, false, prev_count + 1, count + 2, cell <> "\n", cells}
)
else
data_table_pipe_splitter(rest, {false, false, prev_count, count + 2, cell <> "\n", cells})
end
end
def data_table_pipe_splitter(
<<"\t", rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{leading_spaces_to_skip?, false, prev_count, count, cell, cells}
)
else
data_table_pipe_splitter(
rest,
{leading_spaces_to_skip?, false, prev_count, count, cell, cells}
)
end
end
def data_table_pipe_splitter(
<<160::utf8, rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{leading_spaces_to_skip?, false, prev_count + 2, count + 2, cell, cells}
)
else
data_table_pipe_splitter(
rest,
{leading_spaces_to_skip?, false, prev_count, count + 2, cell, cells}
)
end
end
def data_table_pipe_splitter(
<<" ", rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{leading_spaces_to_skip?, true, prev_count + 1, count + 1, cell, cells}
)
else
data_table_pipe_splitter(
rest,
{leading_spaces_to_skip?, false, prev_count, count + 1, cell <> " ", cells}
)
end
end
def data_table_pipe_splitter(
<<char::utf8, rest::binary>>,
{leading_spaces_to_skip?, _, prev_count, count, cell, cells}
) do
if leading_spaces_to_skip? do
data_table_pipe_splitter(
rest,
{false, false, prev_count + 1, count + 1, cell <> <<char::utf8>>, cells}
)
else
data_table_pipe_splitter(
rest,
{false, false, prev_count, count + 1, cell <> <<char::utf8>>, cells}
)
end
end
def count_spaces_before(<<" ", rest::binary>>, count) do
count_spaces_before(rest, count + 1)
end
def count_spaces_before(trimmed_trailing, count) do
{count, trimmed_trailing}
end
def pad_leading(line, 0), do: line
def pad_leading(line, amount_spaces) do
pad_leading(" " <> line, amount_spaces - 1)
end
def trim_line(line, %{doc_string: false}) do
{line, column_count} = trim_leading_white_spaces(line, 1)
{String.trim_trailing(line), column_count}
end
def trim_line(line, %{doc_string: {1, _}}), do: {String.trim_trailing(line, "\n"), 1}
def trim_line(line, %{doc_string: {trim_length, _}}) do
trim_length = trim_length - 1
trimmed_line =
line
|> trim_fixed_number_leading_white_spaces(trim_length)
|> case do
{trimmed_line, :wrongly_indented_line_within_doc_string} ->
trimmed_line
{trimmed_line, _} ->
trimmed_line
end
|> String.trim_trailing("\n")
{trimmed_line, trim_length + 1}
end
defp trim_leading_white_spaces(<<" ", rest::binary>>, column_count) do
trim_leading_white_spaces(rest, column_count + 1)
end
defp trim_leading_white_spaces(<<"\n", rest::binary>>, column_count) do
trim_leading_white_spaces(rest, column_count)
end
defp trim_leading_white_spaces(line, column_count) do
{line, column_count}
end
defp trim_fixed_number_leading_white_spaces(remainder, 0) do
{remainder, 0}
end
defp trim_fixed_number_leading_white_spaces(<<" ", rest::binary>>, remaining_char_count) do
trim_fixed_number_leading_white_spaces(rest, remaining_char_count - 1)
end
defp trim_fixed_number_leading_white_spaces(<<"\n">>, remaining_char_count) do
remaining_char_count =
if remaining_char_count == 0 do
0
else
:wrongly_indented_line_within_doc_string
end
{"", remaining_char_count}
end
defp trim_fixed_number_leading_white_spaces(remainder, _) do
{remainder, :wrongly_indented_line_within_doc_string}
end
end
| 28.295547 | 96 | 0.629561 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.