hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
798749324ea20fa6021c4a5f91c435b4abfd1ebe | 825 | ex | Elixir | web/router.ex | avinoth/spaces | 00bec5adf4568fef73b49e57808033295a837931 | [
"MIT"
] | 1 | 2016-09-13T10:40:53.000Z | 2016-09-13T10:40:53.000Z | web/router.ex | avinoth/spaces | 00bec5adf4568fef73b49e57808033295a837931 | [
"MIT"
] | null | null | null | web/router.ex | avinoth/spaces | 00bec5adf4568fef73b49e57808033295a837931 | [
"MIT"
] | null | null | null | defmodule Spaces.Router do
use Spaces.Web, :router
# Browser pipeline: standard Phoenix plugs for HTML requests
# (session, flash, CSRF protection, secure headers).
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
# API pipeline: JSON only. Currently unused (see commented scope below).
pipeline :api do
plug :accepts, ["json"]
end
# All routes are served through the browser stack.
scope "/", Spaces do
pipe_through :browser # Use the default browser stack
get "/", PageController, :index
get "/loggedIn", PageController, :loggedIn
resources "/post", PostController
# Sessions only expose :index and :new via resources; login/logout are
# wired explicitly below (Slack-based login, DELETE to destroy).
resources "/session", SessionController, only: [:index, :new]
post "/session/slackLogin", SessionController, :slackLogin
delete "/session/destroy", SessionController, :destroy
get "/user", UsersController, :show
end
# Other scopes may use custom stacks.
# scope "/api", Spaces do
#   pipe_through :api
# end
end
| 22.916667 | 65 | 0.683636 |
79875ab5e5d35e8dce615b9c36a3d4a756ce422f | 711 | ex | Elixir | lib/twaddler_web/gettext.ex | woeye/twaddler | e06a22a94520055bc33aaaacfe51989ba8ab665f | [
"Apache-2.0"
] | 1 | 2018-10-08T13:57:08.000Z | 2018-10-08T13:57:08.000Z | lib/twaddler_web/gettext.ex | woeye/twaddler | e06a22a94520055bc33aaaacfe51989ba8ab665f | [
"Apache-2.0"
] | null | null | null | lib/twaddler_web/gettext.ex | woeye/twaddler | e06a22a94520055bc33aaaacfe51989ba8ab665f | [
"Apache-2.0"
] | 1 | 2018-10-08T13:55:44.000Z | 2018-10-08T13:55:44.000Z | defmodule TwaddlerWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import TwaddlerWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
# `use Gettext` injects the gettext/dgettext/ngettext macros documented
# above; translations are looked up in the :twaddler OTP app's priv dir.
use Gettext, otp_app: :twaddler
end
| 28.44 | 72 | 0.682138 |
798794eb7ca0d436d82ea6fd95dbc369f754ce6c | 2,782 | exs | Elixir | mix.exs | jwarlander/appsignal-elixir | b0a8a366370cf01a407a381a3041d0f6516f476e | [
"MIT"
] | null | null | null | mix.exs | jwarlander/appsignal-elixir | b0a8a366370cf01a407a381a3041d0f6516f476e | [
"MIT"
] | null | null | null | mix.exs | jwarlander/appsignal-elixir | b0a8a366370cf01a407a381a3041d0f6516f476e | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Compile.Appsignal do
use Mix.Task
# Custom compiler step (registered in `compilers/1` of the mixfile below):
# loads mix_helpers.exs and runs the AppSignal agent install routine
# before the regular Elixir compilers run.
def run(_args) do
{_, _} = Code.eval_file("mix_helpers.exs")
Mix.Appsignal.Helper.install()
end
end
defmodule Appsignal.Mixfile do
use Mix.Project
def project do
[
app: :appsignal,
version: "1.9.3",
name: "AppSignal",
description: description(),
package: package(),
source_url: "https://github.com/appsignal/appsignal-elixir",
homepage_url: "https://appsignal.com",
# Test paths and compilers vary per Mix env (Phoenix gets extra ones).
test_paths: test_paths(Mix.env()),
elixir: "~> 1.0",
compilers: compilers(Mix.env()),
elixirc_paths: elixirc_paths(Mix.env()),
deps: deps(),
docs: [main: "Appsignal", logo: "logo.png"],
dialyzer: [
plt_add_deps: :transitive,
plt_add_apps: [:mix],
ignore_warnings: "dialyzer.ignore-warnings"
]
]
end
# Hex package summary shown on hex.pm.
defp description do
"Collects error and performance data from your Elixir applications and sends it to AppSignal"
end
# Hex package metadata; `files` lists everything shipped in the package,
# including the C sources and Makefile for the native agent.
defp package do
%{
files: [
"lib",
"c_src/*.[ch]",
"mix.exs",
"mix_helpers.exs",
"*.md",
"LICENSE",
"Makefile",
"agent.exs",
"priv/cacert.pem"
],
maintainers: ["Jeff Kreeftmeijer", "Tom de Bruijn"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/appsignal/appsignal-elixir"}
}
end
def application do
[mod: {Appsignal, []}, applications: [:logger, :decorator, :hackney, :poison]]
end
# The :test_phoenix env adds the Phoenix compiler; every env prepends the
# custom :appsignal compiler defined at the top of this file.
defp compilers(:test_phoenix), do: [:phoenix] ++ compilers(:prod)
defp compilers(_), do: [:appsignal] ++ Mix.compilers()
defp test_paths(:test_phoenix), do: ["test/appsignal", "test/mix", "test/phoenix"]
defp test_paths(_), do: ["test/appsignal", "test/mix"]
# Test envs also compile test/support helpers.
defp elixirc_paths(env) do
case test?(env) do
true -> ["lib", "test/support"]
false -> ["lib"]
end
end
defp test?(:test), do: true
defp test?(:test_phoenix), do: true
defp test?(:test_no_nif), do: true
defp test?(_), do: false
defp deps do
# Poison 4.x requires Elixir >= 1.6, so cap the version on older Elixir.
poison_version =
case Version.compare(System.version(), "1.6.0") do
:lt -> ">= 1.3.0 and < 4.0.0"
_ -> ">= 1.3.0"
end
[
{:hackney, "~> 1.6"},
{:poison, poison_version},
{:decorator, "~> 1.2.3"},
{:plug, ">= 1.1.0", optional: true},
{:phoenix, ">= 1.2.0", optional: true, only: [:prod, :test_phoenix, :dev]},
{:bypass, "~> 0.6.0", only: [:test, :test_phoenix, :test_no_nif]},
{:plug_cowboy, "~> 1.0", only: [:test, :test_phoenix, :test_no_nif]},
{:ex_doc, "~> 0.12", only: :dev, runtime: false},
{:credo, "~> 1.0.0", only: [:test, :dev], runtime: false},
{:dialyxir, "~> 1.0.0-rc4", only: [:dev], runtime: false}
]
end
end
| 27.27451 | 97 | 0.568656 |
7987a9f58697bde1df2c956c75ce4f2930464de6 | 550 | exs | Elixir | test/joi/validator/required_test.exs | scottming/joi | 1c7546bb0473fa53325533c7ab4aec402bfba0d1 | [
"MIT"
] | 25 | 2020-12-03T08:14:51.000Z | 2021-09-01T15:34:30.000Z | test/joi/validator/required_test.exs | scottming/joi | 1c7546bb0473fa53325533c7ab4aec402bfba0d1 | [
"MIT"
] | 5 | 2021-02-13T12:56:56.000Z | 2021-07-30T01:27:51.000Z | test/joi/validator/required_test.exs | scottming/joi | 1c7546bb0473fa53325533c7ab4aec402bfba0d1 | [
"MIT"
] | 2 | 2021-03-15T00:37:13.000Z | 2021-07-26T15:21:55.000Z | defmodule Joi.Validator.RequiredTest do
use ExUnit.Case, async: true
import Joi.Support.Util
@field :field
describe "required test" do
# Compile-time loop: generates one test per type returned by all_types/0.
# `unquote(t)` injects the type atom captured at generation time into
# each test body.
for t <- all_types() do
test "error: with nil field when validate #{t} type" do
# A nil value must fail the required check for every type module.
data = %{@field => nil}
type_module = unquote(t) |> atom_type_to_mod()
assert {:error, error} = apply(type_module, :validate_field, [@field, data, []])
assert error.type == "#{unquote(t)}.required"
assert error.message == "#{@field} is required"
end
end
end
end
| 27.5 | 88 | 0.621818 |
7987b054ed5f8f39959a2dba64ed387775425ec1 | 3,188 | ex | Elixir | lib/tai_shang/nft_plus_fetcher.ex | leeduckgo/Tai-Shang | 33831f54b0ef8eb7772b0b559d8fe8897a06b57c | [
"MIT"
] | 17 | 2021-06-10T07:39:18.000Z | 2021-12-16T03:26:53.000Z | lib/tai_shang/nft_plus_fetcher.ex | leeduckgo/Tai-Shang | 33831f54b0ef8eb7772b0b559d8fe8897a06b57c | [
"MIT"
] | 4 | 2021-05-19T02:09:53.000Z | 2021-07-03T09:34:09.000Z | lib/tai_shang/nft_plus_fetcher.ex | leeduckgo/Tai-Shang | 33831f54b0ef8eb7772b0b559d8fe8897a06b57c | [
"MIT"
] | 4 | 2021-07-06T02:50:29.000Z | 2021-12-21T09:33:20.000Z | defmodule TaiShang.NFTPlusFetcher do
@moduledoc """
Fetch NFT-Plus
"""
alias TaiShang.{KeyGenerator, NFTPlusInteractor}
@contract_keys [:limits, :rules]
# Builds, for every id in `token_id_list`, a map containing the token id,
# its current owner (read from the ERC-721 contract) and the extra
# evidence info stored on the evidence contract.
def fetch_tokens_info(chain_id, evi_contract_addr, erc721_contract_addr, token_id_list) do
  for token_id <- token_id_list do
    %{
      token_id: token_id,
      owner: NFTPlusInteractor.owner_of(erc721_contract_addr, token_id),
      extra_info: get_extra_info(chain_id, evi_contract_addr, erc721_contract_addr, token_id)
    }
  end
end
# Returns the token id stored at the highest enumeration index for
# `addr_str` (i.e. index `balance - 1` of the owner's token list).
def fetch_best_nft(contract_addr, addr_str) do
  last_index = NFTPlusInteractor.balance_of(contract_addr, addr_str) - 1

  NFTPlusInteractor.token_of_owner_by_index(
    contract_addr,
    addr_str,
    last_index
  )
end
# Lists all ERC-721 tokens owned by `addr_str` and enriches each entry
# (token_id + uri, see get_erc721_balance/2) with the evidence-contract
# data under the :extra_info key.
def get_nft_plus_balance(chain_id, evi_contract_addr, erc721_contract_addr, addr_str) do
erc721_balance = get_erc721_balance(erc721_contract_addr, addr_str)
Enum.map(erc721_balance, fn %{token_id: token_id} = basic_info ->
extra_info =
get_extra_info(
chain_id,
evi_contract_addr,
erc721_contract_addr,
token_id
)
Map.put(basic_info, :extra_info, extra_info)
end)
end
@spec get_erc721_balance(binary, binary) :: list
# Enumerates every token owned by `addr_str` on the ERC-721 contract and
# returns a list of `%{token_id: id, uri: uri}` maps (empty when the
# address owns nothing; the zero guard also avoids the 0..-1 range).
def get_erc721_balance(contract_addr, addr_str) do
  balance = NFTPlusInteractor.balance_of(contract_addr, addr_str)

  if balance == 0 do
    []
  else
    for index <- 0..(balance - 1) do
      token_id =
        NFTPlusInteractor.token_of_owner_by_index(
          contract_addr,
          addr_str,
          index
        )

      %{
        token_id: token_id,
        uri: NFTPlusInteractor.token_uri(contract_addr, token_id)
      }
    end
  end
end
# Reads the three evidence records attached to a token: the basic record
# stored under its chain-unique id, plus the :gene and :parent records
# stored under derived keys. Returns them as a map.
def get_extra_info(chain_id, evi_contract_addr, erc721_contract_addr, token_id) do
# Key that is unique across chains and contracts for this token.
unique_token_id =
KeyGenerator.gen_unique_token_id(
chain_id,
erc721_contract_addr,
token_id
)
basic_info =
NFTPlusInteractor.get_evidence_by_key(
evi_contract_addr,
unique_token_id
)
gene =
NFTPlusInteractor.get_evidence_by_key(
evi_contract_addr,
KeyGenerator.gen_key(unique_token_id, :gene)
)
parent =
NFTPlusInteractor.get_evidence_by_key(
evi_contract_addr,
KeyGenerator.gen_key(unique_token_id, :parent)
)
%{
basic_info: basic_info,
gene: gene,
parent: parent
}
end
@spec get_contract_info(integer, String.t(), String.t()) :: List.t()
# Fetches the contract-level evidence records for each key in
# @contract_keys (:limits and :rules), in that order.
def get_contract_info(chain_id, erc721_addr, evi_contract_addr) do
Enum.map(@contract_keys, fn elem ->
do_get_contract_info(
elem,
chain_id,
erc721_addr,
evi_contract_addr
)
end)
end
# Looks up a single contract-level record: builds the contract-scoped key
# for `elem` and reads it from the evidence contract.
def do_get_contract_info(elem, chain_id, erc721_addr, evi_contract_addr) do
contract_full = KeyGenerator.gen_contract_full(chain_id, erc721_addr)
key = KeyGenerator.gen_key(contract_full, elem)
NFTPlusInteractor.get_evidence_by_key(
evi_contract_addr,
key
)
end
end
| 25.102362 | 94 | 0.664994 |
7987f2de53cf6c7bb74a402e4314969ee2fa3618 | 526 | exs | Elixir | test/subtype_test.exs | PokemonTCG/pokemon-tcg-sdk-elixir | f8866044f4a5439408e04b7a68e893a8bbc1712c | [
"MIT"
] | 7 | 2016-09-01T14:33:45.000Z | 2021-10-01T03:42:06.000Z | test/subtype_test.exs | PokemonTCG/pokemon-tcg-sdk-elixir | f8866044f4a5439408e04b7a68e893a8bbc1712c | [
"MIT"
] | 1 | 2016-09-06T02:03:22.000Z | 2016-09-08T01:03:11.000Z | test/subtype_test.exs | PokemonTCG/pokemon-tcg-sdk-elixir | f8866044f4a5439408e04b7a68e893a8bbc1712c | [
"MIT"
] | 3 | 2017-11-21T16:01:51.000Z | 2021-10-01T03:42:07.000Z | defmodule Pokemon.SubtypeTest do
use ExUnit.Case
# HTTP interactions are replayed from recorded cassettes via ExVCR, so
# these tests do not hit the live API.
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
setup_all do
ExVCR.Config.cassette_library_dir("fixture/vcr_cassettes", "fixture/custom_cassettes")
end
test "resource returns subtypes" do
assert Pokemon.Subtype.resource === "subtypes"
end
test "test_all_returns_all_subtypes" do
# Replays the recorded "all_subtypes" response instead of a live call.
use_cassette "all_subtypes" do
types = Pokemon.Subtype.all
assert Enum.count(types) > 10
assert Enum.member?(types, "Basic") === true
end
end
end
| 25.047619 | 90 | 0.722433 |
7988197d2d247ed2e23800ae8efb49f16620d421 | 6,008 | ex | Elixir | lib/cafex/producer/worker.ex | MishaConway/cafex | 32965b7e099bc45de24c229cc76f7b83b35ff7b4 | [
"Apache-2.0"
] | null | null | null | lib/cafex/producer/worker.ex | MishaConway/cafex | 32965b7e099bc45de24c229cc76f7b83b35ff7b4 | [
"Apache-2.0"
] | null | null | null | lib/cafex/producer/worker.ex | MishaConway/cafex | 32965b7e099bc45de24c229cc76f7b83b35ff7b4 | [
"Apache-2.0"
] | null | null | null | defmodule Cafex.Producer.Worker do
use GenServer
defmodule State do
@moduledoc false
# Internal state of one producer worker process.
defstruct broker: nil,  # {host, port} tuple of the broker (destructured in init/1)
topic: nil,  # topic name stamped onto every outgoing message
partition: nil,  # partition number stamped onto every outgoing message
client_id: nil,  # Kafka client id passed to the connection
conn: nil,  # pid of the Cafex.Connection opened in init/1
acks: 1,  # required_acks for produce requests
batch_num: nil,  # flush threshold: batch is sent once it reaches this size
batches: [],  # pending {from, message} pairs, newest first (reversed on flush)
# max_request_size: nil,
linger_ms: 0,  # how long to buffer before flushing; <= 0 means send immediately
timer: nil,  # ref from :erlang.start_timer/3 for the pending linger flush
timeout: 60000,  # produce request timeout (ms)
compression: nil  # compression codec for produce requests
end
alias Cafex.Connection
alias Cafex.Protocol.Produce.Request
alias Cafex.Protocol.Produce.Response
# ===================================================================
# API
# ===================================================================
# Starts a worker linked to the caller. `broker` is a `{host, port}`
# tuple; `opts` (acks, timeout, client_id, batch_num, linger_ms,
# compression) are consumed in init/1.
def start_link(broker, topic, partition, opts \\ []) do
GenServer.start_link __MODULE__, [broker, topic, partition, opts]
end
# Synchronous produce: the caller blocks until the broker replies
# (or the batch it joined is flushed).
def produce(pid, message) do
GenServer.call pid, {:produce, message}
end
# Fire-and-forget produce: no reply is ever sent to the caller.
def async_produce(pid, message) do
GenServer.cast pid, {:produce, message}
end
# Stops the worker synchronously.
def stop(pid) do
GenServer.call pid, :stop
end
# ===================================================================
# GenServer callbacks
# ===================================================================
# Reads the producer options (falling back to the defaults below), builds
# the initial State and opens the broker connection. Stops with the
# connection error as reason if the connection cannot be established.
def init([{host, port} = broker, topic, partition, opts]) do
acks = Keyword.get(opts, :acks, 1)
timeout = Keyword.get(opts, :timeout, 60000)
client_id = Keyword.get(opts, :client_id, "cafex")
batch_num = Keyword.get(opts, :batch_num)
# max_request_size = Keyword.get(opts, :max_request_size)
linger_ms = Keyword.get(opts, :linger_ms)
compression = Keyword.get(opts, :compression)
state = %State{ broker: broker,
topic: topic,
partition: partition,
client_id: client_id,
acks: acks,
batch_num: batch_num,
# max_request_size: max_request_size,
linger_ms: linger_ms,
timeout: timeout,
compression: compression}
case Connection.start_link(host, port, client_id: client_id) do
{:ok, pid} ->
{:ok, %{state | conn: pid}}
{:error, reason} ->
{:stop, reason}
end
end
# Synchronous produce: `from` is kept so the caller can be replied to
# once its message is actually sent (possibly after batching).
def handle_call({:produce, message}, from, state) do
maybe_produce(message, from, state)
end
def handle_call(:stop, _from, state) do
{:stop, :normal, :ok, state}
end
# Async produce: `nil` in place of `from` marks fire-and-forget messages
# (do_reply/1 skips them).
def handle_cast({:produce, message}, state) do
maybe_produce(message, nil, state)
end
# Linger timer fired: flush the buffered batch. `timer` appears in both
# the message and the state pattern, so only the currently-armed timer's
# message matches.
# NOTE(review): there is no catch-all handle_info clause, so a stale
# timer message (from a replaced ref) would crash the worker — confirm
# whether that can occur.
def handle_info({:timeout, timer, :linger_timeout}, %{timer: timer, batches: batches} = state) do
result = batches |> Enum.reverse |> do_produce(state)
state = %{state|timer: nil, batches: []}
case result do
:ok ->
{:noreply, state}
{:error, reason} ->
{:stop, reason, state}
end
end
# Called on worker shutdown: fail all pending callers and release the
# broker connection.
#
# Any `{from, message}` pairs still buffered are answered with
# `{:error, reason}` so no caller is left blocked in GenServer.call/3.
# `List.wrap/1` treats a `nil` batch list the same as an empty one.
def terminate(reason, %{conn: conn, batches: batches}) do
  for {from, _message} <- List.wrap(batches) do
    do_reply({from, {:error, reason}})
  end

  # Close the connection only if it was ever established.
  if conn, do: Connection.close(conn)
  :ok
end
# ===================================================================
# Internal functions
# ===================================================================
# Batching policy, one clause per case:
#  1. linger disabled (<= 0): send the single message immediately;
#  2. adding this message fills the batch: flush it now;
#  3. otherwise buffer the message and make sure a linger timer is armed.
defp maybe_produce(message, from, %{linger_ms: linger_ms} = state) when is_integer(linger_ms) and linger_ms <= 0 do
case do_produce([{from, message}], state) do
:ok -> {:noreply, state}
{:error, reason} -> {:stop, reason, state}
end
end
defp maybe_produce(message, from, %{batches: batches, batch_num: batch_num} = state) when length(batches) + 1 >= batch_num do
# Batches are accumulated newest-first, so reverse before sending.
result = [{from, message}|batches] |> Enum.reverse |> do_produce(state)
state = %{state|batches: []}
# NOTE(review): the linger timer is not cancelled/reset here, so a
# previously armed timer can still fire for the next batch — confirm
# this early flush is intended.
case result do
:ok -> {:noreply, state}
{:error, reason} -> {:stop, reason, state}
end
end
defp maybe_produce(message, from, %{linger_ms: linger_ms, batches: batches, timer: timer} = state) do
# Arm the linger timer only if one is not already running.
timer = case timer do
nil ->
:erlang.start_timer(linger_ms, self, :linger_timeout)
timer ->
timer
end
{:noreply, %{state|batches: [{from, message}|batches], timer: timer}}
end
# Flushing an empty batch is a no-op.
defp do_produce([], _state), do: :ok

# Sends the accumulated `{from, message}` pairs to the broker in a single
# request and replies to every waiting caller.
#
# Fix: on a transport-level failure each caller is now replied
# `{:error, reason}` — the same shape do_request/2 produces for
# per-message errors and terminate/2 uses — instead of the bare `reason`,
# so produce/2 callers always receive `:ok | {:error, reason}`.
#
# Returns `:ok`, or `{:error, reason}` so the caller can stop the worker.
defp do_produce(message_pairs, state) do
  case do_request(message_pairs, state) do
    {:ok, replies} ->
      Enum.each(replies, &do_reply/1)
      :ok

    {:error, reason} ->
      Enum.each(message_pairs, fn {from, _message} ->
        do_reply({from, {:error, reason}})
      end)

      {:error, reason}
  end
end
# Builds one Produce request for the whole batch and maps the broker's
# response back to per-caller replies.
# Returns {:ok, [{from, :ok | {:error, reason}}]} or {:error, reason}
# when the request itself failed.
defp do_request(message_pairs, %{topic: topic,
partition: partition,
acks: acks,
timeout: timeout,
compression: compression,
conn: conn}) do
# Stamp every message with this worker's topic and partition.
messages = Enum.map(message_pairs, fn {_from, message} ->
%{message | topic: topic, partition: partition}
end)
request = %Request{ required_acks: acks,
timeout: timeout,
compression: compression,
messages: messages }
# The pins (^topic, ^partition) assert the response is for exactly this
# worker's topic/partition.
case Connection.request(conn, request) do
{:ok, %Response{topics: [{^topic, [%{error: :no_error, partition: ^partition}]}]}} ->
replies = Enum.map(message_pairs, fn {from, _} ->
{from, :ok}
end)
{:ok, replies}
{:ok, %Response{topics: [{^topic, [%{error: reason}]}]}} ->
replies = Enum.map(message_pairs, fn {from, _} ->
{from, {:error, reason}}
end)
{:ok, replies}
{:error, reason} ->
{:error, reason}
end
end
# Delivers a reply to a waiting caller; `nil` marks an async (cast)
# message with nobody to reply to.
defp do_reply({nil, _reply}), do: :ok
defp do_reply({from, reply}), do: GenServer.reply(from, reply)
end
| 31.455497 | 127 | 0.519973 |
79881ad4f29223d9a79e42de338baed62cb27149 | 8,189 | exs | Elixir | exercises/scale-generator/scale_generator_test.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | null | null | null | exercises/scale-generator/scale_generator_test.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | null | null | null | exercises/scale-generator/scale_generator_test.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | 1 | 2018-07-19T23:43:56.000Z | 2018-07-19T23:43:56.000Z | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("scale_generator.exs", __DIR__)
end
ExUnit.start()
ExUnit.configure(trace: true, exclude: :pending)
defmodule ScaleGeneratorTest do
use ExUnit.Case
# Interval patterns, one letter per step (see the "step to next note"
# tests below): "m" = half-tone step, "M" = full-tone step,
# "A" = accidental (tone-and-a-half) step.
@major_scale_pattern "MMmMMMm"
@minor_scale_pattern "MmMMmMM"
@dorian_scale_pattern "MmMMMmM"
@mixolydian_scale_pattern "MMmMMmM"
@lydian_scale_pattern "MMMmMMm"
@phrygian_scale_pattern "mMMMmMM"
@locrian_scale_pattern "mMMmMMM"
@harmonic_minor_scale_pattern "MmMMmAm"
@melodic_minor_scale_pattern "MmMMMMm"
@octatonic_scale_pattern "MmMmMmMm"
@hexatonic_scale_pattern "MMMMMM"
@pentatonic_scale_pattern "MMAMA"
@enigmatic_scale_pattern "mAMMMmm"
describe "step to next note" do
# @tag :pending
test "with half-tone interval" do
assert ScaleGenerator.step(~w(C C# D D# E F F# G G# A A# B), "C", "m") == "C#"
end
@tag :pending
test "with full tone interval" do
assert ScaleGenerator.step(~w(C C# D D# E F F# G G# A A# B), "C", "M") == "D"
end
@tag :pending
test "with accidental interval" do
assert ScaleGenerator.step(~w(C C# D D# E F F# G G# A A# B), "C", "A") == "D#"
end
end
describe "generate chromatic scale" do
@tag :pending
test "starting with A" do
assert ScaleGenerator.chromatic_scale("A") == ~w(A A# B C C# D D# E F F# G G# A)
end
@tag :pending
test "starting with C" do
assert ScaleGenerator.chromatic_scale("C") == ~w(C C# D D# E F F# G G# A A# B C)
end
@tag :pending
test "starting with G" do
assert ScaleGenerator.chromatic_scale("G") == ~w(G G# A A# B C C# D D# E F F# G)
end
@tag :pending
test "works with with lowercase notes" do
assert ScaleGenerator.chromatic_scale("f#") == ~w(F# G G# A A# B C C# D D# E F F#)
end
end
describe "generate flat chromatic scale" do
@tag :pending
test "starting with A" do
assert ScaleGenerator.flat_chromatic_scale("A") == ~w(A Bb B C Db D Eb E F Gb G Ab A)
end
@tag :pending
test "starting with C" do
assert ScaleGenerator.flat_chromatic_scale("C") == ~w(C Db D Eb E F Gb G Ab A Bb B C)
end
@tag :pending
test "starting with G" do
assert ScaleGenerator.flat_chromatic_scale("G") == ~w(G Ab A Bb B C Db D Eb E F Gb G)
end
@tag :pending
test "works with with lowercase notes" do
assert ScaleGenerator.flat_chromatic_scale("Gb") == ~w(Gb G Ab A Bb B C Db D Eb E F Gb)
end
end
describe "find chromatic scale for flat tonics" do
@tag :pending
test "using F" do
assert ScaleGenerator.find_chromatic_scale("F") == ~w(F Gb G Ab A Bb B C Db D Eb E F)
end
@tag :pending
test "using Bb" do
assert ScaleGenerator.find_chromatic_scale("Bb") == ~w(Bb B C Db D Eb E F Gb G Ab A Bb)
end
@tag :pending
test "using Eb" do
assert ScaleGenerator.find_chromatic_scale("Eb") == ~w(Eb E F Gb G Ab A Bb B C Db D Eb)
end
@tag :pending
test "using Ab" do
assert ScaleGenerator.find_chromatic_scale("Ab") == ~w(Ab A Bb B C Db D Eb E F Gb G Ab)
end
@tag :pending
test "using Db" do
assert ScaleGenerator.find_chromatic_scale("Db") == ~w(Db D Eb E F Gb G Ab A Bb B C Db)
end
@tag :pending
test "using Gb" do
assert ScaleGenerator.find_chromatic_scale("Gb") == ~w(Gb G Ab A Bb B C Db D Eb E F Gb)
end
@tag :pending
test "using d" do
assert ScaleGenerator.find_chromatic_scale("d") == ~w(D Eb E F Gb G Ab A Bb B C Db D)
end
@tag :pending
test "using g" do
assert ScaleGenerator.find_chromatic_scale("g") == ~w(G Ab A Bb B C Db D Eb E F Gb G)
end
@tag :pending
test "using c" do
assert ScaleGenerator.find_chromatic_scale("c") == ~w(C Db D Eb E F Gb G Ab A Bb B C)
end
@tag :pending
test "using f" do
assert ScaleGenerator.find_chromatic_scale("f") == ~w(F Gb G Ab A Bb B C Db D Eb E F)
end
@tag :pending
test "using bb" do
assert ScaleGenerator.find_chromatic_scale("bb") == ~w(Bb B C Db D Eb E F Gb G Ab A Bb)
end
@tag :pending
test "using eb" do
assert ScaleGenerator.find_chromatic_scale("eb") == ~w(Eb E F Gb G Ab A Bb B C Db D Eb)
end
end
describe "find chromatic scale for non-flat tonics" do
@tag :pending
test "using A" do
assert ScaleGenerator.find_chromatic_scale("A") == ~w(A A# B C C# D D# E F F# G G# A)
end
@tag :pending
test "using A#" do
assert ScaleGenerator.find_chromatic_scale("A#") == ~w(A# B C C# D D# E F F# G G# A A#)
end
@tag :pending
test "using B" do
assert ScaleGenerator.find_chromatic_scale("B") == ~w(B C C# D D# E F F# G G# A A# B)
end
@tag :pending
test "using C" do
assert ScaleGenerator.find_chromatic_scale("C") == ~w(C C# D D# E F F# G G# A A# B C)
end
@tag :pending
test "using C#" do
assert ScaleGenerator.find_chromatic_scale("C#") == ~w(C# D D# E F F# G G# A A# B C C#)
end
@tag :pending
test "using D" do
assert ScaleGenerator.find_chromatic_scale("D") == ~w(D D# E F F# G G# A A# B C C# D)
end
@tag :pending
test "using D#" do
assert ScaleGenerator.find_chromatic_scale("D#") == ~w(D# E F F# G G# A A# B C C# D D#)
end
@tag :pending
test "using E" do
assert ScaleGenerator.find_chromatic_scale("E") == ~w(E F F# G G# A A# B C C# D D# E)
end
@tag :pending
test "using F#" do
assert ScaleGenerator.find_chromatic_scale("F#") == ~w(F# G G# A A# B C C# D D# E F F#)
end
@tag :pending
test "using G" do
assert ScaleGenerator.find_chromatic_scale("G") == ~w(G G# A A# B C C# D D# E F F# G)
end
@tag :pending
test "using G#" do
assert ScaleGenerator.find_chromatic_scale("G#") == ~w(G# A A# B C C# D D# E F F# G G#)
end
end
describe "generate scale from tonic and pattern" do
@tag :pending
test "C Major scale" do
assert ScaleGenerator.scale("C", @major_scale_pattern) == ~w(C D E F G A B C)
end
@tag :pending
test "G Major scale" do
assert ScaleGenerator.scale("G", @major_scale_pattern) == ~w(G A B C D E F# G)
end
@tag :pending
test "f# minor scale" do
assert ScaleGenerator.scale("f#", @minor_scale_pattern) == ~w(F# G# A B C# D E F#)
end
@tag :pending
test "b flat minor scale" do
assert ScaleGenerator.scale("bb", @minor_scale_pattern) == ~w(Bb C Db Eb F Gb Ab Bb)
end
@tag :pending
test "D Dorian scale" do
assert ScaleGenerator.scale("d", @dorian_scale_pattern) == ~w(D E F G A B C D)
end
@tag :pending
test "E flat Mixolydian scale" do
assert ScaleGenerator.scale("Eb", @mixolydian_scale_pattern) == ~w(Eb F G Ab Bb C Db Eb)
end
@tag :pending
test "a Lydian scale" do
assert ScaleGenerator.scale("a", @lydian_scale_pattern) == ~w(A B C# D# E F# G# A)
end
@tag :pending
test "e Phrygian scale" do
assert ScaleGenerator.scale("e", @phrygian_scale_pattern) == ~w(E F G A B C D E)
end
@tag :pending
test "g Locrian scale" do
assert ScaleGenerator.scale("g", @locrian_scale_pattern) == ~w(G Ab Bb C Db Eb F G)
end
@tag :pending
test "d Harmonic minor scale" do
assert ScaleGenerator.scale("d", @harmonic_minor_scale_pattern) == ~w(D E F G A Bb Db D)
end
@tag :pending
test "C Melodic minor scale" do
assert ScaleGenerator.scale("C", @melodic_minor_scale_pattern) == ~w(C D D# F G A B C)
end
@tag :pending
test "C Octatonic scale" do
assert ScaleGenerator.scale("C", @octatonic_scale_pattern) == ~w(C D D# F F# G# A B C)
end
@tag :pending
test "D flat Hexatonic scale" do
assert ScaleGenerator.scale("Db", @hexatonic_scale_pattern) == ~w(Db Eb F G A B Db)
end
@tag :pending
test "A Pentatonic scale" do
assert ScaleGenerator.scale("A", @pentatonic_scale_pattern) == ~w(A B C# E F# A)
end
@tag :pending
test "G Enigmatic scale" do
assert ScaleGenerator.scale("G", @enigmatic_scale_pattern) == ~w(G G# B C# D# F F# G)
end
end
end
| 29.039007 | 94 | 0.618146 |
798823bd5bfdb219c8eb3df83000ad6347a1974c | 2,213 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_super_chat_details.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_super_chat_details.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_super_chat_details.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.LiveChatSuperChatDetails do
@moduledoc """
## Attributes
* `amountDisplayString` (*type:* `String.t`, *default:* `nil`) - A rendered string that displays the fund amount and currency to the user.
* `amountMicros` (*type:* `String.t`, *default:* `nil`) - The amount purchased by the user, in micros (1,750,000 micros = 1.75).
* `currency` (*type:* `String.t`, *default:* `nil`) - The currency in which the purchase was made.
* `tier` (*type:* `integer()`, *default:* `nil`) - The tier in which the amount belongs. Lower amounts belong to lower tiers. The lowest tier is 1.
* `userComment` (*type:* `String.t`, *default:* `nil`) - The comment added by the user to this Super Chat event.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:amountDisplayString => String.t(),
:amountMicros => String.t(),
:currency => String.t(),
:tier => integer(),
:userComment => String.t()
}
field(:amountDisplayString)
field(:amountMicros)
field(:currency)
field(:tier)
field(:userComment)
end
# Generated Poison protocol implementations: decoding delegates to the
# model's own decode/2 (from GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.LiveChatSuperChatDetails do
def decode(value, options) do
GoogleApi.YouTube.V3.Model.LiveChatSuperChatDetails.decode(value, options)
end
end
# Encoding delegates to the shared Gax ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.LiveChatSuperChatDetails do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.508475 | 151 | 0.70357 |
79883505a90e65ea290936a9467b7e926b500730 | 1,553 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/launch_template_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/launch_template_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/launch_template_response.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dataflow.V1b3.Model.LaunchTemplateResponse do
@moduledoc """
Response to the request to launch a template.
## Attributes
- job (Job): The job that was launched, if the request was not a dry run and the job was successfully launched. Defaults to: `null`.
"""
defstruct [
:"job"
]
end
# Generated Poison protocol implementations: decoding deserializes the
# nested "job" field into a GoogleApi.Dataflow.V1b3.Model.Job struct.
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.LaunchTemplateResponse do
import GoogleApi.Dataflow.V1b3.Deserializer
def decode(value, options) do
value
|> deserialize(:"job", :struct, GoogleApi.Dataflow.V1b3.Model.Job, options)
end
end
# Encoding drops nil fields via the shared serializer helper.
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.LaunchTemplateResponse do
def encode(value, options) do
GoogleApi.Dataflow.V1b3.Deserializer.serialize_non_nil(value, options)
end
end
| 32.354167 | 134 | 0.755956 |
79883b9fa6a65fdcef7dcafd85a77a15590ed73f | 1,411 | ex | Elixir | lib/possible_unused_methods/token_locator.ex | joshuaclayton/possible_unused_methods | dd9f01d41f9433a6be8cc55e6de8d2f6c038c9dd | [
"MIT"
] | 4 | 2016-01-25T17:20:02.000Z | 2019-05-20T15:34:24.000Z | lib/possible_unused_methods/token_locator.ex | joshuaclayton/possible_unused_methods | dd9f01d41f9433a6be8cc55e6de8d2f6c038c9dd | [
"MIT"
] | null | null | null | lib/possible_unused_methods/token_locator.ex | joshuaclayton/possible_unused_methods | dd9f01d41f9433a6be8cc55e6de8d2f6c038c9dd | [
"MIT"
] | 1 | 2019-06-19T16:05:04.000Z | 2019-06-19T16:05:04.000Z | defmodule PossibleUnusedMethods.TokenLocator do
@moduledoc """
Searches a given codebase for a list of tokens using The Silver Searcher
(https://github.com/ggreer/the_silver_searcher).
This returns a map with tokens mapped to a map of files and the number of
occurrences per file:
%{
"first_name" => %{
"app/models/user.rb" => 1,
"app/views/users/_user.html.erb" => 1
"app/views/users/_form.html.erb" => 2
},
"last_name" => %{
"app/models/user.rb" => 1,
"app/views/users/_user.html.erb" => 1
"app/views/users/_form.html.erb" => 2
}
}
NOTICE:
This uses :os.cmd to run the command in raw form and is interpolating
user-generated values. Use at your own risk.
"""
# Searches the codebase for every non-empty token and returns a map of
# token => %{file => occurrence count}.
def run(items) do
  non_blank = Enum.reject(items, &(&1 == ""))
  build_matches(non_blank)
end
# Builds the token => matches map by running one search per token.
defp build_matches(items) do
  items
  |> Enum.map(fn item -> {item, matches_for(item)} end)
  |> Enum.into(%{})
end
# Runs The Silver Searcher for a literal token and returns the raw
# "path:count" lines of its `-c` output.
#
# Fix: uses System.cmd/3 with an argument list so the token is passed to
# `ag` verbatim instead of being interpolated into a shell command line,
# closing the shell-injection hole called out in the @moduledoc.
# `ag` exits non-zero when nothing matches, so the exit status is
# intentionally ignored; empty output simply yields an empty map.
defp matches_for(item) do
  {output, _exit_status} = System.cmd("ag", [item, "-c", "-Q"])

  output
  |> String.strip
  |> String.split("\n")
  |> Enum.reject(fn(line) -> line == "" end)
  |> matches_to_map
end
# Converts `ag -c` output lines ("path:count") into a %{path => count} map.
#
# Fix: the count is the segment after the LAST colon, not the first —
# splitting as `[line | count]` broke on file paths containing ":" (the
# wrong segment was taken as the count and String.to_integer raised).
defp matches_to_map(matches) do
  Enum.reduce(matches, %{}, fn(item, acc) ->
    parts = String.split(item, ":")
    count = parts |> List.last |> String.to_integer
    path = parts |> Enum.drop(-1) |> Enum.join(":")
    Map.put(acc, path, count)
  end)
end
end
| 24.327586 | 75 | 0.603118 |
7988428159c680ac3de3763d7adb21dc748397b5 | 5,976 | exs | Elixir | test/twirp/client_test.exs | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | [
"Apache-2.0"
] | 30 | 2019-11-03T16:30:13.000Z | 2020-06-23T19:38:53.000Z | test/twirp/client_test.exs | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | [
"Apache-2.0"
] | 16 | 2020-03-13T17:56:16.000Z | 2020-06-11T10:40:02.000Z | test/twirp/client_test.exs | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | [
"Apache-2.0"
] | 3 | 2019-12-05T16:43:15.000Z | 2020-05-11T21:34:44.000Z | defmodule Twirp.ClientTest do
use ExUnit.Case, async: false
alias Twirp.Error
alias Twirp.Test.Req
alias Twirp.Test.Resp
alias Twirp.Test.EchoClient, as: Client
# Starts a fresh Bypass stub server per test and boots the client against
# it. Tests tagged `@tag client_type: :json` opt into the JSON codec;
# everything else defaults to :proto (protobuf).
setup tags do
service = Bypass.open()
base_url = "http://localhost:#{service.port}"
content_type = tags[:client_type] || :proto
{:ok, _} = start_supervised({Client, url: base_url, content_type: content_type})
{:ok, service: service}
end
# The generated client module should expose an echo/2 RPC function.
test "generated clients have rpc functions defined on them" do
assert {:echo, 2} in Client.__info__(:functions)
end
# Happy path (protobuf): request is encoded and sent with the protobuf
# content type, and the protobuf response body is decoded into a Resp.
test "makes an http call if the rpc is defined", %{service: service} do
Bypass.expect(service, fn conn ->
assert Plug.Conn.get_req_header(conn, "content-type") == ["application/protobuf"]
{:ok, body, conn} = Plug.Conn.read_body(conn)
assert %Req{msg: "test"} == Req.decode(body)
body = Resp.encode(Resp.new(msg: "test"))
conn
|> Plug.Conn.put_resp_content_type("application/protobuf")
|> Plug.Conn.resp(200, body)
end)
resp = Client.echo(Req.new(msg: "test"))
assert {:ok, Resp.new(msg: "test")} == resp
end
# Same round trip with the JSON codec enabled via the setup tag above.
@tag client_type: :json
test "json encoding and decoding", %{service: service} do
Bypass.expect(service, fn conn ->
assert Plug.Conn.get_req_header(conn, "content-type") == ["application/json"]
{:ok, body, conn} = Plug.Conn.read_body(conn)
assert %{"msg" => "Test"} == Jason.decode!(body)
conn
|> Plug.Conn.put_resp_content_type("application/json")
|> Plug.Conn.resp(200, ~s|{"msg": "Test"}|)
end)
assert {:ok, resp} = Client.echo(Req.new(msg: "Test"))
assert match?(%Resp{}, resp)
assert resp.msg == "Test"
end
# A 200 response whose Content-Type doesn't match the configured codec is
# surfaced as an :internal Twirp error.
test "incorrect headers are returned", %{service: service} do
Bypass.expect(service, fn conn ->
conn
|> Plug.Conn.put_resp_content_type("application/msgpack")
|> Plug.Conn.resp(200, ~s|foo|)
end)
assert {:error, resp} = Client.echo(Req.new(msg: "test"))
assert match?(%Error{code: :internal}, resp)
end
# A response with no Content-Type header at all is also an :internal error,
# with an explanatory message.
test "no headers are returned", %{service: service} do
Bypass.expect(service, fn conn ->
conn
|> Plug.Conn.resp(200, ~s|foo|)
end)
assert {:error, resp} = Client.echo(Req.new(msg: "test"))
assert match?(%Error{code: :internal}, resp)
assert resp.msg == ~s|Expected response Content-Type "application/protobuf" but found nil|
end
# A non-JSON error body is treated as coming from an intermediary
# (proxy/load balancer), with the raw body preserved in meta.
test "error is not json", %{service: service} do
Bypass.expect(service, fn conn ->
conn
|> Plug.Conn.send_resp(503, ~s|plain text error|)
end)
assert {:error, resp} = Client.echo(Req.new(msg: "test"))
assert resp.code == :unavailable
assert resp.msg == "unavailable"
assert resp.meta["http_error_from_intermediary"] == "true"
assert resp.meta["not_a_twirp_error_because"] == "Response is not JSON"
assert resp.meta["body"] == "plain text error"
end
# A JSON error body without a "code" field falls back to :unknown.
test "error has no code", %{service: service} do
Bypass.expect(service, fn conn ->
conn
|> Plug.Conn.put_resp_content_type("application/json")
|> Plug.Conn.send_resp(500, ~s|{"msg": "I have no code"}|)
end)
assert {:error, resp} = Client.echo(Req.new(msg: "test"))
assert resp.code == :unknown
assert resp.msg == "unknown"
assert resp.meta["http_error_from_intermediary"] == "true"
assert resp.meta["not_a_twirp_error_because"] ==
"Response is JSON but it has no \"code\" attribute"
end
# A code that isn't a valid Twirp error code is normalized to :internal and
# the offending value is recorded under meta["invalid_code"].
test "error has incorrect code", %{service: service} do
Bypass.expect(service, fn conn ->
conn
|> Plug.Conn.put_resp_content_type("application/json")
|> Plug.Conn.send_resp(500, ~s|{"code": "keathley", "msg": "incorrect code"}|)
end)
assert {:error, resp} = Client.echo(Req.new(msg: "test"))
assert resp.code == :internal
assert resp.msg == "Invalid Twirp error code: keathley"
assert resp.meta["invalid_code"] == "keathley"
end
# The error's "meta" object is decoded and exposed as a plain map.
test "error has meta", %{service: service} do
Bypass.expect(service, fn conn ->
resp =
~s|{"code": "internal", "msg": "Internal Server Error", "meta": {"cause": "some exception"}}|
conn
|> Plug.Conn.put_resp_content_type("application/json")
|> Plug.Conn.send_resp(500, resp)
end)
assert {:error, resp} = Client.echo(Req.new(msg: "test"))
assert resp.code == :internal
assert resp.msg == "Internal Server Error"
assert resp.meta == %{"cause" => "some exception"}
end
# Twirp forbids redirects; a 302 is reported as an intermediary failure.
test "redirect errors", %{service: service} do
Bypass.expect(service, fn conn ->
url = "https://keathley.io"
conn
|> Plug.Conn.put_resp_header("location", url)
|> Plug.Conn.send_resp(302, url)
end)
assert {:error, resp} = Client.echo(Req.new(msg: "test"))
assert match?(%Error{code: :internal}, resp)
assert resp.meta["http_error_from_intermediary"] == "true"
assert resp.meta["not_a_twirp_error_because"] == "Redirects not allowed on Twirp requests"
end
# A refused connection maps to the :unavailable error code.
test "service is down", %{service: service} do
Bypass.down(service)
assert {:error, resp} = Client.echo(Req.new(msg: "test"))
assert resp.code == :unavailable
end
# Skipped: exercises the client stubbing API (Twirp.Client.Stub), including
# the StubError raised for undefined RPCs and bad stub return values.
@tag :skip
test "clients are easy to stub" do
Twirp.Client.Stub.new(
echo: fn %{msg: "foo"} ->
Error.unavailable("test")
end
)
assert {:error, %Error{code: :unavailable}} = Client.echo(Req.new(msg: "foo"))
Twirp.Client.Stub.new(
echo: fn %{msg: "foo"} ->
Resp.new(msg: "foo")
end
)
assert {:ok, %Resp{msg: "foo"}} = Client.echo(Req.new(msg: "foo"))
assert_raise Twirp.Client.StubError, ~r/does not define/, fn ->
Twirp.Client.Stub.new()
Client.echo(Req.new(msg: "foo"))
end
assert_raise Twirp.Client.StubError, ~r/expected to return/, fn ->
Twirp.Client.Stub.new(
echo: fn _ ->
{:ok, Req.new(msg: "test")}
end
)
Client.echo(Req.new(msg: "foo"))
end
end
end
| 30.963731 | 101 | 0.626171 |
7988797f3e41e0d7ec1394d1165aa5e1d736e12b | 6,018 | exs | Elixir | .credo.exs | pay-it-off/dwolla-elixir | ea71c44ed1b02be5cf416225b872490a4f7e4bf9 | [
"MIT"
] | null | null | null | .credo.exs | pay-it-off/dwolla-elixir | ea71c44ed1b02be5cf416225b872490a4f7e4bf9 | [
"MIT"
] | null | null | null | .credo.exs | pay-it-off/dwolla-elixir | ea71c44ed1b02be5cf416225b872490a4f7e4bf9 | [
"MIT"
] | 1 | 2022-02-13T17:14:59.000Z | 2022-02-13T17:14:59.000Z | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any exec using `mix credo -C <name>`. If no exec name is given,
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: ["lib/", "src/", "test/", "web/", "apps/"],
excluded: [~r"/_build/", ~r"/deps/"]
},
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: [],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: false,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
#
## Consistency Checks
#
{Credo.Check.Consistency.ExceptionNames},
{Credo.Check.Consistency.LineEndings},
{Credo.Check.Consistency.ParameterPatternMatching},
{Credo.Check.Consistency.SpaceAroundOperators},
{Credo.Check.Consistency.SpaceInParentheses},
{Credo.Check.Consistency.TabsOrSpaces},
#
## Design Checks
#
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage, priority: :low},
# For some checks, you can also set other parameters
#
# If you don't want the `setup` and `test` macro calls in ExUnit tests
# or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
# set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
#
{Credo.Check.Design.DuplicatedCode, excluded_macros: []},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, exit_status: 2},
{Credo.Check.Design.TagFIXME},
#
## Readability Checks
#
{Credo.Check.Readability.AliasOrder},
{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.LargeNumbers},
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 80},
{Credo.Check.Readability.ModuleAttributeNames},
{Credo.Check.Readability.ModuleDoc},
{Credo.Check.Readability.ModuleNames},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs},
{Credo.Check.Readability.ParenthesesInCondition},
{Credo.Check.Readability.PredicateFunctionNames},
{Credo.Check.Readability.PreferImplicitTry},
{Credo.Check.Readability.RedundantBlankLines},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.TrailingBlankLine},
{Credo.Check.Readability.TrailingWhiteSpace},
{Credo.Check.Readability.VariableNames},
{Credo.Check.Readability.Semicolons},
{Credo.Check.Readability.SpaceAfterCommas},
#
## Refactoring Opportunities
#
{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
{Credo.Check.Refactor.FunctionArity},
{Credo.Check.Refactor.LongQuoteBlocks},
{Credo.Check.Refactor.MatchInCondition},
{Credo.Check.Refactor.NegatedConditionsInUnless},
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart,
excluded_argument_types: [:atom, :binary, :fn, :keyword], excluded_functions: []},
{Credo.Check.Refactor.UnlessWithElse},
#
## Warnings
#
{Credo.Check.Warning.BoolOperationOnSameValues},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck},
{Credo.Check.Warning.IExPry},
{Credo.Check.Warning.IoInspect},
{Credo.Check.Warning.OperationOnSameValues},
{Credo.Check.Warning.OperationWithConstantResult},
{Credo.Check.Warning.UnusedEnumOperation},
{Credo.Check.Warning.UnusedFileOperation},
{Credo.Check.Warning.UnusedKeywordOperation},
{Credo.Check.Warning.UnusedListOperation},
{Credo.Check.Warning.UnusedPathOperation},
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},
{Credo.Check.Warning.RaiseInsideRescue},
#
# Controversial and experimental checks (opt-in, just remove `, false`)
#
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
{Credo.Check.Consistency.MultiAliasImportRequireUse, false},
#
# Deprecated checks (these will be deleted after a grace period)
#
{Credo.Check.Readability.Specs, false}
#
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}
| 37.6125 | 91 | 0.633267 |
7988917238112f78e3c75e902b97fedd99e51c40 | 4,448 | ex | Elixir | clients/plus_domains/lib/google_api/plus_domains/v1/model/activity_object_attachments.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/plus_domains/lib/google_api/plus_domains/v1/model/activity_object_attachments.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/plus_domains/lib/google_api/plus_domains/v1/model/activity_object_attachments.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachments do
@moduledoc """
## Attributes
* `content` (*type:* `String.t`, *default:* `nil`) - If the attachment is an article, this property contains a snippet of text from the article. It can also include descriptions for other types.
* `displayName` (*type:* `String.t`, *default:* `nil`) - The title of the attachment, such as a photo caption or an article title.
* `embed` (*type:* `GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsEmbed.t`, *default:* `nil`) - If the attachment is a video, the embeddable link.
* `fullImage` (*type:* `GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsFullImage.t`, *default:* `nil`) - The full image URL for photo attachments.
* `id` (*type:* `String.t`, *default:* `nil`) - The ID of the attachment.
* `image` (*type:* `GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsImage.t`, *default:* `nil`) - The preview image for photos or videos.
* `objectType` (*type:* `String.t`, *default:* `nil`) - The type of media object. Possible values include, but are not limited to, the following values:
- "photo" - A photo.
- "album" - A photo album.
- "video" - A video.
- "article" - An article, specified by a link.
* `previewThumbnails` (*type:* `list(GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsPreviewThumbnails.t)`, *default:* `nil`) - When previewing, these are the optional thumbnails for the post. When posting an article, choose one by setting the attachment.image.url property. If you don't choose one, one will be chosen for you.
* `thumbnails` (*type:* `list(GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsThumbnails.t)`, *default:* `nil`) - If the attachment is an album, this property is a list of potential additional thumbnails from the album.
* `url` (*type:* `String.t`, *default:* `nil`) - The link to the attachment, which should be of type text/html.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:content => String.t(),
:displayName => String.t(),
:embed => GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsEmbed.t(),
:fullImage => GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsFullImage.t(),
:id => String.t(),
:image => GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsImage.t(),
:objectType => String.t(),
:previewThumbnails =>
list(GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsPreviewThumbnails.t()),
:thumbnails =>
list(GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsThumbnails.t()),
:url => String.t()
}
field(:content)
field(:displayName)
field(:embed, as: GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsEmbed)
field(:fullImage, as: GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsFullImage)
field(:id)
field(:image, as: GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsImage)
field(:objectType)
field(
:previewThumbnails,
as: GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsPreviewThumbnails,
type: :list
)
field(
:thumbnails,
as: GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachmentsThumbnails,
type: :list
)
field(:url)
end
defimpl Poison.Decoder, for: GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachments do
def decode(value, options) do
GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachments.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.PlusDomains.V1.Model.ActivityObjectAttachments do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 48.879121 | 339 | 0.717176 |
79889940f8a9649d36638c6b8c3ebdccbd022bba | 302 | ex | Elixir | lib/events_dashboard/banking_details.ex | Hermanlangner/events_dashboard | 30d5d3666ac8057bfd91dd33131d8132c97cfec9 | [
"MIT"
] | null | null | null | lib/events_dashboard/banking_details.ex | Hermanlangner/events_dashboard | 30d5d3666ac8057bfd91dd33131d8132c97cfec9 | [
"MIT"
] | null | null | null | lib/events_dashboard/banking_details.ex | Hermanlangner/events_dashboard | 30d5d3666ac8057bfd91dd33131d8132c97cfec9 | [
"MIT"
] | null | null | null | defmodule EventsDashboard.BankingDetails do
use Ecto.Schema
import Ecto.Changeset
# Maps the "banking_details" table; both columns are plain integers.
# NOTE(review): storing account numbers as integers drops leading zeros —
# confirm a string column isn't required here.
schema "banking_details" do
field :customer_id, :integer
field :account_number, :integer
end
@doc "Casts the permitted banking fields from `params` onto `struct`."
def changeset(struct, params) do
  cast(struct, params, [:account_number, :customer_id])
end
end
| 20.133333 | 52 | 0.728477 |
7988c0c40163b396233eb98b1258c0f5ee4370e3 | 458 | ex | Elixir | apps/gitgud_web/test/support/data_factory.ex | rogervezaro/gitgud | 6656f8c2df16817a6c5325fb4c18b03f9d3f7140 | [
"MIT"
] | 449 | 2018-03-06T01:05:55.000Z | 2022-03-23T21:03:56.000Z | apps/gitgud_web/test/support/data_factory.ex | rogervezaro/gitgud | 6656f8c2df16817a6c5325fb4c18b03f9d3f7140 | [
"MIT"
] | 69 | 2018-03-06T09:26:41.000Z | 2022-03-21T22:43:09.000Z | apps/gitgud_web/test/support/data_factory.ex | rogervezaro/gitgud | 6656f8c2df16817a6c5325fb4c18b03f9d3f7140 | [
"MIT"
] | 41 | 2018-03-06T01:06:07.000Z | 2021-11-21T17:55:04.000Z | defmodule GitGud.Web.DataFactory do
@moduledoc """
This module provides functions to generate all kind of test data.
"""
@doc false
defmacro __using__(_opts) do
# Injects a `factory/2` helper into the using module.
quote do
def factory(name, params \\ []) do
try do
# `unquote(__MODULE__)` resolves at this macro's definition site, i.e.
# GitGud.Web.DataFactory, so web-specific factories are tried first.
apply(unquote(__MODULE__), name, List.wrap(params))
rescue
UndefinedFunctionError ->
# Anything not defined there falls back to the shared data factories.
apply(GitGud.DataFactory, name, List.wrap(params))
end
end
end
end
end
| 21.809524 | 67 | 0.617904 |
7988ef89dc49e5144f9f62251c96a25f5010455e | 1,649 | ex | Elixir | apps/flair/lib/flair/durations.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 26 | 2019-09-20T23:54:45.000Z | 2020-08-20T14:23:32.000Z | apps/flair/lib/flair/durations.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 757 | 2019-08-15T18:15:07.000Z | 2020-09-18T20:55:31.000Z | apps/flair/lib/flair/durations.ex | jakeprem/smartcitiesdata | da309ac0d2261527278951cbae88604455207589 | [
"Apache-2.0"
] | 9 | 2019-11-12T16:43:46.000Z | 2020-03-25T16:23:16.000Z | defmodule Flair.Durations do
@moduledoc """
Calculate durations.
This is done first by reducing multiple data messages to an accumulater,
and then second by calculating the durations for those aggregate metrics.
"""
alias SmartCity.Data
@doc """
Aggregates data messages into a mapping of dataset ids to timing data lists.
"""
@spec reducer(Data.t(), map()) :: %{String.t() => [SmartCity.Data.Timing.t()]}
def reducer(%Data{dataset_id: id, operational: %{timing: timing}}, acc) do
  # Prepend this message's timing entries onto whatever has already been
  # accumulated for the dataset; the first message just wraps them in a list.
  Map.update(acc, id, List.wrap(timing), &(timing ++ &1))
end
@doc """
Converts raw individual timing metrics into aggregated timing statistics by app and label.
"""
@spec calculate_durations({String.t(), map()}) :: {String.t(), map()}
def calculate_durations({dataset_id, raw_metrics}) do
  # Group timings by {app, label}, convert each entry to a duration in
  # milliseconds, then aggregate each group into summary statistics.
  # Map.new/2 replaces `Enum.map |> Enum.into(Map.new())`, building the map
  # in a single pass without an intermediate list.
  calculated_metrics =
    raw_metrics
    |> Enum.group_by(&durations_key_fn/1, &durations_val_fn/1)
    |> Map.new(&get_durations/1)

  {dataset_id, calculated_metrics}
end
# Timings sharing the same application and label aggregate together.
defp durations_key_fn(%{app: app, label: label}), do: {app, label}
# Duration of one timing entry in milliseconds. The `0` offset match means
# non-UTC (or invalid) ISO8601 timestamps raise a MatchError.
defp durations_val_fn(%{start_time: start_time, end_time: end_time}) do
{:ok, start_time, 0} = DateTime.from_iso8601(start_time)
{:ok, end_time, 0} = DateTime.from_iso8601(end_time)
DateTime.diff(end_time, start_time, :millisecond)
end
# Aggregates one {key, durations} group into summary statistics.
defp get_durations({key, durations}) do
  # length/1 is O(n) on lists — compute it once instead of twice.
  count = length(durations)

  {key,
   %{
     count: count,
     max: Enum.max(durations),
     min: Enum.min(durations),
     average: Enum.sum(durations) / count,
     stdev: Statistics.stdev(durations)
   }}
end
end
| 30.537037 | 92 | 0.67071 |
7988faedd410ea429173ed0d31173d22c5194efb | 728 | ex | Elixir | lib/boots_of_speed_web/gettext.ex | Baradoy/boots_of_speed | 6004da8fb43e15cb7443e0bd00fe70c936a41015 | [
"MIT"
] | 1 | 2019-04-18T05:10:21.000Z | 2019-04-18T05:10:21.000Z | lib/boots_of_speed_web/gettext.ex | Baradoy/boots_of_speed | 6004da8fb43e15cb7443e0bd00fe70c936a41015 | [
"MIT"
] | 3 | 2020-07-16T17:41:44.000Z | 2021-05-08T03:39:49.000Z | lib/boots_of_speed_web/gettext.ex | Baradoy/boots_of_speed | 6004da8fb43e15cb7443e0bd00fe70c936a41015 | [
"MIT"
] | null | null | null | defmodule BootsOfSpeedWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import BootsOfSpeedWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :boots_of_speed
end
| 29.12 | 72 | 0.68544 |
79890cfc26daaaab556110e18ade8bcdec8f30cd | 830 | exs | Elixir | priv/repo/migrations/20140623215331_add_package_owners_table.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | priv/repo/migrations/20140623215331_add_package_owners_table.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | priv/repo/migrations/20140623215331_add_package_owners_table.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z | defmodule Hexpm.Repo.Migrations.AddPackageOwnersTable do
use Ecto.Migration
def up() do
# Create the many-to-many join table between packages and their owners.
execute("""
CREATE TABLE package_owners (
id serial PRIMARY KEY,
package_id integer REFERENCES packages,
owner_id integer REFERENCES users)
""")
execute("CREATE INDEX ON package_owners (package_id)")
# Backfill the join table from the old single-owner column, then drop it.
execute("""
INSERT INTO package_owners (package_id, owner_id)
SELECT id, owner_id FROM packages
""")
execute("ALTER TABLE packages DROP owner_id")
end
def down() do
# Restore the single-owner column and repopulate it from the join table.
# NOTE(review): a package with multiple owners keeps only one of them on
# rollback — presumably acceptable, but worth confirming.
execute("ALTER TABLE packages ADD owner_id integer REFERENCES users")
execute("""
UPDATE packages SET owner_id = package_owners.owner_id
FROM package_owners
WHERE package_owners.package_id = id
""")
execute("DROP TABLE IF EXISTS package_owners")
end
end
| 24.411765 | 73 | 0.679518 |
79890fff84b3d0989487d213e0b409a92696cfc8 | 1,113 | exs | Elixir | elixir-22/config/config.exs | rikkus/adventofcode2016 | dceb9af8e3f1f460109bb881e9d9d33fb0892d6e | [
"MIT"
] | null | null | null | elixir-22/config/config.exs | rikkus/adventofcode2016 | dceb9af8e3f1f460109bb881e9d9d33fb0892d6e | [
"MIT"
] | null | null | null | elixir-22/config/config.exs | rikkus/adventofcode2016 | dceb9af8e3f1f460109bb881e9d9d33fb0892d6e | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :aoc22, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:aoc22, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.903226 | 73 | 0.750225 |
798917c6d5cf2f5f06a4c7c9486647c80d80e673 | 26,095 | ex | Elixir | lib/elixir/lib/task.ex | Zanadar/elixir | ef967b3e07f189b9cae37d5b12bd7258619b3e15 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/task.ex | Zanadar/elixir | ef967b3e07f189b9cae37d5b12bd7258619b3e15 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/task.ex | Zanadar/elixir | ef967b3e07f189b9cae37d5b12bd7258619b3e15 | [
"Apache-2.0"
] | null | null | null | defmodule Task do
@moduledoc """
Conveniences for spawning and awaiting tasks.
Tasks are processes meant to execute one particular
action throughout their lifetime, often with little or no
communication with other processes. The most common use case
for tasks is to convert sequential code into concurrent code
by computing a value asynchronously:
task = Task.async(fn -> do_some_work() end)
res = do_some_other_work()
res + Task.await(task)
Tasks spawned with `async` can be awaited on by their caller
process (and only their caller) as shown in the example above.
They are implemented by spawning a process that sends a message
to the caller once the given computation is performed.
Besides `async/1` and `await/2`, tasks can also be
started as part of a supervision tree and dynamically spawned
on remote nodes. We will explore all three scenarios next.
## async and await
One of the common uses of tasks is to convert sequential code
into concurrent code with `Task.async/1` while keeping its semantics.
When invoked, a new process will be created, linked and monitored
by the caller. Once the task action finishes, a message will be sent
to the caller with the result.
`Task.await/2` is used to read the message sent by the task.
There are two important things to consider when using `async`:
1. If you are using async tasks, you **must await** a reply
as they are *always* sent. If you are not expecting a reply,
consider using `Task.start_link/1` detailed below.
2. async tasks link the caller and the spawned process. This
means that, if the caller crashes, the task will crash
too and vice-versa. This is on purpose: if the process
meant to receive the result no longer exists, there is
no purpose in completing the computation.
If this is not desired, use `Task.start/1` or consider starting
the task under a `Task.Supervisor` using `async_nolink` or
`start_child`.
`Task.yield/2` is an alternative to `await/2` where the caller will
temporarily block, waiting until the task replies or crashes. If the
result does not arrive within the timeout, it can be called again at a
later moment. This allows checking for the result of a task multiple
times. If a reply does not arrive within the desired time,
`Task.shutdown/2` can be used to stop the task.
## Supervised tasks
It is also possible to spawn a task under a supervisor:
import Supervisor.Spec
children = [
#
worker(Task, [fn -> IO.puts "ok" end])
]
Internally the supervisor will invoke `Task.start_link/1`.
Since these tasks are supervised and not directly linked to
the caller, they cannot be awaited on. Note `start_link/1`,
unlike `async/1`, returns `{:ok, pid}` (which is
the result expected by supervision trees).
By default, most supervision strategies will try to restart
a worker after it exits regardless of the reason. If you design
the task to terminate normally (as in the example with `IO.puts/2`
above), consider passing `restart: :transient` in the options
to `Supervisor.Spec.worker/3`.
## Dynamically supervised tasks
The `Task.Supervisor` module allows developers to dynamically
create multiple supervised tasks.
A short example is:
{:ok, pid} = Task.Supervisor.start_link()
task = Task.Supervisor.async(pid, fn ->
# Do something
end)
Task.await(task)
However, in the majority of cases, you want to add the task supervisor
to your supervision tree:
import Supervisor.Spec
children = [
supervisor(Task.Supervisor, [[name: MyApp.TaskSupervisor]])
]
Now you can dynamically start supervised tasks:
Task.Supervisor.start_child(MyApp.TaskSupervisor, fn ->
# Do something
end)
Or even use the async/await pattern:
Task.Supervisor.async(MyApp.TaskSupervisor, fn ->
# Do something
end) |> Task.await()
Finally, check `Task.Supervisor` for other supported operations.
## Distributed tasks
Since Elixir provides a Task supervisor, it is easy to use one
to dynamically spawn tasks across nodes:
# On the remote node
Task.Supervisor.start_link(name: MyApp.DistSupervisor)
# On the client
Task.Supervisor.async({MyApp.DistSupervisor, :remote@local},
MyMod, :my_fun, [arg1, arg2, arg3])
Note that, when working with distributed tasks, one should use the `Task.Supervisor.async/4` function
that expects explicit module, function and arguments, instead of `Task.Supervisor.async/2` that
works with anonymous functions. That's because anonymous functions expect
the same module version to exist on all involved nodes. Check the `Agent` module
documentation for more information on distributed processes as the limitations
described there apply to the whole ecosystem.
"""
@doc """
The Task struct.
It contains these fields:
* `:pid` - the PID of the task process; `nil` if the task does
not use a task process
* `:ref` - the task monitor reference
* `:owner` - the PID of the process that started the task
"""
defstruct pid: nil, ref: nil, owner: nil
@type t :: %__MODULE__{}
@doc """
Starts a task as part of a supervision tree.
"""
@spec start_link((() -> any)) :: {:ok, pid}
def start_link(fun) do
# Normalize the zero-arity fun to MFA form and delegate to start_link/3.
start_link(:erlang, :apply, [fun, []])
end
@doc """
Starts a task as part of a supervision tree.
"""
@spec start_link(module, atom, [term]) :: {:ok, pid}
def start_link(mod, fun, args) do
# get_info/1 (private, defined later in this module) captures caller info —
# presumably for error reporting; the supervised process applies the MFA.
Task.Supervised.start_link(get_info(self()), {mod, fun, args})
end
@doc """
Starts a task.
This is only used when the task is used for side-effects
(i.e. no interest in the returned result) and it should not
be linked to the current process.
"""
@spec start((() -> any)) :: {:ok, pid}
def start(fun) do
# Normalize the zero-arity fun to MFA form and delegate to start/3.
start(:erlang, :apply, [fun, []])
end
@doc """
Starts a task.
This is only used when the task is used for side-effects
(i.e. no interest in the returned result) and it should not
be linked to the current process.
"""
@spec start(module, atom, [term]) :: {:ok, pid}
def start(mod, fun, args) do
# Unlinked variant: Task.Supervised.start/2 (vs start_link/2 above) spawns
# the task without linking it to the caller.
Task.Supervised.start(get_info(self()), {mod, fun, args})
end
@doc """
Starts a task that must be awaited on.
This function spawns a process that is linked to and monitored
by the caller process. A `Task` struct is returned containing
the relevant information.
Read the `Task` module documentation for more info on general
usage of `async/1` and `async/3`.
See also `async/3`.
"""
@spec async((() -> any)) :: t
def async(fun) do
# Normalize the zero-arity fun to MFA form and delegate to async/3.
async(:erlang, :apply, [fun, []])
end
@doc """
Starts a task that must be awaited on.
A `Task` struct is returned containing the relevant information.
Developers must eventually call `Task.await/2` or `Task.yield/2`
followed by `Task.shutdown/2` on the returned task.
Read the `Task` module documentation for more info on general
usage of `async/1` and `async/3`.
## Linking
This function spawns a process that is linked to and monitored
by the caller process. The linking part is important because it
aborts the task if the parent process dies. It also guarantees
the code before async/await has the same properties after you
add the async call. For example, imagine you have this:
x = heavy_fun()
y = some_fun()
x + y
Now you want to make the `heavy_fun()` async:
x = Task.async(&heavy_fun/0)
y = some_fun()
Task.await(x) + y
As before, if `heavy_fun/0` fails, the whole computation will
fail, including the parent process. If you don't want the task
to fail then you must change the `heavy_fun/0` code in the
same way you would achieve it if you didn't have the async call.
For example, to either return `{:ok, val} | :error` results or,
in more extreme cases, by using `try/rescue`. In other words,
an asynchronous task should be thought of as an extension of a
process rather than a mechanism to isolate it from all errors.
If you don't want to link the caller to the task, then you
must use a supervised task with `Task.Supervisor` and call
`Task.Supervisor.async_nolink/2`.
In any case, avoid any of the following:
* Setting `:trap_exit` to `true` - trapping exits should be
used only in special circumstances as it would make your
process immune to not only exits from the task but from
any other processes.
Moreover, even when trapping exits, calling `await` will
still exit if the task has terminated without sending its
result back.
* Unlinking the task process started with `async`/`await`.
If you unlink the processes and the task does not belong
to any supervisor, you may leave dangling tasks in case
the parent dies.
## Message format
The reply sent by the task will be in the format `{ref, result}`,
where `ref` is the monitor reference held by the task struct
and `result` is the return value of the task function.
"""
@spec async(module, atom, [term]) :: t
def async(mod, fun, args) do
mfa = {mod, fun, args}
owner = self()
pid = Task.Supervised.spawn_link(owner, get_info(owner), mfa)
ref = Process.monitor(pid)
send(pid, {owner, ref})
%Task{pid: pid, ref: ref, owner: owner}
end
@doc """
Returns a stream that runs the given `module`, `function`, and `args`
concurrently on each item in `enumerable`.
Each item will be prepended to the given `args` and processed by its
own task. The tasks will be linked to an intermediate process that is
then linked to the current process. This means a failure in a task
terminates the current process and a failure in the current process
terminates all tasks.
When streamed, each task will emit `{:ok, val}` upon successful
completion or `{:exit, val}` if the caller is trapping exits. Results
are emitted in the same order as the original `enumerable`.
The level of concurrency can be controlled via the `:max_concurrency`
option and defaults to `System.schedulers_online/0`. A timeout
can also be given as an option representing the maximum amount of
time to wait without a task reply.
Finally, consider using `Task.Supervisor.async_stream/6` to start tasks
under a supervisor. If you find yourself trapping exits to handle exits
inside the async stream, consider using `Task.Supervisor.async_stream_nolink/6`
to start tasks that are not linked to the current process.
## Options
* `:max_concurrency` - sets the maximum number of tasks to run
at the same time. Defaults to `System.schedulers_online/0`.
* `:timeout` - the maximum amount of time (in milliseconds) each
task is allowed to execute for. Defaults to `5000`.
* `:on_timeout` - what to do when a task times out. The possible
values are:
* `:exit` (default) - the process that spawned the tasks exits.
* `:kill_task` - the task that timed out is killed. The value
emitted for that task is `{:exit, :timeout}`.
## Example
Let's build a stream and then enumerate it:
stream = Task.async_stream(collection, Mod, :expensive_fun, [])
Enum.to_list(stream)
The concurrency can be increased or decreased using the `:max_concurrency`
option. For example, if the tasks are IO heavy, the value can be increased:
max_concurrency = System.schedulers_online * 2
stream = Task.async_stream(collection, Mod, :expensive_fun, [], max_concurrency: max_concurrency)
Enum.to_list(stream)
"""
@spec async_stream(Enumerable.t, module, atom, [term], Keyword.t) :: Enumerable.t
def async_stream(enumerable, module, function, args, options \\ [])
when is_atom(module) and is_atom(function) and is_list(args) do
build_stream(enumerable, {module, function, args}, options)
end
@doc """
Returns a stream that runs the given function `fun` concurrently
on each item in `enumerable`.
Each `enumerable` item is passed as argument to the given function `fun` and
processed by its own task. The tasks will be linked to the current process,
similarly to `async/1`.
## Example
Count the codepoints in each string asynchronously, then add the counts together using reduce.
iex> strings = ["long string", "longer string", "there are many of these"]
iex> stream = Task.async_stream(strings, fn text -> text |> String.codepoints |> Enum.count end)
iex> Enum.reduce(stream, 0, fn {:ok, num}, acc -> num + acc end)
47
See `async_stream/5` for discussion, options, and more examples.
"""
  @spec async_stream(Enumerable.t, (term -> term), Keyword.t) :: Enumerable.t
  # Same as async_stream/5, but each item is handed to a one-arity fun.
  def async_stream(enumerable, fun, options \\ []) when is_function(fun, 1) do
    build_stream(enumerable, fun, options)
  end
defp build_stream(enumerable, fun, options) do
&Task.Supervised.stream(enumerable, &1, &2, fun, options, fn owner, mfa ->
{:link, Task.Supervised.spawn_link(owner, get_info(owner), mfa)}
end)
end
# Returns a tuple with the node where this is executed and either the
# registered name of the given pid or the pid of where this is executed. Used
# when exiting from tasks to print out from where the task was started.
defp get_info(pid) do
self_or_name =
case Process.info(pid, :registered_name) do
{:registered_name, []} -> self()
{:registered_name, name} -> name
end
{node(), self_or_name}
end
@doc """
Awaits a task reply and returns it.
A timeout, in milliseconds, can be given with default value
of `5000`. In case the task process dies, this function will
exit with the same reason as the task.
If the timeout is exceeded, `await` will exit; however,
the task will continue to run. When the calling process exits, its
exit signal will terminate the task if it is not trapping exits.
This function assumes the task's monitor is still active or the monitor's
`:DOWN` message is in the message queue. If it has been demonitored, or the
message already received, this function will wait for the duration of the
timeout awaiting the message.
This function can only be called once for any given task. If you want
to be able to check multiple times if a long-running task has finished
its computation, use `yield/2` instead.
## Compatibility with OTP behaviours
It is not recommended to `await` a long-running task inside an OTP
behaviour such as `GenServer`. Instead, you should match on the message
coming from a task inside your `GenServer.handle_info/2` callback.
## Examples
iex> task = Task.async(fn -> 1 + 1 end)
iex> Task.await(task)
2
"""
  @spec await(t, timeout) :: term | no_return
  def await(task, timeout \\ 5000)

  # Only the process that started the task may await it.
  def await(%Task{owner: owner} = task, _) when owner != self() do
    raise ArgumentError, invalid_owner_error(task)
  end

  def await(%Task{ref: ref} = task, timeout) do
    receive do
      {^ref, reply} ->
        # Reply arrived: drop the monitor, flushing any :DOWN already queued.
        Process.demonitor(ref, [:flush])
        reply
      {:DOWN, ^ref, _, proc, reason} ->
        # Task died before replying; exit with the (normalized) same reason.
        exit({reason(reason, proc), {__MODULE__, :await, [task, timeout]}})
    after
      timeout ->
        # Timed out: stop watching, but note the task itself keeps running.
        Process.demonitor(ref, [:flush])
        exit({:timeout, {__MODULE__, :await, [task, timeout]}})
    end
  end
  @doc false
  # TODO: Remove on 2.0
  # (hard-deprecated in elixir_dispatch)
  # Matches a `{ref, reply}` task message against a list of tasks; on a match,
  # demonitors the task and returns {reply, task}.
  def find(tasks, {ref, reply}) when is_reference(ref) do
    Enum.find_value tasks, fn
      %Task{ref: ^ref} = task ->
        Process.demonitor(ref, [:flush])
        {reply, task}
      %Task{} ->
        nil
    end
  end

  # Exits only when the :DOWN message belongs to one of the given tasks.
  def find(tasks, {:DOWN, ref, _, proc, reason} = msg) when is_reference(ref) do
    find = fn %Task{ref: task_ref} -> task_ref == ref end
    if Enum.find(tasks, find) do
      exit({reason(reason, proc), {__MODULE__, :find, [tasks, msg]}})
    end
  end

  # Any other message does not belong to these tasks.
  def find(_tasks, _msg) do
    nil
  end
@doc ~S"""
Temporarily blocks the current process waiting for a task reply.
Returns `{:ok, reply}` if the reply is received, `nil` if
no reply has arrived, or `{:exit, reason}` if the task has already
exited. Keep in mind that normally a task failure also causes
the process owning the task to exit. Therefore this function can
return `{:exit, reason}` only if
* the task process exited with the reason `:normal`
* it isn't linked to the caller
* the caller is trapping exits
A timeout, in milliseconds, can be given with default value
of `5000`. If the time runs out before a message from
the task is received, this function will return `nil`
and the monitor will remain active. Therefore `yield/2` can be
called multiple times on the same task.
This function assumes the task's monitor is still active or the
monitor's `:DOWN` message is in the message queue. If it has been
demonitored or the message already received, this function will wait
for the duration of the timeout awaiting the message.
If you intend to shut the task down if it has not responded within `timeout`
milliseconds, you should chain this together with `shutdown/1`, like so:
case Task.yield(task, timeout) || Task.shutdown(task) do
{:ok, result} ->
result
nil ->
Logger.warn "Failed to get a result in #{timeout}ms"
nil
end
That ensures that if the task completes after the `timeout` but before `shutdown/1`
has been called, you will still get the result, since `shutdown/1` is designed to
handle this case and return the result.
"""
  @spec yield(t, timeout) :: {:ok, term} | {:exit, term} | nil
  def yield(task, timeout \\ 5_000)

  # Only the owner process may yield on a task.
  def yield(%Task{owner: owner} = task, _) when owner != self() do
    raise ArgumentError, invalid_owner_error(task)
  end

  def yield(%Task{ref: ref} = task, timeout) do
    receive do
      {^ref, reply} ->
        Process.demonitor(ref, [:flush])
        {:ok, reply}
      {:DOWN, ^ref, _, proc, :noconnection} ->
        # The task's node went down: exit, reporting which node was lost.
        exit({reason(:noconnection, proc), {__MODULE__, :yield, [task, timeout]}})
      {:DOWN, ^ref, _, _, reason} ->
        {:exit, reason}
    after
      timeout ->
        # No reply yet; the monitor stays active so yield/2 may be called again.
        nil
    end
  end
@doc """
Yields to multiple tasks in the given time interval.
This function receives a list of tasks and waits for their
replies in the given time interval. It returns a list
of tuples of two elements, with the task as the first element
and the yielded result as the second.
Similarly to `yield/2`, each task's result will be
* `{:ok, term}` if the task has successfully reported its
result back in the given time interval
* `{:exit, reason}` if the task has died
* `nil` if the task keeps running past the timeout
Check `yield/2` for more information.
## Example
`Task.yield_many/2` allows developers to spawn multiple tasks
and retrieve the results received in a given timeframe.
If we combine it with `Task.shutdown/2`, it allows us to gather
those results and cancel the tasks that have not replied in time.
Let's see an example.
tasks =
for i <- 1..10 do
Task.async(fn ->
Process.sleep(i * 1000)
i
end)
end
tasks_with_results = Task.yield_many(tasks, 5000)
results = Enum.map(tasks_with_results, fn {task, res} ->
# Shutdown the tasks that did not reply nor exit
res || Task.shutdown(task, :brutal_kill)
end)
# Here we are matching only on {:ok, value} and
# ignoring {:exit, _} (crashed tasks) and `nil` (no replies)
for {:ok, value} <- results do
IO.inspect value
end
In the example above, we create tasks that sleep from 1
up to 10 seconds and return the amount of seconds they slept.
If you execute the code all at once, you should see 1 up to 5
printed, as those were the tasks that have replied in the
given time. All other tasks will have been shut down using
the `Task.shutdown/2` call.
"""
  @spec yield_many([t], timeout) :: [{t, {:ok, term} | {:exit, term} | nil}]
  def yield_many(tasks, timeout \\ 5000) do
    timeout_ref = make_ref()
    # A single shared timer for the whole batch: once it fires, the remaining
    # tasks are collected with a zero timeout (reported as `nil`).
    timer_ref = Process.send_after(self(), timeout_ref, timeout)
    try do
      yield_many(tasks, timeout_ref, :infinity)
    catch
      # Thrown by yield_many/3 when a :DOWN carries :noconnection.
      {:noconnection, reason} ->
        exit({reason, {__MODULE__, :yield_many, [tasks, timeout]}})
    after
      Process.cancel_timer(timer_ref)
      # Drain the timeout message in case the timer fired before cancellation.
      receive do: (^timeout_ref -> :ok), after: (0 -> :ok)
    end
  end
  # Walks the task list in order, collecting one result tuple per task.
  # `timeout` is :infinity until the shared timer fires, then 0 so the rest of
  # the list is drained without further waiting.
  defp yield_many([%Task{ref: ref, owner: owner} = task | rest], timeout_ref, timeout) do
    if owner != self() do
      raise ArgumentError, invalid_owner_error(task)
    end
    receive do
      {^ref, reply} ->
        Process.demonitor(ref, [:flush])
        [{task, {:ok, reply}} | yield_many(rest, timeout_ref, timeout)]
      {:DOWN, ^ref, _, proc, :noconnection} ->
        # Escape to yield_many/2, which turns this into an exit.
        throw({:noconnection, reason(:noconnection, proc)})
      {:DOWN, ^ref, _, _, reason} ->
        [{task, {:exit, reason}} | yield_many(rest, timeout_ref, timeout)]
      ^timeout_ref ->
        # The shared timer fired: this and all remaining tasks yield nil.
        [{task, nil} | yield_many(rest, timeout_ref, 0)]
    after
      timeout ->
        [{task, nil} | yield_many(rest, timeout_ref, 0)]
    end
  end

  defp yield_many([], _timeout_ref, _timeout) do
    []
  end
@doc """
Unlinks and shuts down the task, and then checks for a reply.
Returns `{:ok, reply}` if the reply is received while shutting down the task,
`{:exit, reason}` if the task died, otherwise `nil`.
The shutdown method is either a timeout or `:brutal_kill`. In case
of a `timeout`, a `:shutdown` exit signal is sent to the task process
and if it does not exit within the timeout, it is killed. With `:brutal_kill`
the task is killed straight away. In case the task terminates abnormally
(possibly killed by another process), this function will exit with the same reason.
It is not required to call this function when terminating the caller, unless
exiting with reason `:normal` or if the task is trapping exits. If the caller is
exiting with a reason other than `:normal` and the task is not trapping exits, the
caller's exit signal will stop the task. The caller can exit with reason
`:shutdown` to shutdown all of its linked processes, including tasks, that
are not trapping exits without generating any log messages.
If a task's monitor has already been demonitored or received and there is not
a response waiting in the message queue this function will return
`{:exit, :noproc}` as the result or exit reason can not be determined.
"""
  @spec shutdown(t, timeout | :brutal_kill) :: {:ok, term} | {:exit, term} | nil
  def shutdown(task, shutdown \\ 5_000)

  def shutdown(%Task{pid: nil} = task, _) do
    raise ArgumentError, "task #{inspect task} does not have an associated task process"
  end

  def shutdown(%Task{owner: owner} = task, _) when owner != self() do
    raise ArgumentError, invalid_owner_error(task)
  end

  # Brutal kill: send :kill and wait (without timeout) for confirmation.
  def shutdown(%Task{pid: pid} = task, :brutal_kill) do
    mon = Process.monitor(pid)
    # Note: exit/2 here is the private helper below, not Kernel.exit/1.
    exit(pid, :kill)
    case shutdown_receive(task, mon, :brutal_kill, :infinity) do
      {:down, proc, :noconnection} ->
        exit({reason(:noconnection, proc), {__MODULE__, :shutdown, [task, :brutal_kill]}})
      {:down, _, reason} ->
        # Task terminated abnormally: report that reason to the caller.
        {:exit, reason}
      result ->
        result
    end
  end

  # Graceful shutdown: send :shutdown, escalating to :kill after `timeout`.
  def shutdown(%Task{pid: pid} = task, timeout) do
    mon = Process.monitor(pid)
    exit(pid, :shutdown)
    case shutdown_receive(task, mon, :shutdown, timeout) do
      {:down, proc, :noconnection} ->
        exit({reason(:noconnection, proc), {__MODULE__, :shutdown, [task, timeout]}})
      {:down, _, reason} ->
        {:exit, reason}
      result ->
        result
    end
  end
## Helpers
defp reason(:noconnection, proc), do: {:nodedown, monitor_node(proc)}
defp reason(reason, _), do: reason
defp monitor_node(pid) when is_pid(pid), do: node(pid)
defp monitor_node({_, node}), do: node
  # Spawns a watcher process to ensure the task still receives the exit signal
  # if the caller itself dies from an exit signal in the window between the
  # unlink and the exit below.
  defp exit(task, reason) do
    caller = self()
    ref = make_ref()
    enforcer = spawn(fn() -> enforce_exit(task, reason, caller, ref) end)
    Process.unlink(task)
    Process.exit(task, reason)
    # Tell the watcher the exit was delivered so it ignores caller death.
    send(enforcer, {:done, ref})
    :ok
  end
  # Watcher body: if the caller dies before confirming delivery via
  # {:done, ref}, forward the exit signal to the task on its behalf.
  defp enforce_exit(pid, reason, caller, ref) do
    mon = Process.monitor(caller)
    receive do
      {:done, ^ref} -> :ok
      {:DOWN, ^mon, _, _, _} -> Process.exit(pid, reason)
    end
  end
  # Waits for the shutdown monitor (`mon`) to report the task's death.
  # `type` tracks which exit reasons are "expected" for the shutdown mode:
  # :shutdown for graceful, :brutal_kill for kill, :timeout_kill after a
  # graceful shutdown escalated to kill.
  defp shutdown_receive(%{ref: ref} = task, mon, type, timeout) do
    receive do
      {:DOWN, ^mon, _, _, :shutdown} when type in [:shutdown, :timeout_kill] ->
        # Expected graceful exit: any late reply still wins over nil.
        Process.demonitor(ref, [:flush])
        flush_reply(ref)
      {:DOWN, ^mon, _, _, :killed} when type == :brutal_kill ->
        Process.demonitor(ref, [:flush])
        flush_reply(ref)
      {:DOWN, ^mon, _, proc, :noproc} ->
        # The process was already dead; dig out the real reason if queued.
        reason = flush_noproc(ref, proc, type)
        flush_reply(ref) || reason
      {:DOWN, ^mon, _, proc, reason} ->
        # Abnormal termination: report it unless a reply was already queued.
        Process.demonitor(ref, [:flush])
        flush_reply(ref) || {:down, proc, reason}
    after
      timeout ->
        # Graceful shutdown took too long: escalate to a kill and wait again.
        Process.exit(task.pid, :kill)
        shutdown_receive(task, mon, :timeout_kill, :infinity)
    end
  end
  # Fetches an already-delivered task reply from the mailbox, if any.
  defp flush_reply(ref) do
    receive do
      {^ref, reply} -> {:ok, reply}
    after
      0 -> nil
    end
  end
  # The shutdown monitor saw :noproc; consult the task's own monitor message
  # (if queued) to learn the real exit reason. Expected shutdown/kill reasons
  # map to nil so the caller returns the reply (or nil) instead.
  defp flush_noproc(ref, proc, type) do
    receive do
      {:DOWN, ^ref, _, _, :shutdown} when type in [:shutdown, :timeout_kill] ->
        nil
      {:DOWN, ^ref, _, _, :killed} when type == :brutal_kill ->
        nil
      {:DOWN, ^ref, _, _, reason} ->
        {:down, proc, reason}
    after
      0 ->
        # No :DOWN for the task's ref: the exit reason cannot be determined.
        Process.demonitor(ref, [:flush])
        {:down, proc, :noproc}
    end
  end
defp invalid_owner_error(task) do
"task #{inspect task} must be queried from the owner but was queried from #{inspect self()}"
end
end
| 34.380764 | 103 | 0.673079 |
7989407303fd5f044d98e06eedceb11b20a41e79 | 5,593 | ex | Elixir | lib/ex_bitmex/web_socket.ex | eduardoscottini/ex_bitmex | f8528bd635922e1777a5b01ea4941d625da7396e | [
"MIT"
] | 6 | 2019-02-13T04:05:19.000Z | 2020-12-31T07:40:09.000Z | lib/ex_bitmex/web_socket.ex | eduardoscottini/ex_bitmex | f8528bd635922e1777a5b01ea4941d625da7396e | [
"MIT"
] | 28 | 2021-03-29T06:46:42.000Z | 2022-03-28T11:03:38.000Z | lib/ex_bitmex/web_socket.ex | yurikoval/ex_bitmex | d9492789fb319fbdf78d90a99f7c0e40c95c1885 | [
"MIT"
] | 4 | 2019-05-03T21:27:10.000Z | 2021-01-12T09:26:34.000Z | defmodule ExBitmex.WebSocket do
@moduledoc """
BitMEX WebSocket client.
"""
import Process, only: [send_after: 3]
defmacro __using__(_opts) do
quote do
use WebSockex
require Logger
## API
def start_link(args \\ %{}) do
subscription = args[:subscribe] || []
auth_subscription = args[:auth_subscribe] || []
opts = consturct_opts(args)
state =
args
|> Map.merge(%{
subscribe: subscription,
auth_subscribe: auth_subscription,
heartbeat: 0
})
|> Map.merge(Map.new(opts))
WebSockex.start_link(base_uri(), __MODULE__, state, opts)
end
defp consturct_opts(args) do
name = args[:name] || __MODULE__
debug =
case args do
%{trace: true} -> [debug: [:trace]]
_ -> []
end
[name: name] ++ debug
end
      ## WebSocket Callbacks

      @impl true
      def handle_connect(_conn, %{name: name} = state) do
        :ok = Logger.info("#{name} connected")
        # Trigger subscription/authentication outside the connect callback.
        send(self(), :ws_subscribe)
        {:ok, state}
      end

      # Always attempts to reconnect; overridable via `defoverridable` below.
      @impl true
      def handle_disconnect(disconnect_map, state) do
        :ok = Logger.warn("#{__MODULE__} disconnected: #{inspect(disconnect_map)}")
        {:reconnect, state}
      end

      @impl true
      def handle_pong(:pong, state) do
        # A pong proves the connection is alive: bump the heartbeat counter.
        {:ok, inc_heartbeat(state)}
      end

      @impl true
      def handle_frame({:text, text}, state) do
        # NOTE(review): a Jason.decode/1 error is not matched below, so invalid
        # JSON raises CaseClauseError and crashes this process - confirm that
        # restart/reconnect is the intended recovery path.
        case Jason.decode(text) do
          {:ok, %{"request" => %{"op" => "authKey"}, "success" => true} = payload} ->
            # Authentication succeeded: subscribe to the private topics now.
            subscribe(self(), state[:auth_subscribe])
            handle_response(payload, state)
          {:ok, payload} ->
            handle_response(payload, state)
        end
        {:ok, inc_heartbeat(state)}
      end

      @impl true
      def handle_frame(msg, %{name: name} = state) do
        :ok = Logger.warn("#{name} received unexpected WebSocket response: " <> inspect(msg))
        {:ok, state}
      end
      ## OTP Callbacks

      # Casts are ignored.
      @impl true
      def handle_cast(_msg, state) do
        {:ok, state}
      end

      @impl true
      def handle_info(
            :ws_subscribe,
            %{subscribe: subscription, auth_subscribe: auth_subscription} = state
          ) do
        if match?([_ | _], subscription) do
          subscribe(self(), subscription)
        end
        # Authenticated topics are only subscribed once the authKey reply
        # arrives (see handle_frame/2).
        if match?([_ | _], auth_subscription) do
          authenticate(self(), Map.get(state, :config))
        end
        # Start the application-level heartbeat loop after a grace period.
        send_after(self(), {:heartbeat, :ping, 1}, 20_000)
        {:ok, state}
      end

      @impl true
      def handle_info(
            {:heartbeat, :ping, expected_heartbeat},
            %{heartbeat: heartbeat} = state
          ) do
        if heartbeat >= expected_heartbeat do
          # Traffic was seen since the last check: just reschedule.
          send_after(self(), {:heartbeat, :ping, heartbeat + 1}, 1_000)
          {:ok, state}
        else
          if not test_mode() do
            :ok =
              Logger.warn("#{__MODULE__} sent heartbeat ##{heartbeat} due to low connectivity")
          end
          # Send a ping frame and expect a pong to bump the counter within 4s.
          send_after(self(), {:heartbeat, :pong, heartbeat + 1}, 4_000)
          {:reply, :ping, state}
        end
      end

      @impl true
      def handle_info(
            {:heartbeat, :pong, expected_heartbeat},
            %{heartbeat: heartbeat} = state
          ) do
        if heartbeat >= expected_heartbeat do
          send_after(self(), {:heartbeat, :ping, heartbeat + 1}, 1_000)
          {:ok, state}
        else
          # No pong arrived in time: close the connection.
          :ok = Logger.warn("#{__MODULE__} terminated due to " <> "no heartbeat ##{heartbeat}")
          {:close, state}
        end
      end

      # Allows reply_op/3 to push frames from outside the callback cycle.
      @impl true
      def handle_info({:ws_reply, frame}, state) do
        {:reply, frame, state}
      end

      @impl true
      def handle_info(error, state) do
        output_error(error, state, "received unexpected message")
        {:ok, state}
      end

      @impl true
      def terminate(info, %{name: name} = _state) do
        :ok = Logger.error("#{name} terminated - #{inspect(info)} ")
      end
      ## Helpers

      # Sends a BitMEX operation frame (`%{op: op, args: args}`) through the
      # client process.
      def reply_op(server, op, args) do
        json = Jason.encode!(%{op: op, args: args})
        send(server, {:ws_reply, {:text, json}})
      end

      def subscribe(server, channels) do
        reply_op(server, "subscribe", channels)
      end

      # Signs and sends the authKey operation with the configured credentials.
      # NOTE(review): on missing credentials a :terminate message is sent, but
      # it is only logged by the catch-all handle_info/2 above, so the
      # connection stays up - confirm whether a real shutdown was intended.
      def authenticate(server, config) do
        nonce = ExBitmex.Auth.nonce()
        %{api_key: api_key, api_secret: api_secret} = ExBitmex.Credentials.config(config)
        if is_nil(api_key) || is_nil(api_secret) do
          :ok = Logger.error("Missing Bitmex API credentials")
          send(server, :terminate)
        else
          sig = ExBitmex.Auth.sign(api_secret, "GET", "/realtime", nonce, "")
          reply_op(server, "authKey", [api_key, nonce, sig])
        end
      end

      # Default payload handler; using modules may override it.
      def handle_response(resp, _state) do
        :ok = Logger.debug("#{__MODULE__} received response: #{inspect(resp)}")
      end

      defp inc_heartbeat(%{heartbeat: heartbeat} = state) do
        Map.put(state, :heartbeat, heartbeat + 1)
      end

      defp output_error(error, state, msg) do
        :ok =
          Logger.error("#{__MODULE__} #{msg}: #{inspect(error)}" <> "\nstate: #{inspect(state)}")
      end

      # When the :test_mode app env is set, the testnet endpoint is used and
      # low-connectivity warnings are silenced.
      defp test_mode do
        Application.get_env(:ex_bitmex, :test_mode, false)
      end

      defp base_uri do
        "wss://" <> ((test_mode() && "testnet") || "www") <> ".bitmex.com/realtime"
      end
defoverridable handle_response: 2, handle_disconnect: 2
end
end
end
| 27.150485 | 97 | 0.548006 |
798950c7583214bbdf239acb551f704664182ee4 | 3,200 | ex | Elixir | clients/run/lib/google_api/run/v1/model/expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/run/lib/google_api/run/v1/model/expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/run/lib/google_api/run/v1/model/expr.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Run.V1.Model.Expr do
  @moduledoc """
  Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.
  ## Attributes
  * `description` (*type:* `String.t`, *default:* `nil`) - Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
  * `expression` (*type:* `String.t`, *default:* `nil`) - Textual representation of an expression in Common Expression Language syntax.
  * `location` (*type:* `String.t`, *default:* `nil`) - Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
  * `title` (*type:* `String.t`, *default:* `nil`) - Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :description => String.t() | nil,
          :expression => String.t() | nil,
          :location => String.t() | nil,
          :title => String.t() | nil
        }

  # field/1 comes from GoogleApi.Gax.ModelBase and registers each attribute
  # for JSON (de)serialization.
  field(:description)
  field(:expression)
  field(:location)
  field(:title)
end
defimpl Poison.Decoder, for: GoogleApi.Run.V1.Model.Expr do
  # Delegate Poison decoding to the model's generated decode/2.
  def decode(value, options), do: GoogleApi.Run.V1.Model.Expr.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Run.V1.Model.Expr do
  # All generated models encode through the shared Gax ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 57.142857 | 1,092 | 0.730938 |
79897b9398a77dec325ea67c9dab9d67f0419fa5 | 888 | exs | Elixir | test/test_helper.exs | pguillory/hypex | bc6e315ea2471fc0604c81589984c7ab82c0088c | [
"MIT"
] | 14 | 2018-01-02T15:35:36.000Z | 2021-08-30T21:41:55.000Z | test/test_helper.exs | pguillory/hypex | bc6e315ea2471fc0604c81589984c7ab82c0088c | [
"MIT"
] | 2 | 2019-04-26T03:39:01.000Z | 2020-03-26T06:22:06.000Z | test/test_helper.exs | pguillory/hypex | bc6e315ea2471fc0604c81589984c7ab82c0088c | [
"MIT"
] | 2 | 2018-06-22T00:50:41.000Z | 2020-03-25T18:28:23.000Z | ExUnit.start()
# Shared helpers for the Hypex test suite.
defmodule TestHelper do
  @moduledoc false

  # Total number of register bits for a HyperLogLog width `b`:
  # 2^b registers of `b` bits each, i.e. 2^b * b.
  # (Replaces the original pipe-into-anonymous-fn with a plain expression.)
  def calculate_m(b) do
    round(:math.pow(2, b)) * b
  end

  # Recursively collects all regular file paths under `root`.
  # The reduce + flatten traversal (and its resulting order) is preserved on
  # purpose, since callers Code.require_file/1 the returned paths.
  def read_files_r(root) do
    root
    |> File.ls!
    |> Enum.map(&(Path.join(root, &1)))
    |> Enum.reduce([], fn(path, paths) ->
      if File.dir?(path) do
        [read_files_r(path)|paths]
      else
        [path|paths]
      end
    end)
    |> List.flatten
  end

  # Reads test/resources/large_registers.txt - a comma-separated list of
  # integer register values - into a flat bitstring (one byte per value).
  def read_large_registers do
    __ENV__.file
    |> Path.dirname
    |> Path.join("resources")
    |> Path.join("large_registers.txt")
    |> File.read!
    |> String.split(",")
    |> Enum.map(fn bit ->
      # String.trim/1 replaces the deprecated String.strip/1 (same behaviour).
      bit
      |> String.trim()
      |> Integer.parse()
      |> elem(0)
    end)
    |> :erlang.list_to_bitstring
  end
end
# Compile/require every Hypex test support file under test/hypex before the
# suite runs.
__ENV__.file
|> Path.dirname
|> Path.join("hypex")
|> TestHelper.read_files_r
|> Enum.each(&(Code.require_file/1))
| 18.122449 | 41 | 0.541667 |
798a070cace53b568eaf30f5afd76de0a9d6132b | 2,188 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/standard_scheduler_settings.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/standard_scheduler_settings.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/app_engine/lib/google_api/app_engine/v1/model/standard_scheduler_settings.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AppEngine.V1.Model.StandardSchedulerSettings do
  @moduledoc """
  Scheduler settings for standard environment.
  ## Attributes
  * `maxInstances` (*type:* `integer()`, *default:* `nil`) - Maximum number of instances to run for this version. Set to zero to disable max_instances configuration.
  * `minInstances` (*type:* `integer()`, *default:* `nil`) - Minimum number of instances to run for this version. Set to zero to disable min_instances configuration.
  * `targetCpuUtilization` (*type:* `float()`, *default:* `nil`) - Target CPU utilization ratio to maintain when scaling.
  * `targetThroughputUtilization` (*type:* `float()`, *default:* `nil`) - Target throughput utilization ratio to maintain when scaling
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :maxInstances => integer(),
          :minInstances => integer(),
          :targetCpuUtilization => float(),
          :targetThroughputUtilization => float()
        }

  # field/1 comes from GoogleApi.Gax.ModelBase and registers each attribute
  # for JSON (de)serialization.
  field(:maxInstances)
  field(:minInstances)
  field(:targetCpuUtilization)
  field(:targetThroughputUtilization)
end
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.StandardSchedulerSettings do
  # Delegate Poison decoding to the model's generated decode/2.
  def decode(value, options) do
    GoogleApi.AppEngine.V1.Model.StandardSchedulerSettings.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.StandardSchedulerSettings do
  # All generated models encode through the shared Gax ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.071429 | 167 | 0.734461 |
798a28daed7fd88a19c92815efb402f89377b608 | 148 | exs | Elixir | test/ex_double_entry/guard_test.exs | coinjar/ex_double_entry | e2fde666daac1b42e7a32f704dabb61f8283a70c | [
"MIT"
] | 9 | 2022-02-05T08:00:05.000Z | 2022-03-20T04:53:46.000Z | test/ex_double_entry/guard_test.exs | coinjar/ex_double_entry | e2fde666daac1b42e7a32f704dabb61f8283a70c | [
"MIT"
] | 1 | 2022-02-24T06:14:07.000Z | 2022-02-24T06:14:07.000Z | test/ex_double_entry/guard_test.exs | coinjar/ex_double_entry | e2fde666daac1b42e7a32f704dabb61f8283a70c | [
"MIT"
] | 1 | 2022-02-24T02:54:01.000Z | 2022-02-24T02:54:01.000Z | defmodule ExDoubleEntry.GuardTest do
use ExDoubleEntry.DataCase, async: true
alias ExDoubleEntry.{Account, Guard, Transfer}
doctest Guard
end
| 24.666667 | 48 | 0.804054 |
798a76d7fd342cf84f165b8d103b159a7048df62 | 9,208 | ex | Elixir | clients/poly/lib/google_api/poly/v1/api/users.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/poly/lib/google_api/poly/v1/api/users.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/poly/lib/google_api/poly/v1/api/users.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Poly.V1.Api.Users do
@moduledoc """
API calls for all endpoints tagged `Users`.
"""
alias GoogleApi.Poly.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Lists assets authored by the given user. Only the value 'me', representing
the currently-authenticated user, is supported. May include assets with an
access level of PRIVATE or
UNLISTED and assets which are
All Rights Reserved for the
currently-authenticated user.
## Parameters
* `connection` (*type:* `GoogleApi.Poly.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - A valid user id. Currently, only the special value 'me', representing the
currently-authenticated user is supported. To use 'me', you must pass
an OAuth token with the request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:format` (*type:* `String.t`) - Return only assets with the matching format. Acceptable values are:
`BLOCKS`, `FBX`, `GLTF`, `GLTF2`, `OBJ`, and `TILT`.
* `:orderBy` (*type:* `String.t`) - Specifies an ordering for assets. Acceptable values are:
`BEST`, `NEWEST`, `OLDEST`. Defaults to `BEST`, which ranks assets
based on a combination of popularity and other features.
* `:pageSize` (*type:* `integer()`) - The maximum number of assets to be returned. This value must be between `1`
and `100`. Defaults to `20`.
* `:pageToken` (*type:* `String.t`) - Specifies a continuation token from a previous search whose results were
split into multiple pages. To get the next page, submit the same request
specifying the value from
next_page_token.
* `:visibility` (*type:* `String.t`) - The visibility of the assets to be returned.
Defaults to
VISIBILITY_UNSPECIFIED
which returns all assets.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Poly.V1.Model.ListUserAssetsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec poly_users_assets_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Poly.V1.Model.ListUserAssetsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def poly_users_assets_list(connection, name, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:format => :query,
:orderBy => :query,
:pageSize => :query,
:pageToken => :query,
:visibility => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+name}/assets", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Poly.V1.Model.ListUserAssetsResponse{}])
end
@doc """
Lists assets that the user has liked. Only the value 'me', representing
the currently-authenticated user, is supported. May include assets with an
access level of UNLISTED.
## Parameters
* `connection` (*type:* `GoogleApi.Poly.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - A valid user id. Currently, only the special value 'me', representing the
currently-authenticated user is supported. To use 'me', you must pass
an OAuth token with the request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:format` (*type:* `String.t`) - Return only assets with the matching format. Acceptable values are:
`BLOCKS`, `FBX`, `GLTF`, `GLTF2`, `OBJ`, `TILT`.
* `:orderBy` (*type:* `String.t`) - Specifies an ordering for assets. Acceptable values are:
`BEST`, `NEWEST`, `OLDEST`, 'LIKED_TIME'. Defaults to `LIKED_TIME`, which
ranks assets based on how recently they were liked.
* `:pageSize` (*type:* `integer()`) - The maximum number of assets to be returned. This value must be between `1`
and `100`. Defaults to `20`.
* `:pageToken` (*type:* `String.t`) - Specifies a continuation token from a previous search whose results were
split into multiple pages. To get the next page, submit the same request
specifying the value from
next_page_token.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Poly.V1.Model.ListLikedAssetsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec poly_users_likedassets_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Poly.V1.Model.ListLikedAssetsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def poly_users_likedassets_list(connection, name, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:format => :query,
:orderBy => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+name}/likedassets", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Poly.V1.Model.ListLikedAssetsResponse{}])
end
end
| 47.958333 | 196 | 0.635209 |
798a90937c57fa03463e9ffb28d7f0d55305ca5d | 493 | ex | Elixir | fade/lib/snapshot/types/broker_runtime_snapshot.ex | ahives/Fade | 7094b6703933e41a1400b1053764335e32928b0a | [
"Apache-2.0"
] | null | null | null | fade/lib/snapshot/types/broker_runtime_snapshot.ex | ahives/Fade | 7094b6703933e41a1400b1053764335e32928b0a | [
"Apache-2.0"
] | null | null | null | fade/lib/snapshot/types/broker_runtime_snapshot.ex | ahives/Fade | 7094b6703933e41a1400b1053764335e32928b0a | [
"Apache-2.0"
] | null | null | null | defmodule Fade.Snapshot.Types.BrokerRuntimeSnapshot do
  # TypedStruct declares the struct and its @type t in one DSL block.
  use TypedStruct

  alias Fade.Snapshot.Types.{GarbageCollection, RuntimeDatabase, RuntimeProcessChurnMetrics}

  # Point-in-time snapshot of a broker's runtime state.
  typedstruct do
    field(:identifier, String.t())
    field(:cluster_identifier, String.t())
    field(:version, String.t())
    field(:processes, RuntimeProcessChurnMetrics.t())
    field(:database, RuntimeDatabase.t())
    field(:gc, GarbageCollection.t())
    # Convenience constructor; `struct!/2` raises on unknown keys.
    # NOTE(review): this `def` sits inside the `typedstruct` block — confirm
    # it is intentional before moving it below the block.
    def new(fields), do: struct!(__MODULE__, fields)
  end
end
| 29 | 92 | 0.738337 |
798a9257968b6de2f85ef399952b3c50e3e43acb | 1,946 | ex | Elixir | lib/generator.ex | cthree/ExCRC | 1e9a110ec156f64aed1fb00d3c7274d56e113ce9 | [
"Apache-2.0"
] | 4 | 2018-01-21T07:35:30.000Z | 2021-10-20T10:17:34.000Z | lib/generator.ex | cthree/ExCRC | 1e9a110ec156f64aed1fb00d3c7274d56e113ce9 | [
"Apache-2.0"
] | null | null | null | lib/generator.ex | cthree/ExCRC | 1e9a110ec156f64aed1fb00d3c7274d56e113ce9 | [
"Apache-2.0"
] | 1 | 2018-07-10T11:28:52.000Z | 2018-07-10T11:28:52.000Z | defmodule ExCRC.Generator do
@moduledoc """
Functions used to generate the static tables included by this library
"""
use Bitwise
#
# Provide `print_crc_table/2` with a map and it will print the
# map in Elixir syntax which can be used statically. This is used to
# generate the tables in the `ExCRC.Tables` module.
#
def print_crc_table(table, width \\ 4) do
data =
table
|> Enum.to_list()
|> List.keysort(0)
|> Enum.map(fn({k,v}) ->
key = :io_lib.format("~2.16.0b", [k])
value = :io_lib.format("~4.16.0b", [v])
"0x#{key} => 0x#{value}"
end)
|> Enum.chunk_every(width, width, [])
|> Enum.map(fn(row) -> Enum.join(row, ", ") end)
|> Enum.map(fn(row) -> " #{row}," end)
|> Enum.join("\n")
trimmed =
data
|> String.trim_trailing("\n")
|> String.trim_trailing(",")
IO.puts("%{\n#{trimmed}\n}")
end
# Build the table data for polynomial 0x1021, return a map
def ccitt_table() do
for i <- 0..255, into: %{} do
crc = 0
c = i <<< 8
{i, ccitt_entry(c, crc, 0, 0x1021) &&& 0xffff}
end
end
# Compute a entry
defp ccitt_entry(_, crc, 8, _), do: crc
defp ccitt_entry(c, crc, bc, polynom) do
case (crc ^^^ c) &&& 0x8000 do
0 -> ccitt_entry(c <<< 1, crc <<< 1, bc + 1, polynom)
_ -> ccitt_entry(c <<< 1, (crc <<< 1) ^^^ polynom, bc + 1, polynom)
end
end
# Build the table data for polynomial 0x8408, return a map
def kermit_table() do
for i <- 0..255, into: %{} do
crc = 0
c = i
{i, kermit_entry(c, crc, 0, 0x8408) &&& 0xffff}
end
end
# Compute a entry
defp kermit_entry(_, crc, 8, _), do: crc
defp kermit_entry(c, crc, bc, polynom) do
case (crc ^^^ c) &&& 1 do
0 -> kermit_entry(c >>> 1, crc >>> 1, bc + 1, polynom)
_ -> kermit_entry(c >>> 1, (crc >>> 1) ^^^ polynom, bc + 1, polynom)
end
end
end
| 26.657534 | 74 | 0.549332 |
798a9d673f1588f9c171281522ea0a8c3920eda0 | 115 | ex | Elixir | lib/blog/mailer.ex | ench0/blog | 04f7df2357b13dddee9d82cd1c35bbd0ce9618a9 | [
"MIT"
] | 2 | 2017-06-08T23:28:13.000Z | 2017-06-08T23:28:16.000Z | lib/blog/mailer.ex | ench0/blog | 04f7df2357b13dddee9d82cd1c35bbd0ce9618a9 | [
"MIT"
] | null | null | null | lib/blog/mailer.ex | ench0/blog | 04f7df2357b13dddee9d82cd1c35bbd0ce9618a9 | [
"MIT"
] | null | null | null | defmodule Blog.Mailer do
  @moduledoc """
  Swoosh mailer for Blog.

  `use Swoosh.Mailer` injects the delivery API; adapter and credentials are
  looked up in the `:blog` OTP application's configuration at delivery time.
  """
  use Swoosh.Mailer, otp_app: :blog
end
| 14.375 | 35 | 0.678261 |
798aaed2ab1fddb26e5508a45881883c93f3fb1c | 824 | exs | Elixir | binariesstringscharlist/binariesbitstrings.exs | ezkemboi/elixir | 5641c8c9cc24b5e51546328b7fbba04d3b28e3b2 | [
"Apache-2.0"
] | 1 | 2020-01-21T19:58:41.000Z | 2020-01-21T19:58:41.000Z | binariesstringscharlist/binariesbitstrings.exs | ezkemboi/elixir | 5641c8c9cc24b5e51546328b7fbba04d3b28e3b2 | [
"Apache-2.0"
] | null | null | null | binariesstringscharlist/binariesbitstrings.exs | ezkemboi/elixir | 5641c8c9cc24b5e51546328b7fbba04d3b28e3b2 | [
"Apache-2.0"
] | null | null | null | # defining binary using << >>
IO.inspect <<1, 2, 3>> # will print <<1, 2, 3>>
# size of byte
IO.puts byte_size(<< 1,2,3 >>)
# binary is a sequence of bytes
IO.puts String.valid?(<<239, 191, 19>>) # returns false
# binary concatanation operator/string concatanation
IO.inspect <<1, 2, 3>> <> <<4, 5>>
# concatinate inner binary with string to see inner reperesentation
IO.inspect "EzKemboi" <> <<0>> # prints <<104, 101, 108, 108, 111, 0>>
# view string's binary representations
IO.inspect("ezkemboi", binaries: :as_binaries)
# pattern match binary strings
<<x, 2, y>> = <<1, 2, 3>>
IO.inspect x
IO.inspect y
# want to match binary of unknown size
# use binary modifier at the end of the pattern
<<0, 1, x ::binary >> = <<0, 1, 2, 3>>
IO.inspect x
# string concatanation
"ez" <> name = "ezkemboi"
IO.puts name # print kemboi
| 34.333333 | 70 | 0.669903 |
798ac831da25acc2b01f1907dcb5ce8331a61707 | 870 | exs | Elixir | test/secrets_cache_test.exs | FloatingGhost/secrets_cache | 72cb49ee5ba51e21d516ad729943cbd97d972e25 | [
"MIT"
] | 1 | 2020-05-11T09:00:53.000Z | 2020-05-11T09:00:53.000Z | test/secrets_cache_test.exs | FloatingGhost/secrets_cache | 72cb49ee5ba51e21d516ad729943cbd97d972e25 | [
"MIT"
] | null | null | null | test/secrets_cache_test.exs | FloatingGhost/secrets_cache | 72cb49ee5ba51e21d516ad729943cbd97d972e25 | [
"MIT"
] | null | null | null | defmodule SecretsCacheTest do
use ExUnit.Case
describe "Config merge" do
test "works for flat keys" do
secret = %{
key: "value"
}
|> Jason.encode!()
{:ok, true} = Cachex.put(:aws_secrets_cache, "test", secret)
Application.put_env(:my_app, :existing_key, "yui")
result = SecretsCache.get_config(:my_app, "test")
assert result[:existing_key] == "yui"
assert result[:key] == "value"
end
test "works for embedded keys" do
secret = %{
"key.embedded": "value"
}
|> Jason.encode!()
{:ok, true} = Cachex.put(:aws_secrets_cache, "test", secret)
Application.put_env(:my_app, :existing_key, "yui")
result = SecretsCache.get_config(:my_app, "test")
assert result[:existing_key] == "yui"
assert result[:key][:embedded] == "value"
end
end
end
| 25.588235 | 66 | 0.596552 |
798af181917cc5110f00c40b72530fa0e88a2081 | 1,138 | ex | Elixir | lib/nomex/response.ex | neojin/nomad-elixir | 95d169adcfe42549a4f7d8e939c7a47e211fbb04 | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2017-12-08T17:56:01.000Z | 2022-01-04T00:26:43.000Z | lib/nomex/response.ex | neojin/nomad-elixir | 95d169adcfe42549a4f7d8e939c7a47e211fbb04 | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2017-11-24T18:10:47.000Z | 2017-11-26T17:58:11.000Z | lib/nomex/response.ex | neojin/nomad-elixir | 95d169adcfe42549a4f7d8e939c7a47e211fbb04 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-07-20T15:27:13.000Z | 2021-07-20T15:27:13.000Z | defmodule Nomex.Response do
  alias Nomex.Response

  # Normalized response: headers, URL and status are copied verbatim from
  # the `HTTPoison.Response`; `body` is decoded (or wrapped) by `parse/1`.
  @type t :: %Response{
    headers: list,
    body: map,
    request_url: String.t,
    status_code: integer
  }

  # NOTE(review): the default for :body is a tuple (`{}`) while the typespec
  # declares a map — `%{}` was likely intended. Confirm before changing.
  defstruct headers: [], body: {}, request_url: "", status_code: 0

  @typedoc """
  Tuple that wraps a response from `HTTPoison`:
  the status of the request made (`:ok` or `:error`) along with the `Response`.
  """
  @type tuple_t :: { :ok | :error, Response.t }
def parse(%HTTPoison.Response{ status_code: 200 } = response) do
nomad_response = common_data(response)
%{ nomad_response | body: decode(response.body) }
end
def parse(response) do
# in a non-200 response, nomad's body is a string and not JSON
# should a map be returned to keep things consistent?
body = %{ error: response.body }
nomad_response = common_data(response)
%{ nomad_response | body: body }
end
defp common_data(response) do
%Response{
headers: response.headers,
request_url: response.request_url,
status_code: response.status_code
}
end
defp decode(_ = "") do
%{}
end
defp decode(body) do
body |> Poison.decode!
end
end
| 23.708333 | 67 | 0.65993 |
798b1e3a782a80e3860fd2cb2c4699aefe0f2a3d | 3,369 | ex | Elixir | lib/jumubase/accounts/user.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 2 | 2019-01-20T07:03:30.000Z | 2019-04-11T10:20:14.000Z | lib/jumubase/accounts/user.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 6 | 2018-09-20T05:52:14.000Z | 2019-04-23T19:27:39.000Z | lib/jumubase/accounts/user.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | null | null | null | defmodule Jumubase.Accounts.User do
  use Jumubase.Schema
  import Ecto.Changeset
  import Jumubase.Gettext

  alias Jumubase.JumuParams
  alias Jumubase.Utils
  alias Jumubase.Accounts.User
  alias Jumubase.Foundation.Host

  # User account schema; authentication state lives in :hashed_password.
  schema "users" do
    field :given_name, :string
    field :family_name, :string
    field :email, :string
    # Virtual: accepted as input, hashed into :hashed_password by the
    # changesets below, never stored; `redact: true` hides it from inspect.
    field :password, :string, virtual: true, redact: true
    field :hashed_password, :string, redact: true
    # Constrained to JumuParams.user_roles/0 by the changesets below.
    field :role, :string

    many_to_many :hosts, Host, join_through: "hosts_users", on_replace: :delete

    timestamps()
  end
  # Attributes shared by changeset/2 and create_changeset/2.
  @base_attrs [:given_name, :family_name, :email, :role]

  # Changeset for casting and validating a user's profile attributes
  # (no password handling).
  def changeset(%User{} = user, attrs) do
    user
    |> cast(attrs, @base_attrs)
    |> validate_required(@base_attrs)
    |> validate_inclusion(:role, JumuParams.user_roles())
    |> validate_format(:email, Utils.email_format())
    |> unique_email()
  end
  @doc """
  A user changeset for creating a new user.

  Casts the base attributes plus the plaintext `:password`, validates them,
  and hashes the password into `:hashed_password`.
  """
  def create_changeset(%User{} = user, attrs) do
    user
    |> cast(attrs, @base_attrs ++ [:password])
    |> validate_required(@base_attrs)
    |> validate_inclusion(:role, JumuParams.user_roles())
    |> validate_format(:email, Utils.email_format())
    |> unique_email()
    |> validate_password(hash_password: true)
  end
  @doc """
  A user changeset for changing the password.

  ## Options

    * `:hash_password` - Hashes the password so it can be stored securely
      in the database and ensures the password field is cleared to prevent
      leaks in the logs. If password hashing is not needed and clearing the
      password field is not desired (like when using this changeset for
      validations on a LiveView form), this option can be set to `false`.
      Defaults to `true`.
  """
  def password_changeset(user, attrs, opts \\ []) do
    user
    |> cast(attrs, [:password])
    # Requires a matching `password_confirmation` param when one is given.
    |> validate_confirmation(:password, message: dgettext("errors", "does not match password"))
    |> validate_password(opts)
  end
  @doc """
  Verifies the password.

  If there is no user or the user doesn't have a password, we call
  `Bcrypt.no_user_verify/0` to avoid timing attacks.
  """
  def valid_password?(%User{hashed_password: hashed_password}, password)
      when is_binary(hashed_password) and byte_size(password) > 0 do
    Bcrypt.verify_pass(password, hashed_password)
  end

  def valid_password?(_, _) do
    # Burn comparable CPU time even for unknown users so response timing
    # does not reveal whether the account exists.
    Bcrypt.no_user_verify()
    false
  end
  # Private helpers

  # Despite the name, this also re-checks email format and length before the
  # uniqueness checks. `unsafe_validate_unique/3` gives early user feedback
  # but is racy; the DB-backed `unique_constraint/2` remains authoritative.
  defp unique_email(changeset) do
    validate_format(changeset, :email, ~r/^[^\s]+@[^\s]+$/)
    |> validate_length(:email, max: 160)
    |> unsafe_validate_unique(:email, Jumubase.Repo)
    |> unique_constraint(:email)
  end

  # Shared password rules; hashing is delegated to maybe_hash_password/2.
  defp validate_password(changeset, opts) do
    changeset
    |> validate_required(:password)
    |> validate_length(:password, min: 8, max: 72)
    |> maybe_hash_password(opts)
  end

  # Hashes :password into :hashed_password unless `hash_password: false`
  # was given or the changeset is already invalid; the plaintext change is
  # then deleted so it never leaks into logs.
  defp maybe_hash_password(changeset, opts) do
    hash_password? = Keyword.get(opts, :hash_password, true)
    password = get_change(changeset, :password)

    if hash_password? && password && changeset.valid? do
      changeset
      # If using Bcrypt, then further validate it is at most 72 bytes long
      |> validate_length(:password, max: 72, count: :bytes)
      |> put_change(:hashed_password, Bcrypt.hash_pwd_salt(password))
      |> delete_change(:password)
    else
      changeset
    end
  end
| 29.814159 | 95 | 0.689225 |
798b29ba4808d727f75a6b583329fbaf35745fc5 | 2,153 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/creative_click_through_url.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/creative_click_through_url.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/creative_click_through_url.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Model.CreativeClickThroughUrl do
  @moduledoc """
  Click-through URL

  ## Attributes

  *   `computedClickThroughUrl` (*type:* `String.t`, *default:* `nil`) - Read-only convenience field representing the actual URL that will be used for this click-through. The URL is computed as follows: - If landingPageId is specified then that landing page's URL is assigned to this field. - Otherwise, the customClickThroughUrl is assigned to this field.
  *   `customClickThroughUrl` (*type:* `String.t`, *default:* `nil`) - Custom click-through URL. Applicable if the landingPageId field is left unset.
  *   `landingPageId` (*type:* `String.t`, *default:* `nil`) - ID of the landing page for the click-through URL.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :computedClickThroughUrl => String.t() | nil,
          :customClickThroughUrl => String.t() | nil,
          :landingPageId => String.t() | nil
        }

  # `field/1` comes from GoogleApi.Gax.ModelBase: it defines the struct key
  # and registers it for JSON (de)serialization.
  field(:computedClickThroughUrl)
  field(:customClickThroughUrl)
  field(:landingPageId)
end
# Poison protocol implementations: decoding delegates to the model's own
# generated decode/2; encoding is shared across all Gax-generated models.
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.CreativeClickThroughUrl do
  def decode(value, options) do
    GoogleApi.DFAReporting.V33.Model.CreativeClickThroughUrl.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.CreativeClickThroughUrl do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.622642 | 357 | 0.740362 |
798b3993a020842a1702436b1847a58d0d0492c7 | 484 | ex | Elixir | lib/neko_caffe/registry/adoption.ex | kamiyuzu/neko_caffe | 84afd6559c89521ce95d27d65d2f0c3a88a92d11 | [
"MIT"
] | 2 | 2020-07-30T18:40:58.000Z | 2020-08-01T00:14:05.000Z | lib/neko_caffe/registry/adoption.ex | kamiyuzu/neko_caffe | 84afd6559c89521ce95d27d65d2f0c3a88a92d11 | [
"MIT"
] | null | null | null | lib/neko_caffe/registry/adoption.ex | kamiyuzu/neko_caffe | 84afd6559c89521ce95d27d65d2f0c3a88a92d11 | [
"MIT"
] | 1 | 2021-09-23T17:24:45.000Z | 2021-09-23T17:24:45.000Z | defmodule NekoCaffe.Registry.Adoption do
  @moduledoc false
  use Ecto.Schema
  import Ecto.Changeset
  alias NekoCaffe.Clients.Owner

  # Adoption record; accepted attributes are defined in changeset/2 below.
  schema "adoptions" do
    field :worker_name, :string
    field :owner_id, :id

    # NOTE(review): `has_many :owners` alongside a local `owner_id` column is
    # unusual — confirm the intended direction of this association.
    has_many :owners, Owner

    timestamps()
  end
  @doc false
  # Casts worker_name/owner_id and attaches the owners association.
  # NOTE(review): `attrs["owners"]` assumes string-keyed attrs and that the
  # key is always present; `put_assoc(:owners, nil)` would not be a valid
  # value for a has_many — confirm callers always pass "owners".
  def changeset(adoption, attrs) do
    adoption
    |> cast(attrs, [:worker_name, :owner_id])
    |> validate_required([:worker_name, :owner_id])
    |> put_assoc(:owners, attrs["owners"])
  end
end
| 19.36 | 51 | 0.688017 |
798b3aba8b01ce05ccdec6a01b3f7b2e34cb792c | 6,477 | exs | Elixir | test/absinthe/type/directive_test.exs | maartenvanvliet/absinthe | ebe820717200f53756e225b3dffbfefe924a94d3 | [
"MIT"
] | null | null | null | test/absinthe/type/directive_test.exs | maartenvanvliet/absinthe | ebe820717200f53756e225b3dffbfefe924a94d3 | [
"MIT"
] | 2 | 2020-07-21T05:23:37.000Z | 2020-08-26T04:56:12.000Z | test/absinthe/type/directive_test.exs | jlgeering/absinthe | a3dbc29640d613928398626ad75a8f03203a1720 | [
"MIT"
] | null | null | null | defmodule Absinthe.Type.DirectiveTest do
  use Absinthe.Case, async: true

  alias Absinthe.Schema

  # Minimal schema used only to prove that built-in directives are injected
  # into every schema, independent of the fixture schemas used below.
  defmodule TestSchema do
    use Absinthe.Schema

    query do
      field :nonce, :string
    end
  end

  describe "directives" do
    # Built-in directives should be reachable by atom, by string, and via
    # Schema.lookup_directive/2 — all resolving to the same definition.
    test "are loaded as built-ins" do
      assert %{skip: "skip", include: "include"} = TestSchema.__absinthe_directives__()
      assert TestSchema.__absinthe_directive__(:skip)
      assert TestSchema.__absinthe_directive__("skip") == TestSchema.__absinthe_directive__(:skip)

      assert Schema.lookup_directive(TestSchema, :skip) ==
               TestSchema.__absinthe_directive__(:skip)

      assert Schema.lookup_directive(TestSchema, "skip") ==
               TestSchema.__absinthe_directive__(:skip)
    end
  end

  # NOTE: @query_field/@query_fragment/@query are intentionally redefined in
  # each describe block; each test reads the most recent definition above it.
  describe "the `@skip` directive" do
    @query_field """
    query Test($skipPerson: Boolean) {
      person @skip(if: $skipPerson) {
        name
      }
    }
    """
    test "is defined" do
      assert Schema.lookup_directive(Absinthe.Fixtures.ContactSchema, :skip)
    end

    # `if: false` keeps the field, `if: true` drops it, and a missing
    # variable is a validation error (the argument is non-null).
    test "behaves as expected for a field" do
      assert {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} ==
               Absinthe.run(
                 @query_field,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipPerson" => false}
               )

      assert {:ok, %{data: %{}}} ==
               Absinthe.run(
                 @query_field,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipPerson" => true}
               )

      assert_result(
        {:ok,
         %{errors: [%{message: ~s(In argument "if": Expected type "Boolean!", found null.)}]}},
        run(@query_field, Absinthe.Fixtures.ContactSchema)
      )
    end

    @query_fragment """
    query Test($skipAge: Boolean) {
      person {
        name
        ...Aging @skip(if: $skipAge)
      }
    }
    fragment Aging on Person {
      age
    }
    """
    # Same semantics when the directive is applied to a fragment spread.
    test "behaves as expected for a fragment" do
      assert_result(
        {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}},
        run(@query_fragment, Absinthe.Fixtures.ContactSchema, variables: %{"skipAge" => false})
      )

      assert_result(
        {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}},
        run(@query_fragment, Absinthe.Fixtures.ContactSchema, variables: %{"skipAge" => true})
      )

      assert_result(
        {:ok,
         %{errors: [%{message: ~s(In argument "if": Expected type "Boolean!", found null.)}]}},
        run(@query_fragment, Absinthe.Fixtures.ContactSchema)
      )
    end
  end

  describe "the `@include` directive" do
    @query_field """
    query Test($includePerson: Boolean) {
      person @include(if: $includePerson) {
        name
      }
    }
    """
    test "is defined" do
      assert Schema.lookup_directive(Absinthe.Fixtures.ContactSchema, :include)
    end

    # Inverse of @skip: `if: true` keeps the field, `if: false` drops it.
    test "behaves as expected for a field" do
      assert_result(
        {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}},
        run(@query_field, Absinthe.Fixtures.ContactSchema, variables: %{"includePerson" => true})
      )

      assert_result(
        {:ok, %{data: %{}}},
        run(@query_field, Absinthe.Fixtures.ContactSchema, variables: %{"includePerson" => false})
      )

      assert_result(
        {:ok,
         %{
           errors: [
             %{
               locations: [%{column: 0, line: 2}],
               message: ~s(In argument "if": Expected type "Boolean!", found null.)
             }
           ]
         }},
        run(@query_field, Absinthe.Fixtures.ContactSchema)
      )
    end

    @query_fragment """
    query Test($includeAge: Boolean) {
      person {
        name
        ...Aging @include(if: $includeAge)
      }
    }
    fragment Aging on Person {
      age
    }
    """
    test "behaves as expected for a fragment" do
      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(
                 @query_fragment,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"includeAge" => true}
               )

      assert {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} ==
               Absinthe.run(
                 @query_fragment,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"includeAge" => false}
               )
    end

    # Only asserts that *some* error is produced; the message shape is
    # covered by the field-level test above.
    test "should return an error if the variable is not supplied" do
      assert {:ok, %{errors: errors}} =
               Absinthe.run(@query_fragment, Absinthe.Fixtures.ContactSchema)

      assert [] != errors
    end
  end

  # The variable has a default (`= false`), so omitting it includes the field.
  describe "for inline fragments without type conditions" do
    @query """
    query Q($skipAge: Boolean = false) {
      person {
        name
        ... @skip(if: $skipAge) {
          age
        }
      }
    }
    """
    test "works as expected" do
      assert {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} ==
               Absinthe.run(
                 @query,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipAge" => true}
               )

      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(
                 @query,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipAge" => false}
               )

      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(@query, Absinthe.Fixtures.ContactSchema)
    end
  end

  describe "for inline fragments with type conditions" do
    @query """
    query Q($skipAge: Boolean = false) {
      person {
        name
        ... on Person @skip(if: $skipAge) {
          age
        }
      }
    }
    """
    test "works as expected" do
      assert {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} ==
               Absinthe.run(
                 @query,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipAge" => true}
               )

      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(
                 @query,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipAge" => false}
               )

      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(@query, Absinthe.Fixtures.ContactSchema)
    end
  end
end
| 28.407895 | 98 | 0.514127 |
798b589464e65747fedad2b296b5d8245854644b | 12,667 | ex | Elixir | lib/elixir_ex_aliyun_ots_table_store_tunnel_read_records_request.ex | hou8/tablestore_protos | 1a3223326b92bbe196d57ce4dd19b5a8db1c728d | [
"MIT"
] | null | null | null | lib/elixir_ex_aliyun_ots_table_store_tunnel_read_records_request.ex | hou8/tablestore_protos | 1a3223326b92bbe196d57ce4dd19b5a8db1c728d | [
"MIT"
] | 1 | 2022-02-08T06:37:02.000Z | 2022-02-08T06:37:02.000Z | lib/elixir_ex_aliyun_ots_table_store_tunnel_read_records_request.ex | hou8/tablestore_protos | 1a3223326b92bbe196d57ce4dd19b5a8db1c728d | [
"MIT"
] | 2 | 2022-01-24T06:13:03.000Z | 2022-01-24T08:33:41.000Z | # credo:disable-for-this-file
defmodule(ExAliyunOts.TableStoreTunnel.ReadRecordsRequest) do
@moduledoc false
(
defstruct(tunnel_id: nil, client_id: nil, channel_id: nil, token: nil)
(
(
@spec encode(struct) :: {:ok, iodata} | {:error, any}
def(encode(msg)) do
try do
{:ok, encode!(msg)}
rescue
e in [Protox.EncodingError, Protox.RequiredFieldsError] ->
{:error, e}
end
end
@spec encode!(struct) :: iodata | no_return
def(encode!(msg)) do
[]
|> encode_tunnel_id(msg)
|> encode_client_id(msg)
|> encode_channel_id(msg)
|> encode_token(msg)
end
)
[]
[
defp(encode_tunnel_id(acc, msg)) do
try do
case(msg.tunnel_id) do
nil ->
raise(Protox.RequiredFieldsError.new([:tunnel_id]))
_ ->
[acc, "\n", Protox.Encode.encode_string(msg.tunnel_id)]
end
rescue
ArgumentError ->
reraise(Protox.EncodingError.new(:tunnel_id, "invalid field value"), __STACKTRACE__)
end
end,
defp(encode_client_id(acc, msg)) do
try do
case(msg.client_id) do
nil ->
raise(Protox.RequiredFieldsError.new([:client_id]))
_ ->
[acc, <<18>>, Protox.Encode.encode_string(msg.client_id)]
end
rescue
ArgumentError ->
reraise(Protox.EncodingError.new(:client_id, "invalid field value"), __STACKTRACE__)
end
end,
defp(encode_channel_id(acc, msg)) do
try do
case(msg.channel_id) do
nil ->
raise(Protox.RequiredFieldsError.new([:channel_id]))
_ ->
[acc, <<26>>, Protox.Encode.encode_string(msg.channel_id)]
end
rescue
ArgumentError ->
reraise(
Protox.EncodingError.new(:channel_id, "invalid field value"),
__STACKTRACE__
)
end
end,
defp(encode_token(acc, msg)) do
try do
case(msg.token) do
nil ->
raise(Protox.RequiredFieldsError.new([:token]))
_ ->
[acc, "\"", Protox.Encode.encode_string(msg.token)]
end
rescue
ArgumentError ->
reraise(Protox.EncodingError.new(:token, "invalid field value"), __STACKTRACE__)
end
end
]
[]
)
(
(
@spec decode(binary) :: {:ok, struct} | {:error, any}
def(decode(bytes)) do
try do
{:ok, decode!(bytes)}
rescue
e in [Protox.DecodingError, Protox.IllegalTagError, Protox.RequiredFieldsError] ->
{:error, e}
end
end
(
@spec decode!(binary) :: struct | no_return
def(decode!(bytes)) do
{msg, set_fields} =
parse_key_value([], bytes, struct(ExAliyunOts.TableStoreTunnel.ReadRecordsRequest))
case([:tunnel_id, :client_id, :channel_id, :token] -- set_fields) do
[] ->
msg
missing_fields ->
raise(Protox.RequiredFieldsError.new(missing_fields))
end
end
)
)
(
@spec parse_key_value([atom], binary, struct) :: {struct, [atom]}
defp(parse_key_value(set_fields, <<>>, msg)) do
{msg, set_fields}
end
defp(parse_key_value(set_fields, bytes, msg)) do
{new_set_fields, field, rest} =
case(Protox.Decode.parse_key(bytes)) do
{0, _, _} ->
raise(%Protox.IllegalTagError{})
{1, _, bytes} ->
{len, bytes} = Protox.Varint.decode(bytes)
{delimited, rest} = Protox.Decode.parse_delimited(bytes, len)
{[:tunnel_id | set_fields], [tunnel_id: delimited], rest}
{2, _, bytes} ->
{len, bytes} = Protox.Varint.decode(bytes)
{delimited, rest} = Protox.Decode.parse_delimited(bytes, len)
{[:client_id | set_fields], [client_id: delimited], rest}
{3, _, bytes} ->
{len, bytes} = Protox.Varint.decode(bytes)
{delimited, rest} = Protox.Decode.parse_delimited(bytes, len)
{[:channel_id | set_fields], [channel_id: delimited], rest}
{4, _, bytes} ->
{len, bytes} = Protox.Varint.decode(bytes)
{delimited, rest} = Protox.Decode.parse_delimited(bytes, len)
{[:token | set_fields], [token: delimited], rest}
{tag, wire_type, rest} ->
{_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest)
{set_fields, [], rest}
end
msg_updated = struct(msg, field)
parse_key_value(new_set_fields, rest, msg_updated)
end
)
[]
)
(
@spec json_decode(iodata(), keyword()) :: {:ok, struct()} | {:error, any()}
def(json_decode(input, opts \\ [])) do
try do
{:ok, json_decode!(input, opts)}
rescue
e in Protox.JsonDecodingError ->
{:error, e}
end
end
@spec json_decode!(iodata(), keyword()) :: struct() | no_return()
def(json_decode!(input, opts \\ [])) do
{json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :decode)
Protox.JsonDecode.decode!(
input,
ExAliyunOts.TableStoreTunnel.ReadRecordsRequest,
&json_library_wrapper.decode!(json_library, &1)
)
end
@spec json_encode(struct(), keyword()) :: {:ok, iodata()} | {:error, any()}
def(json_encode(msg, opts \\ [])) do
try do
{:ok, json_encode!(msg, opts)}
rescue
e in Protox.JsonEncodingError ->
{:error, e}
end
end
@spec json_encode!(struct(), keyword()) :: iodata() | no_return()
def(json_encode!(msg, opts \\ [])) do
{json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :encode)
Protox.JsonEncode.encode!(msg, &json_library_wrapper.encode!(json_library, &1))
end
)
@deprecated "Use fields_defs()/0 instead"
@spec defs() :: %{
required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()}
}
def(defs()) do
%{
1 => {:tunnel_id, {:scalar, ""}, :string},
2 => {:client_id, {:scalar, ""}, :string},
3 => {:channel_id, {:scalar, ""}, :string},
4 => {:token, {:scalar, ""}, :string}
}
end
@deprecated "Use fields_defs()/0 instead"
@spec defs_by_name() :: %{
required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()}
}
def(defs_by_name()) do
%{
channel_id: {3, {:scalar, ""}, :string},
client_id: {2, {:scalar, ""}, :string},
token: {4, {:scalar, ""}, :string},
tunnel_id: {1, {:scalar, ""}, :string}
}
end
@spec fields_defs() :: list(Protox.Field.t())
def(fields_defs()) do
[
%{
__struct__: Protox.Field,
json_name: "tunnelId",
kind: {:scalar, ""},
label: :required,
name: :tunnel_id,
tag: 1,
type: :string
},
%{
__struct__: Protox.Field,
json_name: "clientId",
kind: {:scalar, ""},
label: :required,
name: :client_id,
tag: 2,
type: :string
},
%{
__struct__: Protox.Field,
json_name: "channelId",
kind: {:scalar, ""},
label: :required,
name: :channel_id,
tag: 3,
type: :string
},
%{
__struct__: Protox.Field,
json_name: "token",
kind: {:scalar, ""},
label: :required,
name: :token,
tag: 4,
type: :string
}
]
end
[
@spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}),
(
def(field_def(:tunnel_id)) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "tunnelId",
kind: {:scalar, ""},
label: :required,
name: :tunnel_id,
tag: 1,
type: :string
}}
end
def(field_def("tunnelId")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "tunnelId",
kind: {:scalar, ""},
label: :required,
name: :tunnel_id,
tag: 1,
type: :string
}}
end
def(field_def("tunnel_id")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "tunnelId",
kind: {:scalar, ""},
label: :required,
name: :tunnel_id,
tag: 1,
type: :string
}}
end
),
(
def(field_def(:client_id)) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "clientId",
kind: {:scalar, ""},
label: :required,
name: :client_id,
tag: 2,
type: :string
}}
end
def(field_def("clientId")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "clientId",
kind: {:scalar, ""},
label: :required,
name: :client_id,
tag: 2,
type: :string
}}
end
def(field_def("client_id")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "clientId",
kind: {:scalar, ""},
label: :required,
name: :client_id,
tag: 2,
type: :string
}}
end
),
(
def(field_def(:channel_id)) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "channelId",
kind: {:scalar, ""},
label: :required,
name: :channel_id,
tag: 3,
type: :string
}}
end
def(field_def("channelId")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "channelId",
kind: {:scalar, ""},
label: :required,
name: :channel_id,
tag: 3,
type: :string
}}
end
def(field_def("channel_id")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "channelId",
kind: {:scalar, ""},
label: :required,
name: :channel_id,
tag: 3,
type: :string
}}
end
),
(
def(field_def(:token)) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "token",
kind: {:scalar, ""},
label: :required,
name: :token,
tag: 4,
type: :string
}}
end
def(field_def("token")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "token",
kind: {:scalar, ""},
label: :required,
name: :token,
tag: 4,
type: :string
}}
end
[]
),
def(field_def(_)) do
{:error, :no_such_field}
end
]
[]
@spec required_fields() :: [((:tunnel_id | :client_id) | :channel_id) | :token]
def(required_fields()) do
[:tunnel_id, :client_id, :channel_id, :token]
end
@spec syntax() :: atom
def(syntax()) do
:proto2
end
[
@spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}),
def(default(:tunnel_id)) do
{:ok, ""}
end,
def(default(:client_id)) do
{:ok, ""}
end,
def(default(:channel_id)) do
{:ok, ""}
end,
def(default(:token)) do
{:ok, ""}
end,
def(default(_)) do
{:error, :no_such_field}
end
]
)
end | 27.358531 | 98 | 0.45812 |
798b5ed01cfa9133b667370e1a5e7175641fdb4d | 3,925 | ex | Elixir | lib/ex_autolink.ex | krepflap/ex_autolink | 941d405824131b90432f309abe2aad5a6a48464a | [
"MIT"
] | null | null | null | lib/ex_autolink.ex | krepflap/ex_autolink | 941d405824131b90432f309abe2aad5a6a48464a | [
"MIT"
] | null | null | null | lib/ex_autolink.ex | krepflap/ex_autolink | 941d405824131b90432f309abe2aad5a6a48464a | [
"MIT"
] | null | null | null | defmodule ExAutolink do
@moduledoc """
This simple module is used for converting http(s):// links in text to
HTML hyperlinks.
It doesn't depend on Phoenix.HTML, but can be used in conjuction with Phoenix.
"""
@doc ~S"""
This method is used for parsing strings or text blocks. We convert all links
starting with http:// or https:// to HTML links.
## Examples
iex> ExAutolink.link("https://elixir-lang.org")
"<a href=\"https://elixir-lang.org\">https://elixir-lang.org</a>"
iex> ExAutolink.link("Go here: https://elixir-lang.org/.")
"Go here: <a href=\"https://elixir-lang.org/\">https://elixir-lang.org/</a>."
iex> ExAutolink.link("https://ja.wikipedia.org/wiki/Elixir_(プログラミング言語)")
"<a href=\"https://ja.wikipedia.org/wiki/Elixir_(プログラミング言語)\">https://ja.wikipedia.org/wiki/Elixir_(プログラミング言語)</a>"
iex> ExAutolink.link("https://elixir-lang.org!", args: %{class: "myclass active", rel: "nofollow noreferrer"})
"<a href=\"https://elixir-lang.org\" class=\"myclass active\" rel=\"nofollow noreferrer\">https://elixir-lang.org</a>!"
"""
def link(text, options \\ [])
def link("", _options), do: ""
def link(text, []), do: build_link(text)
def link(text, args: args), do: build_link(text, build_arguments(args))
defp build_link(text, extra \\ <<>>) do
Regex.replace(~r{(https?://[^\s]+)}, text, fn url ->
{:ok, url_part, punctuation} =
url
|> reverse()
|> parse_punctuation()
~s(<a href="#{url_part}"#{extra}>#{url_part}</a>#{punctuation})
end)
end
# parse_punctuation/2 is used to handle punctuation by recursively scanning
# the string in reversed order. It is used to split punctuation from the
# actual url (trailing full stop, question marks, etc. i.e. anything not
# part of the url itself).
defp parse_punctuation(reversed, punctuation \\ <<>>)
# This matches cases when punctuation contains any kind of closing bracket.
defp parse_punctuation(<<?), reversed::binary>>, punctuation),
do: parse_brackets(reversed, punctuation, ?(, ?))
defp parse_punctuation(<<?], reversed::binary>>, punctuation),
do: parse_brackets(reversed, punctuation, ?[, ?])
defp parse_punctuation(<<?}, reversed::binary>>, punctuation),
do: parse_brackets(reversed, punctuation, ?{, ?})
defp parse_punctuation(<<last_char, reversed::binary>>, punctuation) do
if <<last_char>> =~ ~r/^[^\p{L}\p{N}\/-=&]$/ do
# The regex is used to match on punctuation characters.
#
# Anything that is NOT a letter, number, forward slash, dash, equal sign,
# or ampersand, is matched. We thus assume it is punctuation.
parse_punctuation(reversed, punctuation <> <<last_char>>)
else
{:ok, reverse(<<last_char>> <> reversed), reverse(punctuation)}
end
end
defp parse_brackets(reversed, punctuation, opening, closing) do
# We use find_opening/2 to search if there is a matching opening bracket
# earlier in the string.
case find_opening(reversed, opening, closing) do
{:found} -> {:ok, reverse(<<closing>> <> reversed), reverse(punctuation)}
{:not_found} -> parse_punctuation(reversed, punctuation <> <<closing>>)
end
end
defp find_opening(<<>>, _opening, _closing), do: {:not_found}
defp find_opening(<<last_char, reversed::binary>>, opening, closing) do
# Recursively look for the first encountered opening bracket, without
# stumbling upon another closing bracket.
case last_char do
^opening -> {:found}
^closing -> {:not_found}
_ -> find_opening(reversed, opening, closing)
end
end
defp build_arguments(args) when is_map(args) do
for {k, v} <- args, into: "", do: ~s( #{k}="#{v}")
end
defp reverse(binary, result \\ <<>>)
defp reverse(<<>>, result), do: result
defp reverse(<<c::utf8, rest::binary>>, result), do: reverse(rest, <<c::utf8>> <> result)
end
| 38.861386 | 125 | 0.654013 |
798b81d35451e88a59e2211f779a1f42ce17c147 | 1,671 | ex | Elixir | lib/arkecosystem/client/api/wallets.ex | ArkEcosystem/ARK-Elixir-Client | a62c0f63c3d490b2d8734384ae354444cc279a9a | [
"MIT"
] | 2 | 2018-07-13T23:05:08.000Z | 2019-02-06T10:27:08.000Z | lib/arkecosystem/client/api/wallets.ex | ArkEcosystem/ARK-Elixir-Client | a62c0f63c3d490b2d8734384ae354444cc279a9a | [
"MIT"
] | 59 | 2018-06-11T07:59:59.000Z | 2019-11-17T23:30:19.000Z | lib/arkecosystem/client/api/wallets.ex | ArkEcosystem/ARK-Elixir-Client | a62c0f63c3d490b2d8734384ae354444cc279a9a | [
"MIT"
] | 17 | 2018-07-02T16:10:25.000Z | 2020-11-23T23:43:55.000Z | defmodule ArkEcosystem.Client.API.Wallets do
@moduledoc """
Documentation for ArkEcosystem.Client.API.Wallets
"""
import ArkEcosystem.Client
@spec list(Tesla.Client.t(), Keyword.t()) :: ArkEcosystem.Client.response()
def list(client, parameters \\ []) do
client |> get("wallets", parameters)
end
@spec top(Tesla.Client.t(), Keyword.t()) :: ArkEcosystem.Client.response()
def top(client, parameters \\ []) do
client |> get("wallets/top", parameters)
end
@spec show(Tesla.Client.t(), String.t()) :: ArkEcosystem.Client.response()
def show(client, id) do
client |> get("wallets/#{id}")
end
@spec transactions(Tesla.Client.t(), String.t(), Keyword.t()) :: ArkEcosystem.Client.response()
def transactions(client, id, parameters \\ []) do
client |> get("wallets/#{id}/transactions", parameters)
end
@spec sent_transactions(Tesla.Client.t(), String.t(), Keyword.t()) ::
ArkEcosystem.Client.response()
def sent_transactions(client, id, parameters \\ []) do
client |> get("wallets/#{id}/transactions/sent", parameters)
end
@spec received_transactions(Tesla.Client.t(), String.t(), Keyword.t()) ::
ArkEcosystem.Client.response()
def received_transactions(client, id, parameters \\ []) do
client |> get("wallets/#{id}/transactions/received", parameters)
end
@spec votes(Tesla.Client.t(), String.t()) :: ArkEcosystem.Client.response()
def votes(client, id) do
client |> get("wallets/#{id}/votes")
end
@spec search(Tesla.Client.t(), Keyword.t()) :: ArkEcosystem.Client.response()
def search(client, parameters) do
client |> post("wallets/search", parameters)
end
end
| 33.42 | 97 | 0.67325 |
798b89dd44acf8943444b4ef73afb5ce66cb03ca | 30,434 | ex | Elixir | lib/ex_aws/s3.ex | bettyblocks/ex_aws | 0c9b43b5a1afbfb5fb26131be7f80f69de4431d1 | [
"MIT",
"Unlicense"
] | null | null | null | lib/ex_aws/s3.ex | bettyblocks/ex_aws | 0c9b43b5a1afbfb5fb26131be7f80f69de4431d1 | [
"MIT",
"Unlicense"
] | null | null | null | lib/ex_aws/s3.ex | bettyblocks/ex_aws | 0c9b43b5a1afbfb5fb26131be7f80f69de4431d1 | [
"MIT",
"Unlicense"
] | null | null | null | defmodule ExAws.S3 do
@moduledoc """
Operations on AWS S3
## Basic Operations
The vast majority of operations here represent a single operation on S3.
### Examples
```
S3.list_objects |> ExAws.request! #=> {:ok, %{body: [list, of, objects]}}
S3.list_objects |> ExAws.stream! |> Enum.to_list #=> [list, of, objects]
S3.put_object("my-bucket", "path/to/bucket", contents) |> ExAws.request!
```
## Higher Level Operations
There are also some operations which operate at a higher level to make it easier
to download and upload very large files.
Multipart uploads
```
"path/to/big/file"
|> S3.Upload.stream_file
|> S3.upload("my-bucket", "path/on/s3")
|> ExAws.request! #=> {:ok, :done}
```
Download large file to disk
```
S3.download_file("my-bucket", "path/on/s3", "path/to/dest/file")
  |> ExAws.request! #=> {:ok, :done}
```
## More high level functionality
Task.async_stream makes some high level flows so easy you don't need explicit ExAws support.
For example, here is how to concurrently upload many files.
```
upload_file = fn {src_path, dest_path} ->
S3.put_object("my_bucket", dest_path, File.read!(src_path))
|> ExAws.request!
end
paths = %{"path/to/src0" => "path/to/dest0", "path/to/src1" => "path/to/dest1"}
paths
|> Task.async_stream(upload_file, max_concurrency: 10)
|> Stream.run
```
"""
import ExAws.S3.Utils
alias ExAws.S3.Parsers
@type acl_opts :: [{:acl, canned_acl} | grant]
@type grant :: {:grant_read, grantee}
| {:grant_read_acp, grantee}
| {:grant_write_acp, grantee}
| {:grant_full_control, grantee}
@type canned_acl :: :private
| :public_read
| :public_read_write
| :authenticated_read
| :bucket_owner_read
| :bucket_owner_full_control
@type grantee :: [ {:email, binary}
| {:id, binary}
| {:uri, binary}
]
@type customer_encryption_opts :: [
customer_algorithm: binary,
customer_key: binary,
customer_key_md5: binary]
@type encryption_opts :: binary
| [aws_kms_key_id: binary]
| customer_encryption_opts
@type presigned_url_opts :: [
expires_in: integer,
virtual_host: boolean,
query_params: [{:key, binary}]
]
@type amz_meta_opts :: [{atom, binary} | {binary, binary}, ...]
## Buckets
#############
@doc "List buckets"
@spec list_buckets() :: ExAws.Operation.S3.t
@spec list_buckets(opts :: Keyword.t) :: ExAws.Operation.S3.t
def list_buckets(opts \\ []) do
request(:get, "", "/", params: opts)
end
@doc "Delete a bucket"
@spec delete_bucket(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket(bucket) do
request(:delete, bucket, "/")
end
@doc "Delete a bucket cors"
@spec delete_bucket_cors(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_cors(bucket) do
request(:delete, bucket, "/", resource: "cors")
end
@doc "Delete a bucket lifecycle"
@spec delete_bucket_lifecycle(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_lifecycle(bucket) do
request(:delete, bucket, "/", resource: "lifecycle")
end
@doc "Delete a bucket policy"
@spec delete_bucket_policy(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_policy(bucket) do
request(:delete, bucket, "/", resource: "policy")
end
@doc "Delete a bucket replication"
@spec delete_bucket_replication(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_replication(bucket) do
request(:delete, bucket, "/", resource: "replication")
end
@doc "Delete a bucket tagging"
@spec delete_bucket_tagging(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_tagging(bucket) do
request(:delete, bucket, "/", resource: "tagging")
end
@doc "Delete a bucket website"
@spec delete_bucket_website(bucket :: binary) :: ExAws.Operation.S3.t
def delete_bucket_website(bucket) do
request(:delete, bucket, "/", resource: "website")
end
@type list_objects_opts :: [
{:delimiter, binary} |
{:marker, binary} |
{:prefix, binary} |
{:encoding_type, binary} |
{:max_keys, 0..1000}
]
@doc """
List objects in bucket
Can be streamed.
"""
@spec list_objects(bucket :: binary) :: ExAws.Operation.S3.t
@spec list_objects(bucket :: binary, opts :: list_objects_opts) :: ExAws.Operation.S3.t
@params [:delimiter, :marker, :prefix, :encoding_type, :max_keys]
def list_objects(bucket, opts \\ []) do
params = opts
|> format_and_take(@params)
request(:get, bucket, "/", [params: params],
stream_builder: &ExAws.S3.Lazy.stream_objects!(bucket, opts, &1),
parser: &ExAws.S3.Parsers.parse_list_objects/1
)
end
@doc "Get bucket acl"
@spec get_bucket_acl(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_acl(bucket) do
request(:get, bucket, "/", resource: "acl")
end
@doc "Get bucket cors"
@spec get_bucket_cors(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_cors(bucket) do
request(:get, bucket, "/", resource: "cors")
end
@doc "Get bucket lifecycle"
@spec get_bucket_lifecycle(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_lifecycle(bucket) do
request(:get, bucket, "/", resource: "lifecycle")
end
@doc "Get bucket policy"
@spec get_bucket_policy(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_policy(bucket) do
request(:get, bucket, "/", resource: "policy")
end
@doc "Get bucket location"
@spec get_bucket_location(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_location(bucket) do
request(:get, bucket, "/", resource: "location")
end
@doc "Get bucket logging"
@spec get_bucket_logging(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_logging(bucket) do
request(:get, bucket, "/", resource: "logging")
end
@doc "Get bucket notification"
@spec get_bucket_notification(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_notification(bucket) do
request(:get, bucket, "/", resource: "notification")
end
@doc "Get bucket replication"
@spec get_bucket_replication(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_replication(bucket) do
request(:get, bucket, "/", resource: "replication")
end
@doc "Get bucket tagging"
@spec get_bucket_tagging(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_tagging(bucket) do
request(:get, bucket, "/", resource: "tagging")
end
@doc "Get bucket object versions"
@spec get_bucket_object_versions(bucket :: binary) :: ExAws.Operation.S3.t
@spec get_bucket_object_versions(bucket :: binary, opts :: Keyword.t) :: ExAws.Operation.S3.t
def get_bucket_object_versions(bucket, opts \\ []) do
request(:get, bucket, "/", resource: "versions", params: opts)
end
@doc "Get bucket payment configuration"
@spec get_bucket_request_payment(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_request_payment(bucket) do
request(:get, bucket, "/", resource: "requestPayment")
end
@doc "Get bucket versioning"
@spec get_bucket_versioning(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_versioning(bucket) do
request(:get, bucket, "/", resource: "versioning")
end
@doc "Get bucket website"
@spec get_bucket_website(bucket :: binary) :: ExAws.Operation.S3.t
def get_bucket_website(bucket) do
request(:get, bucket, "/", resource: "website")
end
@doc "Determine if a bucket exists"
@spec head_bucket(bucket :: binary) :: ExAws.Operation.S3.t
def head_bucket(bucket) do
request(:head, bucket, "/")
end
@doc "List multipart uploads for a bucket"
@spec list_multipart_uploads(bucket :: binary) :: ExAws.Operation.S3.t
@spec list_multipart_uploads(bucket :: binary, opts :: Keyword.t) :: ExAws.Operation.S3.t
@params [:delimiter, :encoding_type, :max_uploads, :key_marker, :prefix, :upload_id_marker]
def list_multipart_uploads(bucket, opts \\ []) do
params = opts |> format_and_take(@params)
request(:get, bucket, "/", [resource: "uploads", params: params], %{parser: &Parsers.parse_list_multipart_uploads/1})
end
@doc "Creates a bucket. Same as create_bucket/2"
@spec put_bucket(bucket :: binary, region :: binary) :: ExAws.Operation.S3.t
def put_bucket(bucket, region, opts \\ []) do
headers = opts
|> Map.new
|> format_acl_headers
# us-east-1 region needs to be an empty string, cause AWS S3 API sucks.
region = if region == "us-east-1", do: "", else: region
body = """
<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<LocationConstraint>#{region}</LocationConstraint>
</CreateBucketConfiguration>
"""
request(:put, bucket, "/", body: body, headers: headers)
end
@doc "Update or create a bucket bucket access control"
@spec put_bucket_acl(bucket :: binary, opts :: acl_opts) :: ExAws.Operation.S3.t
def put_bucket_acl(bucket, grants) do
request(:put, bucket, "/", headers: format_acl_headers(grants))
end
@doc "Update or create a bucket CORS policy"
@spec put_bucket_cors(bucket :: binary, cors_config :: %{}) :: ExAws.Operation.S3.t
def put_bucket_cors(bucket, cors_rules) do
rules = cors_rules
|> Enum.map(&build_cors_rule/1)
|> IO.iodata_to_binary
body = "<CORSConfiguration>#{rules}</CORSConfiguration>"
content_md5 = :crypto.hash(:md5, body) |> Base.encode64
headers = %{"content-md5" => content_md5}
request(:put, bucket, "/",
resource: "cors", body: body, headers: headers)
end
@doc "Update or create a bucket lifecycle configuration"
@spec put_bucket_lifecycle(bucket :: binary, lifecycle_config :: %{}) :: no_return
def put_bucket_lifecycle(bucket, _livecycle_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket policy configuration"
@spec put_bucket_policy(bucket :: binary, policy :: %{}) :: ExAws.Operation.S3.t
def put_bucket_policy(bucket, policy) do
request(:put, bucket, "/", resource: "policy", body: policy)
end
@doc "Update or create a bucket logging configuration"
@spec put_bucket_logging(bucket :: binary, logging_config :: %{}) :: no_return
def put_bucket_logging(bucket, _logging_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket notification configuration"
@spec put_bucket_notification(bucket :: binary, notification_config :: %{}) :: no_return
def put_bucket_notification(bucket, _notification_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket replication configuration"
@spec put_bucket_replication(bucket :: binary, replication_config :: %{}) :: no_return
def put_bucket_replication(bucket, _replication_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket tagging configuration"
@spec put_bucket_tagging(bucket :: binary, tags :: %{}) :: no_return
def put_bucket_tagging(bucket, _tags) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket requestPayment configuration"
@spec put_bucket_request_payment(bucket :: binary, payer :: :requester | :bucket_owner) :: no_return
def put_bucket_request_payment(bucket, _payer) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket versioning configuration"
@spec put_bucket_versioning(bucket :: binary, version_config :: binary) :: no_return
def put_bucket_versioning(bucket, _version_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
@doc "Update or create a bucket website configuration"
@spec put_bucket_website(bucket :: binary, website_config :: binary) :: no_return
def put_bucket_website(bucket, _website_config) do
raise "not yet implemented"
request(:put, bucket, "/")
end
## Objects
###########
@doc "Delete object object in bucket"
@spec delete_object(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
def delete_object(bucket, object, opts \\ []) do
request(:delete, bucket, object, headers: opts |> Map.new)
end
@doc """
Delete multiple objects within a bucket
Limited to 1000 objects.
"""
@spec delete_multiple_objects(
bucket :: binary,
objects :: [binary | {binary, binary}, ...]):: ExAws.Operation.S3.t
@spec delete_multiple_objects(
bucket :: binary,
objects :: [binary | {binary, binary}, ...], opts :: [quiet: true]):: ExAws.Operation.S3.t
def delete_multiple_objects(bucket, objects, opts \\ []) do
objects_xml = Enum.map(objects, fn
{key, version} -> ["<Object><Key>", key, "</Key><VersionId>", version, "</VersionId></Object>"]
key -> ["<Object><Key>", key, "</Key></Object>"]
end)
quiet = case opts do
[quiet: true] -> "<Quiet>true</Quiet>"
_ -> ""
end
body = [
~s(<?xml version="1.0" encoding="UTF-8"?>),
quiet,
"<Delete>",
objects_xml,
"</Delete>"
]
content_md5 = :crypto.hash(:md5, body) |> Base.encode64
body_binary = body |> IO.iodata_to_binary
request(:post, bucket, "/?delete", body: body_binary, headers: %{"content-md5" => content_md5})
end
@doc """
Delete all listed objects.
When performed, this function will continue making `delete_multiple_objects`
requests deleting 1000 objects at a time until all are deleted.
Can be streamed.
"""
@spec delete_all_objects(
bucket :: binary,
objects :: [binary | {binary, binary}, ...]):: ExAws.Operation.S3DeleteAllObjects.t
@spec delete_all_objects(
bucket :: binary,
objects :: [binary | {binary, binary}, ...], opts :: [quiet: true]):: ExAws.Operation.S3DeleteAllObjects.t
def delete_all_objects(bucket, objects, opts \\ []) do
%ExAws.Operation.S3DeleteAllObjects{bucket: bucket, objects: objects, opts: opts}
end
@type get_object_response_opts :: [
{:content_language, binary}
| {:expires, binary}
| {:cach_control, binary}
| {:content_disposition, binary}
| {:content_encoding, binary}
]
@type get_object_opts :: [
{:response, get_object_response_opts}
| head_object_opts
]
@doc "Get an object from a bucket"
@spec get_object(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
@spec get_object(bucket :: binary, object :: binary, opts :: get_object_opts) :: ExAws.Operation.S3.t
@response_params [:content_type, :content_language, :expires, :cache_control, :content_disposition, :content_encoding]
@request_headers [:range, :if_modified_since, :if_unmodified_since, :if_match, :if_none_match]
def get_object(bucket, object, opts \\ []) do
opts = opts |> Map.new
response_opts = opts
|> Map.get(:response, %{})
|> format_and_take(@response_params)
|> namespace("response")
headers = opts
|> format_and_take(@request_headers)
headers = opts
|> Map.get(:encryption, %{})
|> build_encryption_headers
|> Map.merge(headers)
request(:get, bucket, object, headers: headers, params: response_opts)
end
@type download_file_opts :: [
max_concurrency: pos_integer,
chunk_size: pos_integer,
timeout: 60_000,
]
@doc """
Download an S3 Object to a file.
This operation download multiple parts of an S3 object concurrently, allowing
you to maximize throughput.
Defaults to a concurrency of 8, chunk size of 1MB, and a timeout of 1 minute.
"""
@spec download_file(bucket :: binary, path :: binary, dest :: binary) :: __MODULE__.Download.t
@spec download_file(bucket :: binary, path :: binary, dest :: binary, opts :: download_file_opts) :: __MODULE__.Download.t
def download_file(bucket, path, dest, opts \\ []) do
%__MODULE__.Download{
bucket: bucket,
path: path,
dest: dest,
opts: opts
}
end
@type upload_opts :: [{:max_concurrency, pos_integer} | initiate_multipart_upload_opts ]
@doc """
Multipart upload to S3.
Handles initialization, uploading parts concurrently, and multipart upload completion.
## Uploading a stream
Streams that emit binaries may be uploaded directly to S3. Each binary will be uploaded
as a chunk, so it must be at least 5 megabytes in size. The `S3.Upload.stream_file`
helper takes care of reading the file in 5 megabyte chunks.
```
"path/to/big/file"
|> S3.Upload.stream_file
|> S3.upload("my-bucket", "path/on/s3")
|> ExAws.request! #=> :done
```
## Options
These options are specific to this function
* `:max_concurrency` -- The number of concurrent processes reading from this
stream. Only applies when uploading a stream.
All other options (ex. `:content_type`) are passed through to
`ExAws.S3.initiate_multipart_upload/3`.
"""
@spec upload(
source :: Enumerable.t,
bucket :: String.t,
path :: String.t,
opts :: upload_opts) :: __MODULE__.Upload.t
def upload(source, bucket, path, opts \\ []) do
%__MODULE__.Upload{
src: source,
bucket: bucket,
path: path,
opts: opts,
}
end
@doc "Get an object's access control policy"
@spec get_object_acl(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
@spec get_object_acl(bucket :: binary, object :: binary, opts :: Keyword.t) :: ExAws.Operation.S3.t
def get_object_acl(bucket, object, opts \\ []) do
request(:get, bucket, object, resource: "acl", headers: opts |> Map.new)
end
@doc "Get a torrent for a bucket"
@spec get_object_torrent(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
def get_object_torrent(bucket, object) do
request(:get, bucket, object, resource: "torrent")
end
@type head_object_opts :: [
{:encryption, customer_encryption_opts}
| {:range, binary}
| {:if_modified_since, binary}
| {:if_unmodified_since, binary}
| {:if_match, binary}
| {:if_none_match, binary}
]
@doc "Determine of an object exists"
@spec head_object(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
@spec head_object(bucket :: binary, object :: binary, opts :: head_object_opts) :: ExAws.Operation.S3.t
@request_headers [:range, :if_modified_since, :if_unmodified_since, :if_match, :if_none_match]
def head_object(bucket, object, opts \\ []) do
opts = opts |> Map.new
headers = opts
|> format_and_take(@request_headers)
headers = opts
|> Map.get(:encryption, %{})
|> build_encryption_headers
|> Map.merge(headers)
params = case Map.fetch(opts, :version_id) do
{:ok, id} -> %{"versionId" => id}
_ -> %{}
end
request(:head, bucket, object, headers: headers, params: params)
end
@doc "Determine the CORS configuration for an object"
@spec options_object(
bucket :: binary,
object :: binary,
origin :: binary,
request_method :: atom) :: ExAws.Operation.S3.t
@spec options_object(
bucket :: binary,
object :: binary,
origin :: binary,
request_method :: atom,
request_headers :: [binary]) :: ExAws.Operation.S3.t
def options_object(bucket, object, origin, request_method, request_headers \\ []) do
headers = [
{"Origin", origin},
{"Access-Control-Request-Method", request_method},
{"Access-Control-Request-Headers", request_headers |> Enum.join(",")},
]
request(:options, bucket, object, headers: headers)
end
@doc "Restore an object to a particular version"
@spec post_object_restore(
bucket :: binary,
object :: binary,
number_of_days :: pos_integer) :: ExAws.Operation.S3.t
@spec post_object_restore(
bucket :: binary,
object :: binary,
number_of_days :: pos_integer,
opts :: [version_id: binary]) :: ExAws.Operation.S3.t
def post_object_restore(bucket, object, number_of_days, opts \\ []) do
params = case Keyword.fetch(opts, :version_id) do
{:ok, id} -> %{"versionId" => id}
_ -> %{}
end
body = """
<RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-3-01">
<Days>#{number_of_days}</Days>
</RestoreRequest>
"""
request(:post, bucket, object, resource: "restore", params: params, body: body)
end
@type put_object_opts :: [
{:cache_control, binary}
| {:content_disposition, binary}
| {:content_encoding, binary}
| {:content_length, binary}
| {:content_type, binary}
| {:expect, binary}
| {:expires, binary}
| {:storage_class, :standard | :redunced_redundancy}
| {:website_redirect_location, binary}
| {:encryption, encryption_opts}
| {:meta, amz_meta_opts}
| acl_opts
]
@doc "Create an object within a bucket"
@spec put_object(bucket :: binary, object :: binary, body :: binary) :: ExAws.Operation.S3.t
@spec put_object(bucket :: binary, object :: binary, body :: binary, opts :: put_object_opts) :: ExAws.Operation.S3.t
def put_object(bucket, object, body, opts \\ []) do
request(:put, bucket, object, body: body, headers: put_object_headers(opts))
end
@doc "Create or update an object's access control FIXME"
@spec put_object_acl(bucket :: binary, object :: binary, acl :: acl_opts) :: ExAws.Operation.S3.t
def put_object_acl(bucket, object, acl) do
headers = acl |> Map.new |> format_acl_headers
request(:put, bucket, object, headers: headers, resource: "acl")
end
@type pub_object_copy_opts :: [
{:metadata_directive, :COPY | :REPLACE}
| {:copy_source_if_modified_since, binary}
| {:copy_source_if_unmodified_since, binary}
| {:copy_source_if_match, binary}
| {:copy_source_if_none_match, binary}
| {:website_redirect_location, binary}
| {:destination_encryption, encryption_opts}
| {:source_encryption, customer_encryption_opts}
| {:cache_control, binary}
| {:content_disposition, binary}
| {:content_encoding, binary}
| {:content_length, binary}
| {:content_type, binary}
| {:expect, binary}
| {:expires, binary}
| {:storage_class, :standard | :redunced_redundancy}
| {:website_redirect_location, binary}
| {:meta, amz_meta_opts}
| acl_opts
]
  @doc """
  Copy an object.

  Copies `src_bucket`/`src_object` to `dest_bucket`/`dest_object` using the
  S3 `x-amz-copy-source` header. Options cover conditional-copy headers,
  metadata handling, storage class, ACL grants, and server-side encryption
  for both the source (customer keys) and the destination.
  """
  @spec put_object_copy(
    dest_bucket :: binary,
    dest_object :: binary,
    src_bucket :: binary,
    src_object :: binary) :: ExAws.Operation.S3.t
  @spec put_object_copy(
    dest_bucket :: binary,
    dest_object :: binary,
    src_bucket :: binary,
    src_object :: binary,
    opts :: pub_object_copy_opts) :: ExAws.Operation.S3.t
  @amz_headers ~w(
    metadata_directive
    copy_source_if_modified_since
    copy_source_if_unmodified_since
    copy_source_if_match
    copy_source_if_none_match
    storage_class
    website_redirect_location)a
  def put_object_copy(dest_bucket, dest_object, src_bucket, src_object, opts \\ []) do
    opts = opts |> Map.new

    # Conditional-copy / storage-class options become "x-amz-*" headers.
    amz_headers = opts
    |> format_and_take(@amz_headers)
    |> namespace("x-amz")

    # Source-side customer-encryption headers must be renamed from
    # "x-amz-..." to "x-amz-copy-source-..." as the CopyObject API expects.
    source_encryption = opts
    |> Map.get(:source_encryption, %{})
    |> build_encryption_headers
    |> Enum.into(%{}, fn {<<"x-amz", k :: binary>>, v} ->
      {"x-amz-copy-source" <> k, v}
    end)

    # Destination encryption headers keep their plain "x-amz-..." names.
    destination_encryption = opts
    |> Map.get(:destination_encryption, %{})
    |> build_encryption_headers

    # Standard PUT-object headers (content-type, cache-control, ACLs, ...);
    # the :encryption key is handled above, so it is dropped here.
    regular_headers = opts
    |> Map.delete(:encryption)
    |> put_object_headers

    # Later merges win on key collision: encryption headers take precedence
    # over the generic ones. The copy source is URI-encoded so keys with
    # special characters form a valid header value.
    headers = regular_headers
    |> Map.merge(amz_headers)
    |> Map.merge(source_encryption)
    |> Map.merge(destination_encryption)
    |> Map.put("x-amz-copy-source", URI.encode "/#{src_bucket}/#{src_object}")

    request(:put, dest_bucket, dest_object, headers: headers)
  end
end
@type initiate_multipart_upload_opts :: [ {:cache_control, binary}
| {:content_disposition, binary}
| {:content_encoding, binary}
| {:content_type, binary}
| {:expires, binary}
| {:storage_class, :standard | :redunced_redundancy}
| {:website_redirect_location, binary}
| {:encryption, encryption_opts}
| acl_opts
]
@doc "Initiate a multipart upload"
@spec initiate_multipart_upload(bucket :: binary, object :: binary) :: ExAws.Operation.S3.t
@spec initiate_multipart_upload(bucket :: binary, object :: binary, opts :: initiate_multipart_upload_opts) :: ExAws.Operation.S3.t
def initiate_multipart_upload(bucket, object, opts \\ []) do
request(:post, bucket, object, [resource: "uploads", headers: put_object_headers(opts)], %{parser: &Parsers.parse_initiate_multipart_upload/1})
end
@doc "Upload a part for a multipart upload"
@spec upload_part(
  bucket :: binary,
  object :: binary,
  upload_id :: binary,
  part_number :: pos_integer,
  body :: binary) :: ExAws.Operation.S3.t
@spec upload_part(
  bucket :: binary,
  object :: binary,
  upload_id :: binary,
  part_number :: pos_integer,
  body :: binary,
  opts :: [encryption_opts | {:expect, binary}]) :: ExAws.Operation.S3.t
# NOTE(review): opts are accepted per the @spec but currently ignored
# (`_opts`) — encryption/:expect options are never turned into headers.
# Confirm whether this is intentional before relying on them.
def upload_part(bucket, object, upload_id, part_number, body, _opts \\ []) do
  params = %{"uploadId" => upload_id, "partNumber" => part_number}
  request(:put, bucket, object, params: params, body: body)
end
@type upload_part_copy_opts :: [
  {:copy_source_range, Range.t}
  | {:copy_source_if_modified_since, binary}
  | {:copy_source_if_unmodified_since, binary}
  | {:copy_source_if_match, binary}
  | {:copy_source_if_none_match, binary}
  | {:destination_encryption, encryption_opts}
  | {:source_encryption, customer_encryption_opts}
]

@doc "Upload a part for a multipart copy"
@spec upload_part_copy(
  dest_bucket :: binary,
  dest_object :: binary,
  src_bucket :: binary,
  src_object :: binary) :: ExAws.Operation.S3.t
@spec upload_part_copy(
  dest_bucket :: binary,
  dest_object :: binary,
  src_bucket :: binary,
  src_object :: binary,
  opts :: upload_part_copy_opts) :: ExAws.Operation.S3.t
# Conditional-copy options that map 1:1 onto "x-amz-*" request headers.
@amz_headers ~w(
  copy_source_if_modified_since
  copy_source_if_unmodified_since
  copy_source_if_match
  copy_source_if_none_match)a
def upload_part_copy(dest_bucket, dest_object, src_bucket, src_object, opts \\ []) do
  opts = opts |> Map.new

  # Source-object encryption headers are renamed from "x-amz-..." to
  # "x-amz-copy-source-..." as required for copy requests.
  source_encryption = opts
  |> Map.get(:source_encryption, %{})
  |> build_encryption_headers
  |> Enum.into(%{}, fn {<<"x-amz", k :: binary>>, v} ->
    {"x-amz-copy-source" <> k, v}
  end)

  destination_encryption = opts
  |> Map.get(:destination_encryption, %{})
  |> build_encryption_headers

  headers = opts
  |> format_and_take(@amz_headers)
  |> namespace("x-amz")
  |> Map.merge(source_encryption)
  |> Map.merge(destination_encryption)

  # Optional byte range; the case result is piped straight into Map.put,
  # so "x-amz-copy-source" is always set regardless of the range option.
  headers = case opts do
    %{copy_source_range: first..last} -> Map.put(headers, "x-amz-copy-source-range", "bytes=#{first}-#{last}")
    _ -> headers
  end
  |> Map.put("x-amz-copy-source", "/#{src_bucket}/#{src_object}")

  request(:put, dest_bucket, dest_object, [headers: headers], %{parser: &Parsers.parse_upload_part_copy/1})
end
@doc "Complete a multipart upload"
@spec complete_multipart_upload(
  bucket :: binary,
  object :: binary,
  upload_id :: binary,
  parts :: [{binary | pos_integer, binary}, ...]) :: ExAws.Operation.S3.t
def complete_multipart_upload(bucket, object, upload_id, parts) do
  # Build the XML payload as iodata and flatten once at the end.
  # FIX: the @spec allows part numbers as binaries as well as integers, but
  # Integer.to_string/1 raises on binaries; to_string/1 accepts both and is
  # identical for integers.
  parts_xml = parts
  |> Enum.map(fn {part_number, etag}->
    ["<Part>",
     "<PartNumber>", to_string(part_number), "</PartNumber>",
     "<ETag>", etag, "</ETag>",
     "</Part>"]
  end)

  body = ["<CompleteMultipartUpload>", parts_xml, "</CompleteMultipartUpload>"]
  |> IO.iodata_to_binary

  request(:post, bucket, object, [params: %{"uploadId" => upload_id}, body: body], %{parser: &Parsers.parse_complete_multipart_upload/1})
end
@doc "Abort a multipart upload"
@spec abort_multipart_upload(bucket :: binary, object :: binary, upload_id :: binary) :: ExAws.Operation.S3.t
# DELETE on the object scoped to the upload id; discards uploaded parts.
def abort_multipart_upload(bucket, object, upload_id) do
  request(:delete, bucket, object, params: %{"uploadId" => upload_id})
end
@doc "List the parts of a multipart upload"
@spec list_parts(bucket :: binary, object :: binary, upload_id :: binary) :: ExAws.Operation.S3.t
@spec list_parts(bucket :: binary, object :: binary, upload_id :: binary, opts :: Keyword.t) :: ExAws.Operation.S3.t
def list_parts(bucket, object, upload_id, opts \\ []) do
  # Caller-supplied query options plus the mandatory upload id.
  params = Map.put(Map.new(opts), "uploadId", upload_id)
  request(:get, bucket, object, [params: params], %{parser: &Parsers.parse_list_parts/1})
end
@doc """
Generates a pre-signed URL for this object.

When option param :virtual_host is `true`, the {#bucket} name will be used as
the hostname. This will cause the returned URL to be 'http' and not 'https'.

Additional (signed) query parameters can be added to the url by setting option param
`:query_params` to a list of `{"key", "value"}` pairs. Useful if you are uploading parts of
a multipart upload directly from the browser.
"""
@spec presigned_url(config :: %{}, http_method :: atom, bucket :: binary, object :: binary, opts :: presigned_url_opts) :: {:ok, binary} | {:error, binary}
@one_week 60 * 60 * 24 * 7
def presigned_url(config, http_method, bucket, object, opts \\ []) do
  expires_in = Keyword.get(opts, :expires_in, 3600)
  virtual_host = Keyword.get(opts, :virtual_host, false)
  query_params = Keyword.get(opts, :query_params, [])

  # AWS caps pre-signed URL validity at one week.
  if expires_in > @one_week do
    {:error, "expires_in_exceeds_one_week"}
  else
    url = url_to_sign(bucket, object, config, virtual_host)
    datetime = :calendar.universal_time()
    {:ok, ExAws.Auth.presigned_url(http_method, url, :s3, datetime, config, expires_in, query_params)}
  end
end
# Builds the URL to sign, either bucket-as-hostname (virtual host style)
# or bucket-in-path style.
defp url_to_sign(bucket, object, config, virtual_host) do
  object = ensure_slash(object)

  if virtual_host do
    "#{config[:scheme]}#{bucket}.#{config[:host]}#{object}"
  else
    "#{config[:scheme]}#{config[:host]}/#{bucket}#{object}"
  end
end
# Assembles the lazily-executed S3 operation struct; `opts` (e.g. a custom
# parser) overrides any default field via struct/2.
defp request(http_method, bucket, path, data \\ [], opts \\ %{}) do
  operation = %ExAws.Operation.S3{
    http_method: http_method,
    bucket: bucket,
    path: path,
    body: data[:body] || "",
    headers: data[:headers] || %{},
    resource: data[:resource] || "",
    params: data[:params] || %{}
  }

  struct(operation, opts)
end
end
| 33.966518 | 157 | 0.668693 |
798bcee27ed37d9536582b1015368ff6c9ca7b9d | 520 | exs | Elixir | test/aoc2021/day1_test.exs | jarimatti/aoc-2021 | 02ea9a8137617d0a43f46a5cfe589d69572b653e | [
"MIT"
] | null | null | null | test/aoc2021/day1_test.exs | jarimatti/aoc-2021 | 02ea9a8137617d0a43f46a5cfe589d69572b653e | [
"MIT"
] | null | null | null | test/aoc2021/day1_test.exs | jarimatti/aoc-2021 | 02ea9a8137617d0a43f46a5cfe589d69572b653e | [
"MIT"
] | null | null | null | defmodule Aoc2021.Day1Test do
use ExUnit.Case, async: true
doctest Aoc2021.Day1
test "increment count produces 7 for part 1 example" do
  # Sample sonar-sweep depth readings from the part 1 puzzle description.
  input = [199, 200, 208, 210, 200, 207, 240, 269, 260, 263]

  assert Aoc2021.Day1.count_increments(input) == 7
end
# Regression pin against the author's puzzle input.
test "part 1 solution is correct" do
  assert Aoc2021.Day1.solve_part1() == 1791
end
# Regression pin against the author's puzzle input.
test "part 2 solution is correct" do
  assert Aoc2021.Day1.solve_part2() == 1822
end
end
| 17.333333 | 57 | 0.607692 |
798c080f65e1e00a8bb6046fcc30911dcdad7ee1 | 6,333 | exs | Elixir | test/phoenix/integration/endpoint_test.exs | rrrcompagnoni/phoenix | ca7488815a9b5bcaee0b9afee10162d177a2b90d | [
"MIT"
] | 1 | 2020-04-14T09:49:46.000Z | 2020-04-14T09:49:46.000Z | test/phoenix/integration/endpoint_test.exs | rrrcompagnoni/phoenix | ca7488815a9b5bcaee0b9afee10162d177a2b90d | [
"MIT"
] | 1 | 2020-11-08T08:30:10.000Z | 2020-11-08T08:30:10.000Z | test/phoenix/integration/endpoint_test.exs | rrrcompagnoni/phoenix | ca7488815a9b5bcaee0b9afee10162d177a2b90d | [
"MIT"
] | null | null | null | Code.require_file "../../support/http_client.exs", __DIR__
defmodule Phoenix.Integration.EndpointTest do
use ExUnit.Case
import ExUnit.CaptureLog
alias Phoenix.Integration.AdapterTest.ProdEndpoint
alias Phoenix.Integration.AdapterTest.DevEndpoint
alias Phoenix.Integration.AdapterTest.ProdInet6Endpoint
# Endpoint config injected via the app env at compile time.
# NOTE(review): ports are strings, presumably to exercise the endpoint's
# port normalization — confirm.
Application.put_env(:endpoint_int, ProdEndpoint,
  http: [port: "4807"], url: [host: "example.com"], server: true, drainer: false,
  render_errors: [accepts: ~w(html json)])

Application.put_env(:endpoint_int, DevEndpoint,
  http: [port: "4808"], debug_errors: true, drainer: false)

# The inet6 option is spelled differently depending on the plug_cowboy major.
if hd(Application.spec(:plug_cowboy, :vsn)) == ?1 do
  # Cowboy v1
  Application.put_env(:endpoint_int, ProdInet6Endpoint,
    http: [{:port, "4809"}, :inet6],
    url: [host: "example.com"], server: true)
else
  # Cowboy v2
  Application.put_env(:endpoint_int, ProdInet6Endpoint,
    http: [port: "4809", transport_options: [socket_opts: [:inet6]]],
    url: [host: "example.com"],
    server: true)
end
defmodule Router do
  @moduledoc """
  Let's use a plug router to test this endpoint.
  """
  use Plug.Router
  plug :match
  plug :dispatch

  get "/" do
    send_resp conn, 200, "ok"
  end

  get "/router/oops" do
    # Bind conn so the macro-provided variable is not reported as unused.
    _ = conn
    raise "oops"
  end

  # Unmatched paths raise the same error a Phoenix router would, so the
  # endpoint's 404 rendering can be exercised.
  match _ do
    raise Phoenix.Router.NoRouteError, conn: conn, router: __MODULE__
  end

  # NOTE(review): stub presumably required by Phoenix tooling that
  # introspects routes on this "router" — confirm.
  def __routes__ do
    []
  end
end
defmodule Wrapper do
  @moduledoc """
  A wrapper around the endpoint call to extract information.

  This exists so we can verify that the exception handling
  in the Phoenix endpoint is working as expected. In order
  to do that, we need to wrap the endpoint.call/2 in a
  before compile callback so it wraps the whole stack,
  including render errors and debug errors functionality.
  """
  defmacro __before_compile__(_) do
    quote do
      defoverridable [call: 2]

      def call(conn, opts) do
        # Assert we never have a lingering sent message in the inbox
        refute_received {:plug_conn, :sent}

        try do
          super(conn, opts)
        after
          # When we pipe downstream, downstream will always render,
          # either because the router is responding or because the
          # endpoint error layer is kicking in.
          assert_received {:plug_conn, :sent}
          # Put the message back so later plumbing still sees it.
          send self(), {:plug_conn, :sent}
        end
      end
    end
  end
end
# Define three structurally identical endpoints, one per configuration above.
for mod <- [ProdEndpoint, DevEndpoint, ProdInet6Endpoint] do
  defmodule mod do
    use Phoenix.Endpoint, otp_app: :endpoint_int
    @before_compile Wrapper

    # :oops runs before the router so plug-stack errors are also exercised.
    plug :oops
    plug Router

    @doc """
    Verify errors from the plug stack too (before the router).
    """
    def oops(conn, _opts) do
      if conn.path_info == ~w(oops) do
        raise "oops"
      else
        conn
      end
    end
  end
end
# Ports matching the endpoint configuration above.
@prod 4807
@dev 4808
alias Phoenix.Integration.HTTPClient
@tag :cowboy2
test "starts drainer in supervision tree if configured" do
  capture_log fn ->
    # ProdInet6Endpoint keeps the default drainer; ProdEndpoint disables it
    # with drainer: false in its config.
    {:ok, _} = ProdInet6Endpoint.start_link()
    assert List.keyfind(Supervisor.which_children(ProdInet6Endpoint), Plug.Cowboy.Drainer, 0)
    Supervisor.stop(ProdInet6Endpoint)

    {:ok, _} = ProdEndpoint.start_link()
    refute List.keyfind(Supervisor.which_children(ProdEndpoint), Plug.Cowboy.Drainer, 0)
    Supervisor.stop(ProdEndpoint)
  end
end
# Boots the prod-style endpoint (server: true) and checks 200/404/500
# rendering via the configured ErrorView, then that it stops serving.
test "adapters starts on configured port and serves requests and stops for prod" do
  capture_log fn ->
    # Has server: true
    {:ok, _} = ProdEndpoint.start_link()

    # Requests
    {:ok, resp} = HTTPClient.request(:get, "http://127.0.0.1:#{@prod}", %{})
    assert resp.status == 200
    assert resp.body == "ok"

    {:ok, resp} = HTTPClient.request(:get, "http://127.0.0.1:#{@prod}/unknown", %{})
    assert resp.status == 404
    assert resp.body == "404.html from Phoenix.ErrorView"

    # The _format override selects the JSON error rendering.
    {:ok, resp} = HTTPClient.request(:get, "http://127.0.0.1:#{@prod}/unknown?_format=json", %{})
    assert resp.status == 404
    assert resp.body |> Phoenix.json_library().decode!() == %{"error" => "Got 404 from error with GET"}

    assert capture_log(fn ->
      {:ok, resp} = HTTPClient.request(:get, "http://127.0.0.1:#{@prod}/oops", %{})
      assert resp.status == 500
      assert resp.body == "500.html from Phoenix.ErrorView"

      {:ok, resp} = HTTPClient.request(:get, "http://127.0.0.1:#{@prod}/router/oops", %{})
      assert resp.status == 500
      assert resp.body == "500.html from Phoenix.ErrorView"

      Supervisor.stop(ProdEndpoint)
    end) =~ "** (RuntimeError) oops"

    # After the supervisor stops, the port must be closed.
    {:error, _reason} = HTTPClient.request(:get, "http://127.0.0.1:#{@prod}", %{})
  end
end
# Dev endpoint has server: false, so serving is toggled globally via
# :serve_endpoints; debug_errors: true swaps ErrorView for debug pages.
test "adapters starts on configured port and serves requests and stops for dev" do
  # Toggle globally
  serve_endpoints(true)
  on_exit(fn -> serve_endpoints(false) end)

  capture_log fn ->
    # Has server: false
    {:ok, _} = DevEndpoint.start_link()

    # Requests
    {:ok, resp} = HTTPClient.request(:get, "http://127.0.0.1:#{@dev}", %{})
    assert resp.status == 200
    assert resp.body == "ok"

    {:ok, resp} = HTTPClient.request(:get, "http://127.0.0.1:#{@dev}/unknown", %{})
    assert resp.status == 404
    assert resp.body =~ "NoRouteError at GET /unknown"

    assert capture_log(fn ->
      {:ok, resp} = HTTPClient.request(:get, "http://127.0.0.1:#{@dev}/oops", %{})
      assert resp.status == 500
      assert resp.body =~ "RuntimeError at GET /oops"

      {:ok, resp} = HTTPClient.request(:get, "http://127.0.0.1:#{@dev}/router/oops", %{})
      assert resp.status == 500
      assert resp.body =~ "RuntimeError at GET /router/oops"

      Supervisor.stop(DevEndpoint)
    end) =~ "** (RuntimeError) oops"

    {:error, _reason} = HTTPClient.request(:get, "http://127.0.0.1:#{@dev}", %{})
  end
end
# Only checks that boot succeeds with the inet6 socket configuration.
test "adapters starts on configured port and inet6 for prod" do
  capture_log fn ->
    # Has server: true
    {:ok, _} = ProdInet6Endpoint.start_link()
    Supervisor.stop(ProdInet6Endpoint)
  end
end
# Globally toggles whether endpoints configured with server: false boot
# their adapters.
defp serve_endpoints(bool) do
  Application.put_env(:phoenix, :serve_endpoints, bool)
end
end
| 30.157143 | 105 | 0.62498 |
798c48c2977d323b684327515a430c8391b27379 | 3,960 | exs | Elixir | test/changelog_web/controllers/person_controller_test.exs | joebew42/changelog.com | da4ec68d15f3a2b4b6c29033443d7e7afe814d18 | [
"MIT"
] | 1 | 2018-01-22T20:07:10.000Z | 2018-01-22T20:07:10.000Z | test/changelog_web/controllers/person_controller_test.exs | joebew42/changelog.com | da4ec68d15f3a2b4b6c29033443d7e7afe814d18 | [
"MIT"
] | null | null | null | test/changelog_web/controllers/person_controller_test.exs | joebew42/changelog.com | da4ec68d15f3a2b4b6c29033443d7e7afe814d18 | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.PersonControllerTest do
use ChangelogWeb.ConnCase
use Bamboo.Test
import Mock
alias Changelog.{Newsletters, Person}
describe "joining" do
  test "getting the form", %{conn: conn} do
    conn = get(conn, person_path(conn, :join))
    assert conn.status == 200
    assert conn.resp_body =~ "form"
  end

  # Signed-in users are redirected away from the join form.
  @tag :as_user
  test "getting form when signed in is not allowed", %{conn: conn} do
    conn = get(conn, person_path(conn, :join))
    assert html_response(conn, 302)
    assert conn.halted
  end

  test "submission with missing data re-renders with errors", %{conn: conn} do
    count_before = count(Person)
    conn = post(conn, person_path(conn, :join), person: %{email: "nope"})
    assert html_response(conn, 200) =~ ~r/wrong/i
    assert count(Person) == count_before
  end

  test "submission with required data creates person, sends email, and redirects", %{conn: conn} do
    count_before = count(Person)

    # Stub the mailing-list client so no external call is made.
    conn = with_mock Craisin.Subscriber, [subscribe: fn(_, _, _) -> nil end] do
      post(conn, person_path(conn, :join), person: %{email: "[email protected]", name: "Joe Blow", handle: "joeblow"})
    end

    person = Repo.one(from p in Person, where: p.email == "[email protected]")
    assert_delivered_email ChangelogWeb.Email.community_welcome(person)
    assert redirected_to(conn) == root_path(conn, :index)
    assert count(Person) == count_before + 1
  end

  test "submission with existing email sends email, redirects, but doesn't create new person", %{conn: conn} do
    existing = insert(:person)
    count_before = count(Person)

    conn = with_mock Craisin.Subscriber, [subscribe: fn(_, _, _) -> nil end] do
      post(conn, person_path(conn, :join), person: %{email: existing.email, name: "Joe Blow", handle: "joeblow"})
    end

    existing = Repo.one(from p in Person, where: p.email == ^existing.email)
    assert_delivered_email ChangelogWeb.Email.community_welcome(existing)
    assert redirected_to(conn) == root_path(conn, :index)
    assert count(Person) == count_before
  end
end
describe "subscribing" do
  test "getting the form", %{conn: conn} do
    conn = get(conn, person_path(conn, :subscribe))
    assert conn.status == 200
    assert conn.resp_body =~ "form"
  end

  # Signed-in users are redirected away from the subscribe form.
  @tag :as_user
  test "getting form when signed in is not allowed", %{conn: conn} do
    conn = get(conn, person_path(conn, :subscribe))
    assert html_response(conn, 302)
    assert conn.halted
  end

  test "with required data creates person, subscribes, sends email, redirects", %{conn: conn} do
    with_mock(Craisin.Subscriber, [subscribe: fn(_, _) -> nil end]) do
      count_before = count(Person)
      conn = post(conn, person_path(conn, :subscribe), email: "[email protected]")
      person = Repo.one(from p in Person, where: p.email == "[email protected]")
      # Defaults to the weekly newsletter when no list is given.
      assert called(Craisin.Subscriber.subscribe(Newsletters.weekly().list_id, :_))
      assert_delivered_email ChangelogWeb.Email.subscriber_welcome(person)
      assert redirected_to(conn) == root_path(conn, :index)
      assert count(Person) == count_before + 1
    end
  end

  test "with existing email subscribes, sends email, redirects, but doesn't create person", %{conn: conn} do
    with_mock(Craisin.Subscriber, [subscribe: fn(_, _) -> nil end]) do
      existing = insert(:person)
      count_before = count(Person)
      # Explicit list param routes the subscription to the nightly list.
      conn = post(conn, person_path(conn, :subscribe), email: existing.email, list: "nightly")
      existing = Repo.one(from p in Person, where: p.email == ^existing.email)
      assert called(Craisin.Subscriber.subscribe(Newsletters.nightly().list_id, :_))
      assert_delivered_email ChangelogWeb.Email.subscriber_welcome(existing)
      assert redirected_to(conn) == root_path(conn, :index)
      assert count(Person) == count_before
    end
  end
end
end
| 37.009346 | 115 | 0.664899 |
798c68118f025b13cffe5a89530f23e960a066e0 | 275 | ex | Elixir | lib/team_budget_graphql/resolvers/user_resolver.ex | allefgomes/team_budget | cdd02c10fefe6c844a477aae361e6f7a41d4f734 | [
"MIT"
] | null | null | null | lib/team_budget_graphql/resolvers/user_resolver.ex | allefgomes/team_budget | cdd02c10fefe6c844a477aae361e6f7a41d4f734 | [
"MIT"
] | 2 | 2021-07-30T15:19:20.000Z | 2021-07-31T15:06:24.000Z | lib/team_budget_graphql/resolvers/user_resolver.ex | allefgomes/team_budget | cdd02c10fefe6c844a477aae361e6f7a41d4f734 | [
"MIT"
] | null | null | null | defmodule TeamBudgetGraphql.Resolvers.UserResolver do
alias TeamBudget.Accounts
# GraphQL resolver: returns every user (args are unused).
def list_users(_parent, _args, _resolution) do
  {:ok, Accounts.list_users()}
end
# GraphQL resolver: creates a user from the :user input object; delegates
# result/error shape to the Accounts context.
def create_user(_parent, %{user: user_attrs}, _resolution) do
  Accounts.create_user(user_attrs)
end
end
| 22.916667 | 58 | 0.756364 |
798ca18659c8334eb180287afd9d27d0a7e5bca9 | 2,645 | exs | Elixir | test/mssql_ecto/delete_all_test.exs | nikneroz/mssql_ecto | d010b6c9c9041756353fc8184fa7e6368103cfac | [
"Apache-2.0"
] | null | null | null | test/mssql_ecto/delete_all_test.exs | nikneroz/mssql_ecto | d010b6c9c9041756353fc8184fa7e6368103cfac | [
"Apache-2.0"
] | null | null | null | test/mssql_ecto/delete_all_test.exs | nikneroz/mssql_ecto | d010b6c9c9041756353fc8184fa7e6368103cfac | [
"Apache-2.0"
] | null | null | null | defmodule MssqlEcto.DeleteAllTest do
use MssqlEcto.Case, async: true
import Ecto.Query
alias Ecto.Queryable
# Fixture schema; associations deliberately use non-id reference columns
# (:x / :y) to exercise join SQL generation.
defmodule Schema do
  use Ecto.Schema

  schema "schema" do
    field(:x, :integer)
    field(:y, :integer)
    field(:z, :integer)
    field(:w, {:array, :integer})

    has_many(
      :comments,
      MssqlEcto.DeleteAllTest.Schema2,
      references: :x,
      foreign_key: :z
    )

    has_one(
      :permalink,
      MssqlEcto.DeleteAllTest.Schema3,
      references: :y,
      foreign_key: :id
    )
  end
end
# Fixture child schema: belongs to Schema via the non-conventional
# :z -> :x foreign key.
defmodule Schema2 do
  use Ecto.Schema

  schema "schema2" do
    belongs_to(
      :post,
      MssqlEcto.DeleteAllTest.Schema,
      references: :x,
      foreign_key: :z
    )
  end
end
# Fixture schema with array and binary columns.
defmodule Schema3 do
  use Ecto.Schema

  schema "schema3" do
    field(:list1, {:array, :string})
    field(:list2, {:array, :integer})
    field(:binary, :binary)
  end
end
# Checks the generated MSSQL DELETE statements (note the SQL-Server-specific
# "DELETE alias FROM ..." form) for plain, filtered and joined deletes.
test "delete all" do
  query = Schema |> Queryable.to_query() |> normalize
  assert SQL.delete_all(query) == ~s{DELETE s0 FROM "schema" AS s0}

  query = from(e in Schema, where: e.x == 123) |> normalize

  assert SQL.delete_all(query) ==
           ~s{DELETE s0 FROM "schema" AS s0 WHERE (s0."x" = 123)}

  query = Schema |> join(:inner, [p], q in Schema2, p.x == q.z) |> normalize

  assert SQL.delete_all(query) ==
           ~s{DELETE s0 FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s0."x" = s1."z")}

  query =
    from(e in Schema, where: e.x == 123, join: q in Schema2, on: e.x == q.z)
    |> normalize

  assert SQL.delete_all(query) ==
           ~s{DELETE s0 FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s0."x" = s1."z") WHERE (s0."x" = 123)}

  # Association joins pick up the custom references/foreign_key columns.
  query =
    from(
      e in Schema,
      where: e.x == 123,
      join: assoc(e, :comments),
      join: assoc(e, :permalink)
    )
    |> normalize

  assert SQL.delete_all(query) ==
           ~s{DELETE s0 FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s1."z" = s0."x") INNER JOIN "schema3" AS s2 ON (s2."id" = s0."y") WHERE (s0."x" = 123)}
end
# A select on the query maps to MSSQL's OUTPUT DELETED.* clause.
test "delete all with returning" do
  query = Schema |> Queryable.to_query() |> select([m], m) |> normalize

  assert SQL.delete_all(query) ==
           ~s{DELETE s0 OUTPUT DELETED."id", DELETED."x", DELETED."y", DELETED."z", DELETED."w" FROM "schema" AS s0}
end
# A query prefix becomes the schema/owner qualifier in the table name.
test "delete all with prefix" do
  query = Schema |> Queryable.to_query() |> normalize

  assert SQL.delete_all(%{query | prefix: "prefix"}) ==
           ~s{DELETE s0 FROM "prefix"."schema" AS s0}
end
end
| 25.679612 | 164 | 0.572779 |
798cb4d1dc721a05c20caab49a16f249e048ab92 | 2,309 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/parameter_metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/parameter_metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/parameter_metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated model (see file header); fields map 1:1 to the JSON
# representation of the Dataflow ParameterMetadata resource.
defmodule GoogleApi.Dataflow.V1b3.Model.ParameterMetadata do
  @moduledoc """
  Metadata for a specific parameter.

  ## Attributes

  *   `helpText` (*type:* `String.t`, *default:* `nil`) - Required. The help text to display for the parameter.
  *   `isOptional` (*type:* `boolean()`, *default:* `nil`) - Optional. Whether the parameter is optional. Defaults to false.
  *   `label` (*type:* `String.t`, *default:* `nil`) - Required. The label to display for the parameter.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Required. The name of the parameter.
  *   `paramType` (*type:* `String.t`, *default:* `nil`) - Optional. The type of the parameter. Used for selecting input picker.
  *   `regexes` (*type:* `list(String.t)`, *default:* `nil`) - Optional. Regexes that the parameter must match.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :helpText => String.t() | nil,
          :isOptional => boolean() | nil,
          :label => String.t() | nil,
          :name => String.t() | nil,
          :paramType => String.t() | nil,
          :regexes => list(String.t()) | nil
        }

  field(:helpText)
  field(:isOptional)
  field(:label)
  field(:name)
  field(:paramType)
  field(:regexes, type: :list)
end
# Delegates JSON decoding to the generated model helper.
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.ParameterMetadata do
  def decode(value, options) do
    GoogleApi.Dataflow.V1b3.Model.ParameterMetadata.decode(value, options)
  end
end
# Encodes via the shared Gax model base encoder.
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.ParameterMetadata do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.241935 | 128 | 0.684279 |
798cca17d725e5cdc934f24bf3a66dadce3c8079 | 24,000 | ex | Elixir | lib/future_made_concerts/spotify/client/http.ex | Future-Made/concerts-for-impact | 5532cd1be5252fa0ccb0b956f0961be8701e0e04 | [
"MIT"
] | null | null | null | lib/future_made_concerts/spotify/client/http.ex | Future-Made/concerts-for-impact | 5532cd1be5252fa0ccb0b956f0961be8701e0e04 | [
"MIT"
] | null | null | null | lib/future_made_concerts/spotify/client/http.ex | Future-Made/concerts-for-impact | 5532cd1be5252fa0ccb0b956f0961be8701e0e04 | [
"MIT"
] | null | null | null | defmodule FutureMadeConcerts.Spotify.Client.HTTP do
@moduledoc """
This module implements the `FutureMadeConcerts.Spotify.Client` behaviour and interacts with the actual Spotify API.
Reference docs are available at: <https://developer.spotify.com/documentation/web-api/>
"""
@behaviour FutureMadeConcerts.Spotify.Client
alias FutureMadeConcerts.Spotify.Schema.{
Album,
Artist,
Copyright,
Device,
Episode,
Player,
Playlist,
Publisher,
Show,
Track,
User
}
alias FutureMadeConcerts.Spotify.Auth
alias Ueberauth.Auth.Credentials
require Logger
# Web API base and the accounts-service token endpoint.
@base_url "https://api.spotify.com/v1"
@refresh_url "https://accounts.spotify.com/api/token"

@json_headers [
  {"Accept", "application/json"},
  {"Content-Type", "application/json"}
]

@form_headers [
  {"Content-Type", "application/x-www-form-urlencoded"}
]

# Default pagination used by list endpoints.
@default_limit 20
@default_offset 0
################################################################################
################################ AUTH/PROFILE ##################################
################################################################################
@impl true
def get_profile(token) do
  # GET /me — fetch the authenticated user's profile.
  result = json_get(@base_url <> "/me", auth_headers(token))

  case result do
    {:ok, %{status: 200} = response} ->
      {:ok, response.body |> Jason.decode!() |> parse_profile()}

    error ->
      handle_errors(error)
  end
end
@impl true
# Exchanges a refresh token for a new access token at the accounts service,
# authenticating with the app's Basic credentials.
def get_token(refresh_token) do
  headers = [
    {"Authorization", "Basic #{Auth.base64_encoded_credentials()}"}
  ]

  case form_post(
         @refresh_url,
         %{grant_type: "refresh_token", refresh_token: refresh_token},
         headers
       ) do
    {:ok, %{status: 200} = response} ->
      # The original refresh token is threaded into the parser — presumably
      # because the refresh grant response omits it; confirm.
      auth_data =
        response.body
        |> Jason.decode!()
        |> parse_auth_data(refresh_token)

      {:ok, auth_data}

    other_response ->
      handle_errors(other_response)
  end
end
################################################################################
#################################### PLAYER ####################################
################################################################################
@impl true
def get_devices(token) do
  # GET /me/player/devices — list the user's available playback devices.
  result = json_get(@base_url <> "/me/player/devices", auth_headers(token))

  case result do
    {:ok, %{status: 200} = response} ->
      devices =
        response.body
        |> Jason.decode!()
        |> Map.get("devices")
        |> Enum.map(&parse_device/1)

      {:ok, devices}

    error ->
      handle_errors(error)
  end
end
@impl true
def next(token) do
  # POST /me/player/next — a 204 means the skip was accepted.
  result = post(@base_url <> "/me/player/next", <<>>, auth_headers(token))

  case result do
    {:ok, %{status: 204}} -> :ok
    error -> handle_errors(error)
  end
end
@impl true
# Current playback state; episodes are requested explicitly via
# additional_types. A 204 (nothing playing / no active device) is mapped to
# an empty %Player{} rather than an error.
def now_playing(token) do
  case json_get(@base_url <> "/me/player?additional_types=episode", auth_headers(token)) do
    {:ok, %{status: 204}} ->
      {:ok, %Player{}}

    {:ok, %{status: 200} = response} ->
      {:ok,
       response.body
       |> Jason.decode!()
       |> parse_now_playing()}

    other_response ->
      handle_errors(other_response)
  end
end
@impl true
def pause(token) do
  # PUT /me/player/pause with an empty JSON body.
  result = json_put(@base_url <> "/me/player/pause", %{}, auth_headers(token))

  case result do
    {:ok, %{status: 204}} -> :ok
    error -> handle_errors(error)
  end
end
@impl true
def play(token, uri, context_uri)

@impl true
def play(token) do
  # Resume playback on the active device.
  result = json_put(@base_url <> "/me/player/play", %{}, auth_headers(token))

  case result do
    {:ok, %{status: 204}} -> :ok
    error -> handle_errors(error)
  end
end
@impl true
# Starts playback of the given URI. Track/episode URIs are items and must be
# sent as `uris`; everything else is treated as a playback context.
def play(token, uri) do
  payload =
    # NOTE(review): substring check — assumes non-item URIs never contain
    # "track"/"episode" as a fragment; confirm against the URI formats used.
    if uri =~ "track" or uri =~ "episode" do
      %{uris: [uri]}
    else
      %{context_uri: uri}
    end

  case json_put(@base_url <> "/me/player/play", payload, auth_headers(token)) do
    {:ok, %{status: 204}} ->
      :ok

    other_response ->
      handle_errors(other_response)
  end
end
@impl true
def play(token, uri, context_uri) do
  # Play a specific item (uri) positioned inside a context (context_uri).
  payload = %{context_uri: context_uri, offset: %{uri: uri}}

  result = json_put(@base_url <> "/me/player/play", payload, auth_headers(token))

  case result do
    {:ok, %{status: 204}} -> :ok
    error -> handle_errors(error)
  end
end
@impl true
def prev(token) do
  # POST /me/player/previous — a 204 means the request was accepted.
  result = post(@base_url <> "/me/player/previous", <<>>, auth_headers(token))

  case result do
    {:ok, %{status: 204}} -> :ok
    error -> handle_errors(error)
  end
end
@impl true
def seek(token, position_ms) do
  # PUT /me/player/seek?position_ms=N with an empty body.
  query = URI.encode_query(%{position_ms: position_ms})
  result = put(@base_url <> "/me/player/seek?" <> query, <<>>, auth_headers(token))

  case result do
    {:ok, %{status: 204}} -> :ok
    error -> handle_errors(error)
  end
end
@impl true
def set_volume(token, volume_percent) do
  # PUT /me/player/volume?volume_percent=N with an empty body.
  query = URI.encode_query(%{volume_percent: volume_percent})
  result = put(@base_url <> "/me/player/volume?" <> query, <<>>, auth_headers(token))

  case result do
    {:ok, %{status: 204}} -> :ok
    error -> handle_errors(error)
  end
end
@impl true
def transfer_playback(token, device_id) do
  # The endpoint takes a list of device ids; we always send exactly one.
  payload = %{device_ids: [device_id]}
  result = json_put(@base_url <> "/me/player", payload, auth_headers(token))

  case result do
    {:ok, %{status: 204}} -> :ok
    error -> handle_errors(error)
  end
end
################################################################################
################################### CONTENT ####################################
################################################################################
@impl true
def get_album(token, album_id) do
  # market=from_token lets Spotify resolve the market from the access token.
  query = URI.encode_query(%{market: "from_token"})
  result = json_get(@base_url <> "/albums/" <> album_id <> "?" <> query, auth_headers(token))

  case result do
    {:ok, %{status: 200} = response} ->
      {:ok, response.body |> Jason.decode!() |> parse_album()}

    error ->
      handle_errors(error)
  end
end
@impl true
def get_artist(token, artist_id) do
  # GET /artists/:id — no market parameter needed for artist objects.
  result = json_get(@base_url <> "/artists/" <> artist_id, auth_headers(token))

  case result do
    {:ok, %{status: 200} = response} ->
      {:ok, response.body |> Jason.decode!() |> parse_artist()}

    error ->
      handle_errors(error)
  end
end
@impl true
# Paginated album listing for an artist. :album_group of :all means no
# include_groups filter; any other value is passed through verbatim.
def get_artist_albums(token, artist_id, opts) do
  limit = Keyword.get(opts, :limit, @default_limit)
  offset = Keyword.get(opts, :offset, @default_offset)
  album_group = Keyword.get(opts, :album_group, :all)

  params = %{
    market: "from_token",
    limit: limit,
    offset: offset
  }

  params =
    case album_group do
      :all -> params
      other_group -> Map.put(params, :include_groups, other_group)
    end

  case json_get(
         @base_url <> "/artists/" <> artist_id <> "/albums" <> "?" <> URI.encode_query(params),
         auth_headers(token)
       ) do
    {:ok, %{status: 200} = response} ->
      albums =
        response.body
        |> Jason.decode!()
        |> parse_artist_albums()

      {:ok, albums}

    other_response ->
      handle_errors(other_response)
  end
end
@impl true
def get_episodes(token, show_id) do
  query = URI.encode_query(%{market: "from_token"})

  result =
    json_get(@base_url <> "/shows/" <> show_id <> "/episodes" <> "?" <> query, auth_headers(token))

  case result do
    {:ok, %{status: 200} = response} ->
      episodes =
        response.body
        |> Jason.decode!()
        |> Map.get("items")
        |> Enum.map(&parse_episode/1)

      {:ok, episodes}

    error ->
      handle_errors(error)
  end
end
@impl true
def get_playlist(token, playlist_id) do
  query = URI.encode_query(%{market: "from_token"})

  result =
    json_get(@base_url <> "/playlists/" <> playlist_id <> "?" <> query, auth_headers(token))

  case result do
    {:ok, %{status: 200} = response} ->
      {:ok, response.body |> Jason.decode!() |> parse_playlist()}

    error ->
      handle_errors(error)
  end
end
@impl true
def get_recommendations_from_artists(token, artist_ids) do
  # seed_artists takes a comma-separated list of artist ids.
  query = URI.encode_query(%{seed_artists: Enum.join(artist_ids, ","), market: "from_token"})

  result = json_get(@base_url <> "/recommendations" <> "?" <> query, auth_headers(token))

  case result do
    {:ok, %{status: 200} = response} ->
      tracks =
        response.body
        |> Jason.decode!()
        |> Map.get("tracks")
        |> Enum.map(&parse_track/1)

      {:ok, tracks}

    error ->
      handle_errors(error)
  end
end
@impl true
def get_show(token, show_id) do
  query = URI.encode_query(%{market: "from_token"})
  result = json_get(@base_url <> "/shows/" <> show_id <> "?" <> query, auth_headers(token))

  case result do
    {:ok, %{status: 200} = response} ->
      {:ok, response.body |> Jason.decode!() |> parse_show()}

    error ->
      handle_errors(error)
  end
end
@impl true
def get_episode(token, episode_id) do
  query = URI.encode_query(%{market: "from_token"})
  result = json_get(@base_url <> "/episodes/" <> episode_id <> "?" <> query, auth_headers(token))

  case result do
    {:ok, %{status: 200} = response} ->
      {:ok, response.body |> Jason.decode!() |> parse_episode_with_metadata()}

    error ->
      handle_errors(error)
  end
end
# NOTE: re-declares @default_limit with the same value it already has above;
# kept for safety, but it is redundant.
@default_limit 20

@impl true
def recently_played_tracks(token, opts) do
  params = %{
    limit: Keyword.get(opts, :limit, @default_limit)
  }

  params =
    case Keyword.get(opts, :before) do
      nil -> params
      before_time -> Map.put(params, :before, DateTime.to_unix(before_time))
    end

  # FIX: the :after option was previously written to the :before query key
  # (copy-paste bug), so "after" cursors were silently sent as "before".
  # NOTE(review): Spotify documents these cursors as millisecond timestamps;
  # DateTime.to_unix/1 returns seconds — confirm whether
  # DateTime.to_unix(t, :millisecond) is intended (applies to :before too).
  params =
    case Keyword.get(opts, :after) do
      nil -> params
      after_time -> Map.put(params, :after, DateTime.to_unix(after_time))
    end

  case json_get(
         @base_url <> "/me/player/recently-played?" <> URI.encode_query(params),
         auth_headers(token)
       ) do
    {:ok, %{status: 200} = response} ->
      results =
        response.body
        |> Jason.decode!()
        |> get_in(["items", Access.all(), "track"])
        |> Enum.map(&parse_track/1)

      {:ok, results}

    other_response ->
      handle_errors(other_response)
  end
end
# Item types searched when the caller does not pass :types.
@default_types [:track]

@impl true
# Paginated search; `type` is sent as a comma-separated list and the parsed
# types are forwarded to the result parser so it knows which keys to read.
def search(token, q, opts) do
  limit = Keyword.get(opts, :limit, @default_limit)
  offset = Keyword.get(opts, :offset, @default_offset)
  types = Keyword.get(opts, :types, @default_types)

  params = %{
    q: q,
    type: Enum.join(types, ","),
    market: "from_token",
    limit: limit,
    offset: offset
  }

  case json_get(@base_url <> "/search?" <> URI.encode_query(params), auth_headers(token)) do
    {:ok, %{status: 200} = response} ->
      results =
        response.body
        |> Jason.decode!()
        |> parse_search_results(types)

      {:ok, results}

    other_response ->
      handle_errors(other_response)
  end
end
@default_time_range "medium_term"

@impl true
def top_tracks(token, opts) do
  # The user's most-played tracks over the requested time range
  # (short_term / medium_term / long_term).
  params = %{
    limit: Keyword.get(opts, :limit, @default_limit),
    offset: Keyword.get(opts, :offset, @default_offset),
    time_range: Keyword.get(opts, :time_range, @default_time_range)
  }

  case json_get(@base_url <> "/me/top/tracks?" <> URI.encode_query(params), auth_headers(token)) do
    {:ok, %{status: 200} = response} ->
      tracks =
        response.body
        |> Jason.decode!()
        |> Map.get("items")
        |> Enum.map(&parse_track/1)

      {:ok, tracks}

    error ->
      handle_errors(error)
  end
end
# --- Low-level HTTP plumbing -------------------------------------------

# Bearer-token authorization header attached to every API call.
defp auth_headers(token), do: [{"Authorization", "Bearer #{token}"}]

# GET expecting a JSON response.
defp json_get(url, headers), do: get(url, @json_headers ++ headers)

# PUT with a JSON-encoded request body.
defp json_put(url, params, headers),
  do: put(url, Jason.encode!(params), @json_headers ++ headers)

# POST with a form-urlencoded body (used by the OAuth token endpoints).
defp form_post(url, params, headers),
  do: post(url, URI.encode_query(params), @form_headers ++ headers)

defp get(url, headers) do
  :get
  |> Finch.build(url, headers)
  |> Finch.request(FutureMadeConcerts.Finch)
end

defp post(url, body, headers) do
  :post
  |> Finch.build(url, headers, body)
  |> Finch.request(FutureMadeConcerts.Finch)
end

defp put(url, body, headers) do
  :put
  |> Finch.build(url, headers, body)
  |> Finch.request(FutureMadeConcerts.Finch)
end
# Decodes the /me profile payload into a %User{}. The avatar is the
# first entry of the "images" list (nil when the user has none).
defp parse_profile(data) do
  %User{
    name: data["display_name"],
    avatar_url: get_in(data, ["images", Access.at(0), "url"]),
    product: data["product"]
  }
end
# Normalizes non-200 responses and transport failures into error tuples,
# emitting telemetry for each case.

# 401: distinguish expired tokens (body mentions "expired") from
# otherwise invalid ones so callers can trigger a refresh.
defp handle_errors({:ok, %{status: 401 = status, body: body}}) do
  if body =~ "expired" do
    track_error(:expired_token, status)
    {:error, :expired_token}
  else
    track_error(:invalid_token, status)
    {:error, :invalid_token}
  end
end

# Any other non-success HTTP status is surfaced as-is.
defp handle_errors({:ok, %{status: status, body: body}}) do
  track_error(:error_response, status, body)
  {:error, status}
end

# Transport-level failure (DNS, timeout, ...): pass the error through.
defp handle_errors({:error, reason} = error) do
  track_connection_error(reason)
  error
end
# Logs an API error by type and emits telemetry.
defp track_error(type, status) do
  Logger.warn(fn -> "Spotify HTTP Api error: #{type}" end)
  emit_api_error(%{error_type: type, status: status})
end

# Variant that also logs the response body for diagnostics.
defp track_error(type, status, body) do
  Logger.warn(fn -> "Spotify HTTP Api error: #{status}, #{body}" end)
  emit_api_error(%{error_type: type, status: status})
end

# Shared telemetry emission for API-level errors.
defp emit_api_error(metadata) do
  :telemetry.execute([:FutureMadeConcerts, :spotify, :api_error], %{count: 1}, metadata)
end
# Logs a transport-level failure and emits telemetry tagged as a
# connection error (no HTTP status available).
defp track_connection_error(reason) do
  Logger.warn(fn -> "Spotify HTTP Api connection error: #{reason}" end)

  :telemetry.execute([:FutureMadeConcerts, :spotify, :api_error], %{count: 1}, %{
    error_type: :connection_error
  })
end
# Builds OAuth2 credentials from a token-endpoint response, carrying
# over the refresh token (Spotify omits it from refresh responses).
defp parse_auth_data(data, refresh_token) do
  %Credentials{
    expires: true,
    # Absolute expiry: current unix time plus the relative "expires_in".
    expires_at: OAuth2.Util.unix_now() + data["expires_in"],
    refresh_token: refresh_token,
    # NOTE(review): "scope" is a single space-separated string, so this
    # yields a one-element list containing all scopes joined together —
    # confirm whether Credentials expects scopes split into a list.
    scopes: [data["scope"]],
    token: data["access_token"],
    token_type: data["token_type"]
  }
end
# Parses the player state payload into a %Player{}.
#
# Known "currently_playing_type" values:
#   * "unknown" - appears transiently when queueing a new album/show
#   * "ad"      - an advertisement is playing (no item to expose)
#   * "track" / "episode" - regular playback
#
# Any other type raises a FunctionClauseError, preserving the original
# fail-fast behavior on unexpected payloads.
defp parse_now_playing(data) do
  case Map.get(data, "currently_playing_type") do
    # this appears when queueing a new album/show
    "unknown" ->
      %Player{}

    "ad" ->
      %Player{}

    "track" ->
      playing_player(data, &parse_track/1)

    "episode" ->
      playing_player(data, &parse_episode_with_metadata/1)
  end
end

# Builds a %Player{} for an actively playing item. `parse_item` decodes
# the type-specific "item" payload (track vs episode); everything else
# (status, progress, device) is shared between the two branches.
defp playing_player(data, parse_item) do
  item =
    data
    |> Map.get("item")
    |> parse_item.()

  device =
    data
    |> Map.get("device")
    |> parse_device()

  status = if Map.get(data, "is_playing"), do: :playing, else: :paused

  %Player{
    status: status,
    item: item,
    progress_ms: Map.get(data, "progress_ms"),
    device: device
  }
end
# Decodes a fully expanded track payload — including its artists and
# album — into a %Track{}.
defp parse_track(item) do
  artists =
    item
    |> Map.get("artists")
    |> Enum.map(&parse_artist/1)

  album =
    item
    |> Map.get("album")
    |> parse_album()

  %Track{
    id: item["id"],
    uri: item["uri"],
    name: item["name"],
    spotify_url: parse_spotify_url(item),
    duration_ms: item["duration_ms"],
    track_number: item["track_number"],
    disc_number: item["disc_number"],
    artists: artists,
    album: album
  }
end
# Decodes a track as embedded in an album payload. Album endpoints omit
# the track's own artist/album objects, so those are :not_fetched.
defp parse_album_track(item) do
  %Track{
    id: item["id"],
    uri: item["uri"],
    name: item["name"],
    spotify_url: parse_spotify_url(item),
    duration_ms: item["duration_ms"],
    track_number: item["track_number"],
    disc_number: item["disc_number"],
    artists: :not_fetched,
    album: :not_fetched
  }
end
# Decodes an artist payload into an %Artist{}. Albums are never inlined
# in artist payloads, so album fields are :not_fetched; images are only
# present on full artist objects.
defp parse_artist(item) do
  thumbnails =
    case Map.fetch(item, "images") do
      {:ok, images} -> parse_thumbnails(images)
      :error -> :not_fetched
    end

  %Artist{
    id: item["id"],
    uri: item["uri"],
    name: item["name"],
    spotify_url: parse_spotify_url(item),
    albums: :not_fetched,
    total_albums: :not_fetched,
    genres: item["genres"],
    thumbnails: thumbnails
  }
end
# Decodes an album payload into an %Album{}. Copyrights and the track
# listing only appear on full album objects; simplified album payloads
# (e.g. inside a track) leave those fields :not_fetched.
defp parse_album(item) do
  copyrights =
    case Map.fetch(item, "copyrights") do
      {:ok, entries} -> Enum.map(entries, &parse_copyright/1)
      :error -> :not_fetched
    end

  tracks =
    case Map.fetch(item, "tracks") do
      {:ok, paging} -> paging |> Map.get("items") |> Enum.map(&parse_album_track/1)
      :error -> :not_fetched
    end

  %Album{
    id: item["id"],
    uri: item["uri"],
    name: item["name"],
    spotify_url: parse_spotify_url(item),
    album_type: item["album_type"],
    # "album_group" is only present on artist-album listings; default
    # to "album" elsewhere, matching the API's grouping semantics.
    album_group: Map.get(item, "album_group", "album"),
    release_date: item["release_date"],
    release_date_precision: item["release_date_precision"],
    artists: item |> Map.get("artists") |> Enum.map(&parse_artist/1),
    thumbnails: item |> Map.get("images") |> parse_thumbnails(),
    genres: item["genres"],
    copyrights: copyrights,
    tracks: tracks
  }
end
# Decodes a paged artist-albums response into a map with the parsed
# page of albums and the API-reported grand total.
defp parse_artist_albums(results) do
  %{
    albums: results |> Map.get("items") |> Enum.map(&parse_album/1),
    total: Map.get(results, "total")
  }
end
# Decodes a full episode payload (one that embeds its show) into an
# %Episode{}, including the show's publisher.
defp parse_episode_with_metadata(item) do
  %Episode{
    id: item["id"],
    uri: item["uri"],
    name: item["name"],
    spotify_url: parse_spotify_url(item),
    description: item["description"],
    duration_ms: item["duration_ms"],
    thumbnails: item |> Map.get("images") |> parse_thumbnails(),
    show: item |> Map.get("show") |> parse_show(),
    publisher: %Publisher{name: get_in(item, ["show", "publisher"])}
  }
end
# Decodes a simplified episode payload (no embedded show) into an
# %Episode{}; show and publisher are :not_fetched.
defp parse_episode(item) do
  %Episode{
    id: item["id"],
    uri: item["uri"],
    name: item["name"],
    spotify_url: parse_spotify_url(item),
    description: item["description"],
    duration_ms: item["duration_ms"],
    thumbnails: item |> Map.get("images") |> parse_thumbnails(),
    show: :not_fetched,
    publisher: :not_fetched
  }
end
# Decodes a show payload into a %Show{}. The episode list requires a
# separate request, so it is always :not_fetched here.
defp parse_show(item) do
  %Show{
    id: item["id"],
    uri: item["uri"],
    name: item["name"],
    spotify_url: parse_spotify_url(item),
    description: item["description"],
    episodes: :not_fetched,
    publisher: %Publisher{name: item["publisher"]},
    thumbnails: item |> Map.get("images") |> parse_thumbnails(),
    total_episodes: item["total_episodes"]
  }
end
# Buckets an image list into %{small | medium | large => url} by pixel
# height. Later entries in the same bucket overwrite earlier ones
# (Enum.into keeps the last value per key).
defp parse_thumbnails(images) do
  Enum.into(images, %{}, fn
    %{"height" => height, "url" => url} when height in 0..199 -> {:small, url}
    %{"height" => height, "url" => url} when height in 200..449 -> {:medium, url}
    # NOTE(review): there is no catch-all clause — an image with a nil
    # or negative height raises a FunctionClauseError. Confirm the API
    # never returns null heights before relying on this.
    %{"height" => height, "url" => url} when height >= 450 -> {:large, url}
  end)
end
# Builds a map of parsed search results keyed by the requested entity
# type. Each value is %{items: [...], total: api_reported_total}.
defp parse_search_results(results, types) do
  Enum.reduce(types, %{}, fn
    :album, acc ->
      albums =
        results
        |> get_in(["albums", "items"])
        |> Enum.map(&parse_album/1)

      total = get_in(results, ["albums", "total"])
      Map.put(acc, :album, %{items: albums, total: total})

    :artist, acc ->
      artists =
        results
        |> get_in(["artists", "items"])
        |> Enum.map(&parse_artist/1)

      total = get_in(results, ["artists", "total"])
      Map.put(acc, :artist, %{items: artists, total: total})

    :track, acc ->
      tracks =
        results
        |> get_in(["tracks", "items"])
        |> Enum.map(&parse_track/1)

      total = get_in(results, ["tracks", "total"])
      Map.put(acc, :track, %{items: tracks, total: total})

    :show, acc ->
      shows =
        results
        |> get_in(["shows", "items"])
        |> Enum.map(&parse_show/1)

      total = get_in(results, ["shows", "total"])
      Map.put(acc, :show, %{items: shows, total: total})

    :episode, acc ->
      episodes =
        results
        |> get_in(["episodes", "items"])
        |> Enum.map(&parse_episode/1)

      total = get_in(results, ["episodes", "total"])
      Map.put(acc, :episode, %{items: episodes, total: total})

    :playlist, acc ->
      playlists =
        results
        |> get_in(["playlists", "items"])
        |> Enum.map(&parse_playlist/1)

      total = get_in(results, ["playlists", "total"])
      # NOTE(review): every other branch keys the accumulator with the
      # singular type atom, but this one uses :playlists — callers asking
      # for :playlist must look up :playlists. Likely a typo, but fixing
      # it would break existing callers; confirm before changing.
      Map.put(acc, :playlists, %{items: playlists, total: total})
  end)
end
# Decodes a playlist payload into a %Playlist{}. Track entries are only
# present on full playlist objects; nil entries (tracks removed from
# the catalog) are dropped before parsing.
defp parse_playlist(item) do
  tracks =
    case get_in(item, ["tracks", "items"]) do
      nil ->
        :not_fetched

      entries ->
        entries
        |> Enum.map(&Map.get(&1, "track"))
        |> Enum.reject(&is_nil/1)
        |> Enum.map(&parse_track/1)
    end

  %Playlist{
    id: item["id"],
    uri: item["uri"],
    name: item["name"],
    spotify_url: parse_spotify_url(item),
    description: item["description"],
    thumbnails: item |> Map.get("images") |> parse_thumbnails(),
    tracks: tracks
  }
end
# Decodes a playback-device payload into a %Device{} (one-to-one
# field mapping with the API's device object).
defp parse_device(device) do
  %Device{
    id: device["id"],
    is_active: device["is_active"],
    is_private_session: device["is_private_session"],
    is_restricted: device["is_restricted"],
    name: device["name"],
    type: device["type"],
    volume_percent: device["volume_percent"]
  }
end
# Extracts the public open.spotify.com URL from an entity's
# "external_urls" map (nil when absent).
defp parse_spotify_url(item), do: get_in(item, ["external_urls", "spotify"])

# Decodes a raw copyright entry into a %Copyright{}.
defp parse_copyright(copyright) do
  %Copyright{
    text: copyright["text"],
    type: copyright["type"]
  }
end
end
| 24.539877 | 117 | 0.542167 |
798cce5e19a9f93f809078adcce48bc836e78e18 | 808 | ex | Elixir | lib/anchore_engine_api_server/model/content_files_response.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/content_files_response.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/content_files_response.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule AnchoreEngineAPIServer.Model.ContentFilesResponse do
@moduledoc """
File content listings from images
"""
@derive [Poison.Encoder]
defstruct [
:content,
:content_type,
:imageDigest
]
@type t :: %__MODULE__{
:content => [ContentFilesResponseContent],
:content_type => String.t,
:imageDigest => String.t
}
end
defimpl Poison.Decoder, for: AnchoreEngineAPIServer.Model.ContentFilesResponse do
import AnchoreEngineAPIServer.Deserializer
def decode(value, options) do
value
|> deserialize(:content, :list, AnchoreEngineAPIServer.Model.ContentFilesResponseContent, options)
end
end
| 25.25 | 102 | 0.740099 |
798ce42272a129be3e32d55a6b05c6279cb3a7f7 | 80 | exs | Elixir | installer/templates/phx_umbrella/apps/app_name_web/test/test_helper.exs | matthewp/phoenix | af86b95db365faada58d326e98de65c811690bc7 | [
"MIT"
] | 2 | 2016-11-01T15:01:48.000Z | 2016-11-01T15:07:20.000Z | installer/templates/phx_umbrella/apps/app_name_web/test/test_helper.exs | matthewp/phoenix | af86b95db365faada58d326e98de65c811690bc7 | [
"MIT"
] | 1 | 2020-05-26T19:38:18.000Z | 2020-05-26T19:38:18.000Z | installer/templates/phx_umbrella/apps/app_name_web/test/test_helper.exs | matthewp/phoenix | af86b95db365faada58d326e98de65c811690bc7 | [
"MIT"
] | 1 | 2020-08-31T15:29:31.000Z | 2020-08-31T15:29:31.000Z | ExUnit.start()<%= if ecto do %>
<%= adapter_config[:test_setup_all] %><% end %>
| 26.666667 | 47 | 0.625 |
798cf6adf8c2b899bdc181f9d067e4ff1a7bbea9 | 1,332 | exs | Elixir | elixir/elixir-sips/samples/agenda/test/agenda/parser.exs | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | 2 | 2015-12-09T02:16:51.000Z | 2021-07-26T22:53:43.000Z | elixir/elixir-sips/samples/agenda/test/agenda/parser.exs | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | null | null | null | elixir/elixir-sips/samples/agenda/test/agenda/parser.exs | afronski/playground-erlang | 6ac4b58b2fd717260c22a33284547d44a9b5038e | [
"MIT"
] | 1 | 2016-05-08T18:40:31.000Z | 2016-05-08T18:40:31.000Z | defmodule AgendaTest do
use ExUnit.Case
@command1 Code.string_to_quoted!("Module.function(:arg1)")
@command2 Code.string_to_quoted!("Module.function(:arg2, :arg3)")
test "parse a schedule string" do
assert Agenda.Parser.parse("0 0 0 0 0 Module.function(:arg1)") == %Agenda.Schedule{minute: [0], hour: [0], day_of_month: [0], month: [0], day_of_week: [0], command: @command1}
assert Agenda.Parser.parse("1 0 0 0 0 Module.function(:arg2, :arg3)") == %Agenda.Schedule{minute: [1], hour: [0], day_of_month: [0], month: [0], day_of_week: [0], command: @command2}
end
test "parse multiple hours in the patterns" do
assert Agenda.Parser.parse("0 1,2 0 0 0 Module.function(:arg1)") == %Agenda.Schedule{minute: [0], hour: [1,2], day_of_month: [0], month: [0], day_of_week: [0], command: @command1}
end
test "parsing a wildcard" do
assert Agenda.Parser.parse("0 0 0 0 * Module.function(:arg1)") == %Agenda.Schedule{minute: [0], hour: [0], day_of_month: [0], month: [0], day_of_week: [0,1,2,3,4,5,6], command: @command1}
end
test "parsing patterns like 'every five minutes'" do
assert Agenda.Parser.parse("*/5 0 0 0 0 Module.function(:arg1)") == %Agenda.Schedule{minute: [0,5,10,15,20,25,30,35,40,45,50,55], hour: [0], day_of_month: [0], month: [0], day_of_week: [0], command: @command1}
end
end
| 55.5 | 213 | 0.67042 |
798d0a3c326819650fadeafd357b39055c6c909f | 2,427 | exs | Elixir | mix.exs | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | null | null | null | mix.exs | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | null | null | null | mix.exs | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | null | null | null | defmodule Changelog.Mixfile do
use Mix.Project
def project do
[app: :changelog,
version: "0.0.1",
elixir: "~> 1.3",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps()]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[mod: {Changelog, []},
applications: [:bamboo, :bamboo_smtp, :phoenix, :phoenix_pubsub,
:phoenix_html, :cowboy, :logger, :phoenix_ecto, :postgrex,
:ex_machina, :httpoison, :exjsx, :con_cache, :timex_ecto,
:nimble_csv, :ex_aws, :briefly]]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[{:phoenix, "~> 1.2.0"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_ecto, "~> 3.0-rc"},
{:phoenix_html, "~> 2.3"},
{:postgrex, "~> 0.11.1"},
{:timex, "~> 3.1.0"},
{:timex_ecto, "~> 3.0"},
{:scrivener_ecto, "~> 1.0"},
{:scrivener_html, "~> 1.3.0"},
{:cmark, "~> 0.6"},
{:html_sanitize_ex, "~> 0.1.0"},
{:arc_ecto, "~> 0.5.0-rc1"},
{:ecto_enum, "~> 0.3.0"},
{:hashids, "~> 2.0"},
{:bamboo, "~> 0.7"},
{:bamboo_smtp, "~> 1.2.1"},
{:httpoison, "~> 0.9.0"},
{:con_cache, "~> 0.11.1"},
{:ex_aws, "~> 1.0.0-rc.3"},
{:exjsx, "~> 3.2.0"},
{:nimble_csv, "~> 0.1.0"},
{:sweet_xml, "~> 0.5"},
{:briefly, "~> 0.3"},
{:cowboy, "~> 1.0"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:ex_machina, "~> 1.0"},
{:credo, "~> 0.4", only: [:dev, :test]},
{:mock, "~> 0.2.0", only: :test}]
end
# Aliases are shortcut or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"]]
end
end
| 31.519481 | 78 | 0.542645 |
798d17e99d0d04dfd64f23770a8a3722cab74c82 | 1,144 | ex | Elixir | lib/generator.ex | arisechurch/ash_thrift | 96416cbb34d543758c52813b1137959b309eb602 | [
"MIT"
] | null | null | null | lib/generator.ex | arisechurch/ash_thrift | 96416cbb34d543758c52813b1137959b309eb602 | [
"MIT"
] | null | null | null | lib/generator.ex | arisechurch/ash_thrift | 96416cbb34d543758c52813b1137959b309eb602 | [
"MIT"
] | null | null | null | defmodule AshThrift.Generator do
alias Ash.Dsl.Extension
alias Ash.Resource.Attribute
alias AshThrift.Conversion
alias AshThrift.Field
def resource(resource) do
Extension.get_persisted(resource, :thrift)
|> Enum.map(&thrift_struct/1)
end
def namespace({language, namespace}),
do: """
namespace #{language} #{namespace}
"""
def thrift_struct({name, fields}) do
fields = Enum.map(fields, &field/1)
"""
struct #{name} {
#{fields}\
}
"""
end
def field(
{%Field{id: id, optional: optional},
%Attribute{
name: name,
type: type,
allow_nil?: allow_nil?
}}
) do
optional_or_required =
if optional or allow_nil? do
"optional"
else
"required"
end
thrift_type = Conversion.type(type)
thrift_name = name(name)
"""
#{id}: #{optional_or_required} #{thrift_type} #{thrift_name};
"""
end
def name(name) do
Macro.camelize(Atom.to_string(name))
|> then(fn <<first::utf8, rest::binary>> ->
String.downcase(<<first::utf8>>) <> rest
end)
end
end
| 19.724138 | 65 | 0.585664 |
798d310ac71c26f99395c41171cf0a2c1a371673 | 3,601 | ex | Elixir | lib/input_event.ex | hez/input_event | e831a1053d7d837ccadfb75d9e70c70b56895eab | [
"Apache-2.0"
] | 20 | 2018-09-23T18:12:14.000Z | 2020-04-18T12:37:34.000Z | lib/input_event.ex | hez/input_event | e831a1053d7d837ccadfb75d9e70c70b56895eab | [
"Apache-2.0"
] | 7 | 2018-09-17T05:40:23.000Z | 2020-06-06T20:17:53.000Z | lib/input_event.ex | hez/input_event | e831a1053d7d837ccadfb75d9e70c70b56895eab | [
"Apache-2.0"
] | 6 | 2020-12-04T21:20:39.000Z | 2022-03-16T19:26:24.000Z | defmodule InputEvent do
use GenServer
alias InputEvent.{Info, Report}
@input_event_report 1
@input_event_version 2
@input_event_name 3
@input_event_id 4
@input_event_report_info 5
@input_event_ready 6
@moduledoc """
Elixir interface to Linux input event devices
"""
@doc """
Start a GenServer that reports events from the specified input event device
"""
@spec start_link(Path.t()) :: :ignore | {:error, any()} | {:ok, pid()}
def start_link(path) do
GenServer.start_link(__MODULE__, [path, self()])
end
@doc """
Return information about this input event device
"""
@spec info(GenServer.server()) :: Info.t()
def info(server) do
GenServer.call(server, :info)
end
@doc """
Stop the InputEvent GenServer.
"""
@spec stop(GenServer.server()) :: :ok
def stop(server) do
GenServer.stop(server)
end
@doc """
Scan the system for input devices and return information on each one.
"""
@spec enumerate() :: [{String.t(), Info.t()}]
defdelegate enumerate(), to: InputEvent.Enumerate
@impl GenServer
def init([path, caller]) do
executable = :code.priv_dir(:input_event) ++ '/input_event'
port =
Port.open({:spawn_executable, executable}, [
{:args, [path]},
{:packet, 2},
:use_stdio,
:binary,
:exit_status
])
state = %{port: port, path: path, info: %Info{}, callback: caller, ready: false, deferred: []}
{:ok, state}
end
@impl GenServer
def handle_call(:info, _from, %{ready: true} = state) do
{:reply, state.info, state}
end
def handle_call(:info, from, state) do
{:noreply, %{state | deferred: [from | state.deferred]}}
end
@impl GenServer
def handle_info({_port, {:data, data}}, state) do
new_state = process_notification(state, data)
{:noreply, new_state}
end
def handle_info({_port, {:exit_status, _rc}}, state) do
send(state.callback, {:input_event, state.path, :disconnect})
{:stop, :port_crashed, state}
end
def handle_info(other, state) do
IO.puts("Not expecting: #{inspect(other)}")
send(state.callback, {:input_event, state.path, :error})
{:stop, :error, state}
end
defp process_notification(state, <<@input_event_report, _sub, raw_events::binary>>) do
Enum.each(Report.decode(raw_events), fn events ->
send(state.callback, {:input_event, state.path, events})
end)
state
end
defp process_notification(state, <<@input_event_version, _sub, version::binary>>) do
new_info = %{state.info | input_event_version: version}
%{state | info: new_info}
end
defp process_notification(state, <<@input_event_name, _sub, name::binary>>) do
new_info = %{state.info | name: name}
%{state | info: new_info}
end
defp process_notification(
state,
<<@input_event_id, _sub, bus::native-16, vendor::native-16, product::native-16,
version::native-16>>
) do
new_info = %{state.info | bus: bus, vendor: vendor, product: product, version: version}
%{state | info: new_info}
end
defp process_notification(state, <<@input_event_report_info, type, raw_report_info::binary>>) do
old_report_info = state.info.report_info
report_info = Info.decode_report_info(type, raw_report_info)
new_info = %{state.info | report_info: [report_info | old_report_info]}
%{state | info: new_info}
end
defp process_notification(state, <<@input_event_ready, _sub>>) do
Enum.each(state.deferred, fn client -> GenServer.reply(client, state.info) end)
%{state | ready: true, deferred: []}
end
end
| 27.914729 | 98 | 0.660372 |
798d3af8286df8711bbf31235a49c136a75c290e | 138 | ex | Elixir | test/support/components/void_component.ex | rktjmp/temple | 6fe46cbb4998477d76147fa95c9fd9c7841545ef | [
"MIT"
] | null | null | null | test/support/components/void_component.ex | rktjmp/temple | 6fe46cbb4998477d76147fa95c9fd9c7841545ef | [
"MIT"
] | null | null | null | test/support/components/void_component.ex | rktjmp/temple | 6fe46cbb4998477d76147fa95c9fd9c7841545ef | [
"MIT"
] | null | null | null | defmodule Temple.Components.VoidComponent do
use Temple.Component
render do
div class: "void!!" do
"bar"
end
end
end
| 13.8 | 44 | 0.666667 |
798d3c227abf120b8907de3b4311147d09f0d183 | 1,582 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/nesting_level.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/slides/lib/google_api/slides/v1/model/nesting_level.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/nesting_level.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Slides.V1.Model.NestingLevel do
@moduledoc """
Contains properties describing the look and feel of a list bullet at a given
level of nesting.
## Attributes
* `bulletStyle` (*type:* `GoogleApi.Slides.V1.Model.TextStyle.t`, *default:* `nil`) - The style of a bullet at this level of nesting.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:bulletStyle => GoogleApi.Slides.V1.Model.TextStyle.t()
}
field(:bulletStyle, as: GoogleApi.Slides.V1.Model.TextStyle)
end
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.NestingLevel do
def decode(value, options) do
GoogleApi.Slides.V1.Model.NestingLevel.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.NestingLevel do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.958333 | 137 | 0.742099 |
798d55bbb588d24c21667d6840a87855c83ad66c | 352 | exs | Elixir | project/gamenect/priv/repo/seeds.exs | daemonfire300/learning_phoenix | 0ab0cb1feacda4ec5c0ab0a5324835a150db22ab | [
"MIT"
] | null | null | null | project/gamenect/priv/repo/seeds.exs | daemonfire300/learning_phoenix | 0ab0cb1feacda4ec5c0ab0a5324835a150db22ab | [
"MIT"
] | null | null | null | project/gamenect/priv/repo/seeds.exs | daemonfire300/learning_phoenix | 0ab0cb1feacda4ec5c0ab0a5324835a150db22ab | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Gamenect.Repo.insert!(%Gamenect.SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.333333 | 61 | 0.707386 |
798d625fb593258b823caaa98fc1f80ee8715ca5 | 1,816 | ex | Elixir | apps/firestorm_web/lib/firestorm_web/aws/upload_signature.ex | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | 10 | 2017-06-28T08:06:52.000Z | 2022-03-19T17:49:21.000Z | apps/firestorm_web/lib/firestorm_web/aws/upload_signature.ex | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | null | null | null | apps/firestorm_web/lib/firestorm_web/aws/upload_signature.ex | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | 2 | 2017-10-21T12:01:02.000Z | 2021-01-29T10:26:22.000Z | defmodule FirestormWeb.AWS.UploadSignature do
@service "s3"
@aws_request "aws4_request"
def signature(filename, mimetype) do
policy = policy(filename, mimetype)
%{
key: filename,
date: get_date(),
content_type: mimetype,
acl: "public-read",
success_action_status: "201",
action: bucket_url(),
aws_access_key_id: aws_config()[:access_key_id],
policy: policy,
credential: credential(),
signature: sign(policy)
}
end
def get_date() do
datetime = Timex.now
{:ok, t} = Timex.format(datetime, "%Y%m%d", :strftime)
t
end
defp credential() do
credential(aws_config()[:access_key_id], get_date())
end
defp credential(key, date) do
key <> "/" <> date <> "/" <> region() <> "/" <> @service <> "/" <> @aws_request
end
defp policy(key, mimetype, expire_after_min \\ 60) do
%{
expiration: min_from_now(expire_after_min),
conditions: [
%{bucket: bucket_name()},
%{acl: "public-read"},
["starts-with", "$Content-Type", mimetype],
["starts-with", "$key", key],
%{success_action_status: "201"}
]
}
|> Poison.encode!()
|> Base.encode64()
end
defp min_from_now(minutes) do
import Timex
now()
|> shift(minutes: minutes)
|> format!("{ISO:Extended:Z}")
end
defp sign(policy) do
:sha
|> :crypto.hmac(secret_access_key(), policy)
|> Base.encode64()
end
defp bucket_name() do
aws_config()[:bucket]
end
defp region() do
aws_config()[:region]
end
defp secret_access_key() do
aws_config()[:secret_access_key]
end
defp bucket_url() do
"https://s3-#{region()}.amazonaws.com/#{bucket_name()}"
end
defp aws_config() do
Application.get_env(:firestorm_web, :aws)
end
end
| 21.364706 | 83 | 0.603524 |
798d967801cb79a465444436d61902df7b52aaa4 | 937 | ex | Elixir | lib/db.ex | craigspaeth/eql | 6c8d0db667c1871ec60654facd84b7b739b6dba3 | [
"MIT"
] | null | null | null | lib/db.ex | craigspaeth/eql | 6c8d0db667c1871ec60654facd84b7b739b6dba3 | [
"MIT"
] | null | null | null | lib/db.ex | craigspaeth/eql | 6c8d0db667c1871ec60654facd84b7b739b6dba3 | [
"MIT"
] | null | null | null | defmodule Db do
def init do
db = Mongo.connect! |> Mongo.db("eql")
Agent.start_link fn -> [db] end, name: __MODULE__
end
def db do
Agent.get(__MODULE__, fn list -> List.first list end)
end
def find_one(col, query) do
db
|> Mongo.Db.collection(Atom.to_string col)
|> Mongo.Collection.find(query)
|> Enum.to_list
|> List.first
end
def find(col, query) do
db
|> Mongo.Db.collection(Atom.to_string col)
|> Mongo.Collection.find(query)
|> Enum.to_list
end
def save(col, data) do
if data[:_id] && length(find_one col, %{_id: data[:_id]}) > 0 do
db
|> Mongo.Db.collection(Atom.to_string col)
|> Mongo.Collection.update(%{_id: data[:_id]}, data)
find_one col, %{_id: data[:_id]}
else
collection = db |> Mongo.Db.collection(Atom.to_string col)
Mongo.Collection.insert_one data, collection
find_one col, %{}
end
end
end
| 23.425 | 68 | 0.621131 |
798d98758511ef77533f83fa99e214448b5acee6 | 1,422 | ex | Elixir | elixir/advent_of_code/lib/2021/day2.ex | rhishikeshj/advent-of-code | 71231217baaaa27994a2fd1980fe800426aef4bc | [
"Apache-2.0"
] | null | null | null | elixir/advent_of_code/lib/2021/day2.ex | rhishikeshj/advent-of-code | 71231217baaaa27994a2fd1980fe800426aef4bc | [
"Apache-2.0"
] | null | null | null | elixir/advent_of_code/lib/2021/day2.ex | rhishikeshj/advent-of-code | 71231217baaaa27994a2fd1980fe800426aef4bc | [
"Apache-2.0"
] | 1 | 2020-01-02T06:41:43.000Z | 2020-01-02T06:41:43.000Z | defmodule AOC.Day2 do
@moduledoc """
Solution to Day 2 of the Advent of code 2021
https://adventofcode.com/2021/day/2
"""
@doc """
Read the input file
"""
@spec get_inputs(File) :: [String.t()]
def get_inputs(f \\ "lib/inputs/day2.txt"),
do:
File.read!(f)
|> String.trim()
|> String.split("\n")
|> Enum.map(&(String.split(&1, " ") |> then(fn [c, v] -> [c, String.to_integer(v)] end)))
@doc """
Given a set of commands in the form [["forward", 5] ["down", 3] ["up", 7]]
figure out the result of multiplying final horizontal and depth values
"""
def navigate(commands \\ get_inputs()) do
commands
|> Enum.reduce({0, 0}, fn [c, v], {h, d} ->
case c do
"forward" -> {h + v, d}
"up" -> {h, d - v}
"down" -> {h, d + v}
end
end)
|> then(fn {h, d} -> h * d end)
end
@doc """
Given a set of commands in the form [["forward", 5] ["down", 3] ["up", 7]]
figure out the result of multiplying final horizontal and depth values. In this version
the up and down values affect aim of the navigation
"""
def navigate_with_aims(commands \\ get_inputs()) do
commands
|> Enum.reduce({0, 0, 0}, fn [c, v], {h, d, a} ->
case c do
"forward" -> {h + v, d + a * v, a}
"up" -> {h, d, a - v}
"down" -> {h, d, a + v}
end
end)
|> then(fn {h, d, _a} -> h * d end)
end
end
| 26.830189 | 95 | 0.528833 |
798da2d473795a5fc4b363b87b61095fa4f97b6e | 859 | exs | Elixir | daily_meals/test/daily_meals/users/update_test.exs | joabehenrique/daily-meals | ea8919a45dc155046289c96854348bd7ed89c1fb | [
"MIT"
] | null | null | null | daily_meals/test/daily_meals/users/update_test.exs | joabehenrique/daily-meals | ea8919a45dc155046289c96854348bd7ed89c1fb | [
"MIT"
] | null | null | null | daily_meals/test/daily_meals/users/update_test.exs | joabehenrique/daily-meals | ea8919a45dc155046289c96854348bd7ed89c1fb | [
"MIT"
] | null | null | null | defmodule DailyMeals.User.UpdateTest do
use DailyMeals.DataCase
import DailyMeals.Factory
alias DailyMeals.User
describe "Update Meal" do
test "when a valid id is given, returns the meal" do
user_params = build(:users_params)
{:ok, %User{id: id}} = DailyMeals.create_user(user_params)
response = DailyMeals.update_user(%{"id" => id, "name" => "Jp Alves"})
assert {:ok,
%DailyMeals.User{
cpf: "12345678900",
email: "[email protected]",
name: "Jp Alves",
id: ^id
}} = response
end
test "when an invalid id is given, returns an error" do
id = 1
response = DailyMeals.update_user(%{"id" => id})
assert {:error, %DailyMeals.Error{result: "User not found", status: :not_found}} = response
end
end
end
| 26.030303 | 97 | 0.585565 |
798dac66ec5c900e071a4f5c927f819989744925 | 1,243 | ex | Elixir | example_json_parse/lib/json_reply/product_catalog/product_entry.ex | cuevacreativa/Domo | 5f2f5ff3cb57dfe774408dcae6ccb5b79d1a3089 | [
"MIT"
] | null | null | null | example_json_parse/lib/json_reply/product_catalog/product_entry.ex | cuevacreativa/Domo | 5f2f5ff3cb57dfe774408dcae6ccb5b79d1a3089 | [
"MIT"
] | null | null | null | example_json_parse/lib/json_reply/product_catalog/product_entry.ex | cuevacreativa/Domo | 5f2f5ff3cb57dfe774408dcae6ccb5b79d1a3089 | [
"MIT"
] | null | null | null | defmodule JsonReply.ProductCatalog.ProductEntry do
@moduledoc false
use Domo
alias JsonReply.ProductCatalog.ImageAsset
defstruct product_name: "",
slug: "",
image_asset_id: ImageAsset.id_placeholder(),
price: 0,
tags: [],
updated_at: ~N[2000-01-01 23:00:07]
@type t :: %__MODULE__{
product_name: String.t(),
slug: String.t(),
image_asset_id: ImageAsset.id(),
price: non_neg_integer(),
tags: [String.t()],
updated_at: NaiveDateTime.t()
}
defimpl MapShaper.Target do
def translate_source_map(_value, map) do
updated_at =
map
|> get_in(["sys", "updatedAt"])
|> NaiveDateTime.from_iso8601()
|> then(fn {:ok, date_time} -> date_time end)
fields =
map
|> Map.get("fields", %{})
|> Map.take(["productName", "slug", "image", "price", "tags"])
image_asset_id =
fields
|> ExJSONPath.eval("$.image['en-US'][0].sys.id")
|> then(fn {:ok, list} -> list end)
|> List.first()
fields
|> Map.put("updated_at", updated_at)
|> Map.put("image_asset_id", image_asset_id)
end
end
end
| 25.367347 | 70 | 0.547868 |
798dbc432b423a4900c581f17ab85aac1d3618df | 1,715 | ex | Elixir | lib/ellie_web/controllers/oembed_controller.ex | HenkPoley/ellie | 045212b56142341fc95b79659c3ca218b0d5d282 | [
"BSD-3-Clause"
] | 377 | 2018-04-05T03:36:00.000Z | 2022-03-30T19:12:44.000Z | lib/ellie_web/controllers/oembed_controller.ex | HenkPoley/ellie | 045212b56142341fc95b79659c3ca218b0d5d282 | [
"BSD-3-Clause"
] | 91 | 2018-05-24T21:56:06.000Z | 2022-02-26T03:54:04.000Z | lib/ellie_web/controllers/oembed_controller.ex | HenkPoley/ellie | 045212b56142341fc95b79659c3ca218b0d5d282 | [
"BSD-3-Clause"
] | 34 | 2018-05-29T03:54:35.000Z | 2022-01-13T07:12:46.000Z | defmodule EllieWeb.OembedController do
use EllieWeb, :controller
alias Ellie.Types.PrettyId
alias Ellie.Domain.Api
def oembed(conn, %{"url" => url} = params) do
parsed = URI.parse(url)
current = URI.parse(current_url(conn))
if parsed.authority != current.authority do
conn
|> send_resp(404, "")
|> halt()
else
case String.split(String.trim(parsed.path, "/"), "/") do
[project_id_string, revision_number_string] ->
for_legacy_url(conn, project_id_string, revision_number_string, params)
[id_string] ->
for_current_url(conn, id_string, params)
_ ->
conn
|> send_resp(404, "")
|> halt()
end
end
end
defp for_legacy_url(conn, project_id_string, revision_number_string, params) do
with {revision_number, _str} <- Integer.parse(revision_number_string),
{:ok, project_id} <- PrettyId.cast(project_id_string),
revision when not is_nil(revision) <- Api.retrieve_revision(project_id, revision_number) do
render(conn, "index.json",
revision: revision,
width: Map.get(params, "width", 800),
height: Map.get(params, "height", 400)
)
else
_ ->
conn
|> send_resp(404, "")
|> halt()
end
end
defp for_current_url(conn, id_string, params) do
with {:ok, id} <- PrettyId.cast(id_string),
revision when not is_nil(revision) <- Api.retrieve_revision(id) do
render(conn, "index.json",
revision: revision,
width: Map.get(params, "width", 800),
height: Map.get(params, "height", 400)
)
else
_ -> send_resp(conn, 404, "")
end
end
end
| 28.583333 | 100 | 0.608163 |
798dcb044faacde88e34d53d1b4c928a119ddb82 | 1,640 | ex | Elixir | lib/client/detected.ex | jmerriweather/pn532 | ec072d9270deec19b9acbe15d45b28222a651189 | [
"MIT"
] | null | null | null | lib/client/detected.ex | jmerriweather/pn532 | ec072d9270deec19b9acbe15d45b28222a651189 | [
"MIT"
] | null | null | null | lib/client/detected.ex | jmerriweather/pn532 | ec072d9270deec19b9acbe15d45b28222a651189 | [
"MIT"
] | null | null | null | defmodule PN532.Client.Detected do
@moduledoc """
Functions for when we are in the connected state
"""
require Logger
def detecting(:cast, :stop_target_detection, data) do
{:next_state, :connected, data}
end
def detected(:internal, {:cards_detected, cards},
data = %{current_cards: current_cards, handler: handler, connection: connection, connection_options: connection_options}) do
handler_response =
if current_cards !== cards do
apply(handler, :handle_event, [:cards_detected, cards, PN532.HandlerClient, PN532.HandlerClient.new(connection, connection_options)])
end
case handler_response do
{:noreply, %{connection: new_connection, connection_options: connection_options, detected_cards: detected_cards}} ->
{:next_state, :detecting, %{data | current_cards: cards, connection: new_connection, connection_options: connection_options, detected_cards: detected_cards}, [{:state_timeout, 1000, :poll_for_cards}]}
{:noreply, %{connection: new_connection, connection_options: connection_options}} ->
{:next_state, :detecting, %{data | current_cards: cards, connection: new_connection, connection_options: connection_options}, [{:state_timeout, 1000, :poll_for_cards}]}
_ ->
{:next_state, :detecting, data, [{:state_timeout, 1000, :poll_for_cards}]}
end
end
def detected(type, event, data) do
case PN532.Client.Connected.connected(type, event, data) do
{option, data, actions} when is_list(actions) ->
{option, data, actions}
{option, actions} when is_list(actions) ->
{option, actions}
end
end
end
| 43.157895 | 208 | 0.712195 |
798e2d239940a869bd62ff9e2c970e050084f0f2 | 2,679 | ex | Elixir | lib/spell.ex | reverie/spell | 0688cb768b43871a2896a5588194117808dec1e8 | [
"Apache-2.0"
] | 1 | 2016-01-19T00:42:00.000Z | 2016-01-19T00:42:00.000Z | lib/spell.ex | reverie/spell | 0688cb768b43871a2896a5588194117808dec1e8 | [
"Apache-2.0"
] | null | null | null | lib/spell.ex | reverie/spell | 0688cb768b43871a2896a5588194117808dec1e8 | [
"Apache-2.0"
] | null | null | null | defmodule Spell do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
pid = spawn(Spell, :init, [])
{:ok, pid}
end
def init do
import Supervisor.Spec, warn: false
Process.flag(:trap_exit, true)
upa_agent = %Spell.User{
user_id: :upa_agent,
name: "Wompus",
current_room: nil
}
treehouse = %Spell.Room{
room_id: :treehouse,
name: "Treehouse",
exits: %{
:d => :dungeon
}
}
dungeon = %Spell.Room{
room_id: :dungeon,
name: "Ye Olde Dungeon",
exits: %{
:u => :treehouse
}
}
# TODO: room called Jeffrey
users = [upa_agent]
rooms = [treehouse, dungeon]
user_children = Enum.map(users, fn u -> worker(Spell.User, [u], [id: u.user_id]) end)
room_children = Enum.map(rooms, fn r -> worker(Spell.Room, [r], [id: r.room_id]) end)
children = Enum.concat([user_children, room_children])
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Spell.Supervisor]
result = Supervisor.start_link(children, opts)
loop([])
end
def loop(clients) do
receive do
{sender, :connect, username} ->
Process.link(sender)
IO.puts(Atom.to_string(username) <> " joined!")
broadcast({:info, Atom.to_string(username) <> " joined the chat"}, clients)
loop([{username, sender} | clients])
{sender, :broadcast, msg} ->
broadcast({:new_msg, find(sender, clients), msg}, clients)
loop(clients)
{:EXIT, pid, _} ->
broadcast({:info, Atom.to_string(find(pid, clients)) <> " left the chat."}, clients)
IO.puts("caught an exit")
loop(clients |> Enum.filter(fn {_, rec} -> rec != pid end))
_ -> raise "Unexpected message in Spell receive loop"
end
#IO.puts ("User has: " <> to_string(Spell.User.get_state(:upa_agent)))
#action = IO.gets "Whatcha gonna do about it? >"
#Spell.User.set_state(:upa_agent, action)
#IO.puts ("User NOW has: " <> to_string(Spell.User.get_state(:upa_agent)))
#result
end
defp broadcast(msg, clients) do
Enum.each clients, fn {_, receiver_pid} -> send(receiver_pid, msg) end
end
defp find(sender, [{u, p} | _]) when p == sender, do: u
defp find(sender, [_ | t]), do: find(sender, t)
end
| 33.074074 | 92 | 0.568122 |
798e48179655476ce3c2419fbb4dce3a98932856 | 490 | exs | Elixir | test/cassandra_graphql_web/views/error_view_test.exs | sgeos/cassandra-graphql | 9d566c23d6499ff7a3a6f05fec3eef2dce54bc0d | [
"BSD-3-Clause"
] | null | null | null | test/cassandra_graphql_web/views/error_view_test.exs | sgeos/cassandra-graphql | 9d566c23d6499ff7a3a6f05fec3eef2dce54bc0d | [
"BSD-3-Clause"
] | null | null | null | test/cassandra_graphql_web/views/error_view_test.exs | sgeos/cassandra-graphql | 9d566c23d6499ff7a3a6f05fec3eef2dce54bc0d | [
"BSD-3-Clause"
] | null | null | null | defmodule CassandraGraphqlWeb.ErrorViewTest do
use CassandraGraphqlWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.json" do
assert render(CassandraGraphqlWeb.ErrorView, "404.json", []) == %{errors: %{detail: "Not Found"}}
end
test "renders 500.json" do
assert render(CassandraGraphqlWeb.ErrorView, "500.json", []) ==
%{errors: %{detail: "Internal Server Error"}}
end
end
| 30.625 | 101 | 0.706122 |
798e598629e5540189042bf446f02822f24b4fe3 | 13,670 | ex | Elixir | backend/lib/edgehog/devices.ex | szakhlypa/edgehog | b1193c26f403132dead6964c1c052e5dcae533af | [
"Apache-2.0"
] | null | null | null | backend/lib/edgehog/devices.ex | szakhlypa/edgehog | b1193c26f403132dead6964c1c052e5dcae533af | [
"Apache-2.0"
] | null | null | null | backend/lib/edgehog/devices.ex | szakhlypa/edgehog | b1193c26f403132dead6964c1c052e5dcae533af | [
"Apache-2.0"
] | null | null | null | #
# This file is part of Edgehog.
#
# Copyright 2021 SECO Mind Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#
defmodule Edgehog.Devices do
  @moduledoc """
  The Devices context.

  Manages hardware types and system models (with their part numbers,
  localized descriptions and pictures) plus device tags. All reads and
  writes go through `Edgehog.Repo` and are scoped to the current tenant
  (see `Repo.get_tenant_id/0`).
  """
  import Ecto.Query, warn: false
  alias Ecto.Multi
  alias Edgehog.Repo
  alias Edgehog.Devices.SystemModel
  alias Edgehog.Devices.SystemModelDescription
  alias Edgehog.Devices.SystemModelPartNumber
  alias Edgehog.Devices.HardwareType
  alias Edgehog.Devices.HardwareTypePartNumber
  alias Edgehog.Devices.Tag
  alias Edgehog.Assets
  @doc """
  Returns the list of hardware_types, with part numbers preloaded.
  ## Examples
      iex> list_hardware_types()
      [%HardwareType{}, ...]
  """
  def list_hardware_types do
    Repo.all(HardwareType)
    |> Repo.preload(:part_numbers)
  end
  @doc """
  Gets a single hardware_type, with part numbers preloaded.
  Returns `{:error, :not_found}` if the Hardware type does not exist.
  ## Examples
      iex> fetch_hardware_type(123)
      {:ok, %HardwareType{}}
      iex> fetch_hardware_type(456)
      {:error, :not_found}
  """
  def fetch_hardware_type(id) do
    case Repo.get(HardwareType, id) do
      %HardwareType{} = hardware_type ->
        {:ok, Repo.preload(hardware_type, :part_numbers)}
      nil ->
        {:error, :not_found}
    end
  end
  @doc """
  Creates a hardware_type. At least one entry under the `:part_numbers`
  key of `attrs` is required.
  ## Examples
      iex> create_hardware_type(%{field: value})
      {:ok, %HardwareType{}}
      iex> create_hardware_type(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_hardware_type(attrs \\ %{}) do
    # :part_numbers is popped out since it creates/links associated records
    # rather than being a plain column.
    {part_numbers, attrs} = Map.pop(attrs, :part_numbers, [])
    changeset =
      %HardwareType{tenant_id: Repo.get_tenant_id()}
      |> HardwareType.changeset(attrs)
    Multi.new()
    # First resolve the part numbers onto the changeset, then insert the
    # hardware type, all within a single transaction.
    |> Multi.run(:assoc_part_numbers, fn _repo, _changes ->
      {:ok,
       insert_or_get_part_numbers(HardwareTypePartNumber, changeset, part_numbers, required: true)}
    end)
    |> Multi.insert(:hardware_type, fn %{assoc_part_numbers: changeset} ->
      changeset
    end)
    |> Repo.transaction()
    |> case do
      {:ok, %{hardware_type: hardware_type}} ->
        {:ok, Repo.preload(hardware_type, :part_numbers)}
      {:error, _failed_operation, failed_value, _changes_so_far} ->
        {:error, failed_value}
    end
  end
  # Resolves `part_numbers` (a list of part number strings) into `schema`
  # records, creating missing ones, and puts them on `changeset` as the
  # :part_numbers association.
  #
  # With `required: true`, an empty list adds a validation error to the
  # changeset instead of leaving it untouched.
  defp insert_or_get_part_numbers(schema, changeset, part_numbers, opts \\ [])
  defp insert_or_get_part_numbers(_schema, changeset, [], opts) do
    if opts[:required] do
      Ecto.Changeset.add_error(changeset, :part_numbers, "are required")
    else
      changeset
    end
  end
  defp insert_or_get_part_numbers(schema, changeset, part_numbers, _opts) do
    # insert_all does not autogenerate timestamps, so set them explicitly.
    timestamp =
      NaiveDateTime.utc_now()
      |> NaiveDateTime.truncate(:second)
    maps =
      Enum.map(
        part_numbers,
        &%{
          tenant_id: Repo.get_tenant_id(),
          part_number: &1,
          inserted_at: timestamp,
          updated_at: timestamp
        }
      )
    # TODO: check for conflicts (i.e. part numbers existing but associated with another hardware type)
    Repo.insert_all(schema, maps, on_conflict: :nothing)
    # Read back all matching rows (pre-existing and freshly inserted) so the
    # association contains full structs.
    query = from pn in schema, where: pn.part_number in ^part_numbers
    part_numbers = Repo.all(query)
    Ecto.Changeset.put_assoc(changeset, :part_numbers, part_numbers)
  end
  @doc """
  Updates a hardware_type.
  ## Examples
      iex> update_hardware_type(hardware_type, %{field: new_value})
      {:ok, %HardwareType{}}
      iex> update_hardware_type(hardware_type, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_hardware_type(%HardwareType{} = hardware_type, attrs) do
    {part_numbers, attrs} = Map.pop(attrs, :part_numbers, [])
    changeset = HardwareType.changeset(hardware_type, attrs)
    Multi.new()
    # Unlike create, part numbers are optional here (no required: true).
    |> Multi.run(:assoc_part_numbers, fn _repo, _changes ->
      {:ok, insert_or_get_part_numbers(HardwareTypePartNumber, changeset, part_numbers)}
    end)
    |> Multi.update(:hardware_type, fn %{assoc_part_numbers: changeset} ->
      changeset
    end)
    |> Repo.transaction()
    |> case do
      {:ok, %{hardware_type: hardware_type}} ->
        {:ok, Repo.preload(hardware_type, :part_numbers)}
      {:error, _failed_operation, failed_value, _changes_so_far} ->
        {:error, failed_value}
    end
  end
  @doc """
  Deletes a hardware_type.
  ## Examples
      iex> delete_hardware_type(hardware_type)
      {:ok, %HardwareType{}}
      iex> delete_hardware_type(hardware_type)
      {:error, %Ecto.Changeset{}}
  """
  def delete_hardware_type(%HardwareType{} = hardware_type) do
    hardware_type
    |> HardwareType.delete_changeset()
    |> Repo.delete()
  end
  @doc """
  Returns an `%Ecto.Changeset{}` for tracking hardware_type changes.
  ## Examples
      iex> change_hardware_type(hardware_type)
      %Ecto.Changeset{data: %HardwareType{}}
  """
  def change_hardware_type(%HardwareType{} = hardware_type, attrs \\ %{}) do
    HardwareType.changeset(hardware_type, attrs)
  end
  @doc """
  Returns the list of system_models, with part numbers and hardware type
  preloaded.
  ## Examples
      iex> list_system_models()
      [%SystemModel{}, ...]
  """
  def list_system_models do
    Repo.all(SystemModel)
    |> Repo.preload([:part_numbers, :hardware_type])
  end
  @doc """
  Gets a single system_model, with part numbers and hardware type preloaded.
  Returns `{:error, :not_found}` if the System Model does not exist.
  ## Examples
      iex> fetch_system_model(123)
      {:ok, %SystemModel{}}
      iex> fetch_system_model(456)
      {:error, :not_found}
  """
  def fetch_system_model(id) do
    case Repo.get(SystemModel, id) do
      %SystemModel{} = system ->
        {:ok, Repo.preload(system, [:part_numbers, :hardware_type])}
      nil ->
        {:error, :not_found}
    end
  end
  @doc """
  Preloads only descriptions with a specific locale for an `SystemModel` (or a list of them).
  """
  def preload_localized_descriptions_for_system_model(model_or_models, locale) do
    descriptions_preload = SystemModelDescription.localized(locale)
    Repo.preload(model_or_models, descriptions: descriptions_preload)
  end
  @doc """
  Returns a query that selects only `SystemModelDescription` with a specific locale.
  """
  def localized_system_model_description_query(locale) do
    SystemModelDescription.localized(locale)
  end
  @doc """
  Creates a system_model belonging to the given hardware type. At least one
  entry under the `:part_numbers` key of `attrs` is required.
  ## Examples
      iex> create_system_model(hardware_type, %{field: value})
      {:ok, %SystemModel{}}
      iex> create_system_model(hardware_type, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_system_model(%HardwareType{id: hardware_type_id}, attrs \\ %{}) do
    {part_numbers, attrs} = Map.pop(attrs, :part_numbers, [])
    changeset =
      %SystemModel{tenant_id: Repo.get_tenant_id(), hardware_type_id: hardware_type_id}
      |> SystemModel.changeset(attrs)
    Multi.new()
    |> Multi.run(:assoc_part_numbers, fn _repo, _changes ->
      {:ok,
       insert_or_get_part_numbers(SystemModelPartNumber, changeset, part_numbers, required: true)}
    end)
    |> Multi.insert(:system_model, fn %{assoc_part_numbers: changeset} ->
      changeset
    end)
    # If a :picture_file was provided, upload it after the insert and store
    # the resulting URL on the record (still inside the transaction).
    |> Multi.run(:upload_system_model_picture, fn repo, %{system_model: system_model} ->
      with {:ok, picture_file} <- Ecto.Changeset.fetch_change(changeset, :picture_file),
           {:ok, picture_url} <-
             Assets.upload_system_model_picture(system_model, picture_file) do
        change_system_model(system_model, %{picture_url: picture_url})
        |> repo.update()
      else
        # No :picture_file, no need to change
        :error -> {:ok, system_model}
        # Storage is disabled, ignore for now
        {:error, :storage_disabled} -> {:ok, system_model}
        {:error, reason} -> {:error, reason}
      end
    end)
    |> Repo.transaction()
    |> case do
      {:ok, %{upload_system_model_picture: system_model}} ->
        {:ok, Repo.preload(system_model, [:part_numbers, :hardware_type])}
      {:error, _failed_operation, failed_value, _changes_so_far} ->
        {:error, failed_value}
    end
  end
  @doc """
  Updates a system_model, handling picture upload, replacement and deletion.
  ## Examples
      iex> update_system_model(system_model, %{field: new_value})
      {:ok, %SystemModel{}}
      iex> update_system_model(system_model, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_system_model(%SystemModel{} = system_model, attrs) do
    {part_numbers, attrs} = Map.pop(attrs, :part_numbers, [])
    changeset =
      SystemModel.changeset(system_model, attrs)
      |> Ecto.Changeset.prepare_changes(fn changeset ->
        # This handles the case of picture deletion or update with URL
        case Ecto.Changeset.fetch_change(changeset, :picture_url) do
          {:ok, _nil_or_url} ->
            old_picture_url = changeset.data.picture_url
            # We do our best to delete the existing picture, if it's in the store
            _ = Assets.delete_system_model_picture(system_model, old_picture_url)
            changeset
          _ ->
            changeset
        end
      end)
    Multi.new()
    |> Multi.run(:assoc_part_numbers, fn _repo, _changes ->
      {:ok, insert_or_get_part_numbers(SystemModelPartNumber, changeset, part_numbers)}
    end)
    |> Multi.update(:system_model, fn %{assoc_part_numbers: changeset} ->
      changeset
    end)
    |> Multi.run(:upload_system_model_picture, fn repo, %{system_model: system_model} ->
      # This handles the case of picture update
      with {:ok, picture_file} <- Ecto.Changeset.fetch_change(changeset, :picture_file),
           {:ok, picture_url} <-
             Assets.upload_system_model_picture(system_model, picture_file) do
        # Retrieve the old picture, if any, from the original changeset
        old_picture_url = changeset.data.picture_url
        # Ignore the result here for now: a failure to delete the old picture shouldn't
        # compromise the success of the operation (we would leave another orphan image anyway)
        _ = Assets.delete_system_model_picture(system_model, old_picture_url)
        change_system_model(system_model, %{picture_url: picture_url})
        |> repo.update()
      else
        # No :picture_file, no need to change
        :error -> {:ok, system_model}
        # Storage is disabled, ignore for now
        {:error, :storage_disabled} -> {:ok, system_model}
        {:error, reason} -> {:error, reason}
      end
    end)
    |> Repo.transaction()
    |> case do
      {:ok, %{upload_system_model_picture: system_model}} ->
        {:ok, Repo.preload(system_model, [:part_numbers, :hardware_type])}
      {:error, _failed_operation, failed_value, _changes_so_far} ->
        {:error, failed_value}
    end
  end
  @doc """
  Deletes a system_model (and its stored picture, best effort).
  ## Examples
      iex> delete_system_model(system_model)
      {:ok, %SystemModel{}}
      iex> delete_system_model(system_model)
      {:error, %Ecto.Changeset{}}
  """
  def delete_system_model(%SystemModel{} = system_model) do
    changeset = SystemModel.delete_changeset(system_model)
    with {:ok, system_model} <- Repo.delete(changeset) do
      # Delete the picture as well, if any.
      # Ignore the result, a failure to delete the picture shouldn't compromise the success of
      # the operation (we would leave another orphan image anyway)
      _ = Assets.delete_system_model_picture(system_model, system_model.picture_url)
      {:ok, system_model}
    end
  end
  @doc """
  Returns an `%Ecto.Changeset{}` for tracking system_model changes.
  ## Examples
      iex> change_system_model(system_model)
      %Ecto.Changeset{data: %SystemModel{}}
  """
  def change_system_model(%SystemModel{} = system_model, attrs \\ %{}) do
    SystemModel.changeset(system_model, attrs)
  end
  @doc """
  Inserts the tags passed in attrs within a multi transaction, normalizing them.
  Returns the updated `%Ecto.Multi{}`. The resolved `Tag` structs (or `nil`
  when `attrs` has no `:tags` key) end up under the `:ensure_tags_exist` key.
  """
  def ensure_tags_exist_multi(multi, %{tags: _tags} = attrs) do
    multi
    # Cast and normalize the :tags list with a schemaless changeset.
    |> Multi.run(:cast_tags, fn _repo, _changes ->
      data = %{}
      types = %{tags: {:array, :string}}
      changeset =
        {data, types}
        |> Ecto.Changeset.cast(attrs, Map.keys(types))
      with {:ok, %{tags: tags}} <- Ecto.Changeset.apply_action(changeset, :insert) do
        tenant_id = Repo.get_tenant_id()
        # insert_all does not autogenerate timestamps, so set them explicitly.
        now =
          NaiveDateTime.utc_now()
          |> NaiveDateTime.truncate(:second)
        # Tags that normalize to the empty string are dropped.
        tag_maps =
          for tag <- tags,
              tag = normalize_tag(tag),
              tag != "" do
            %{name: tag, inserted_at: now, updated_at: now, tenant_id: tenant_id}
          end
        {:ok, tag_maps}
      end
    end)
    # Insert only the tags that don't exist yet; name conflicts are ignored.
    |> Multi.insert_all(:insert_tags, Tag, & &1.cast_tags, on_conflict: :nothing)
    # Read back full Tag structs for both new and pre-existing names.
    |> Multi.run(:ensure_tags_exist, fn repo, %{cast_tags: tag_maps} ->
      tag_names = for t <- tag_maps, do: t.name
      {:ok, repo.all(from t in Tag, where: t.name in ^tag_names)}
    end)
  end
  def ensure_tags_exist_multi(multi, _attrs) do
    # No tags in the update, so we return nil for tags
    Multi.run(multi, :ensure_tags_exist, fn _repo, _previous ->
      {:ok, nil}
    end)
  end
  # Tags are stored trimmed and lowercased, so tag comparison is
  # whitespace- and case-insensitive.
  defp normalize_tag(tag) do
    tag
    |> String.trim()
    |> String.downcase()
  end
end
| 28.658281 | 102 | 0.65801 |
798e5a36eb966579e189d341075bf9237902479c | 1,854 | ex | Elixir | lib/cotoami_web/controllers/amishi_controller.ex | cruatta/cotoami | 29b7ef66c053cf4e381c6ff65d5fd599066ebabe | [
"Apache-2.0"
] | 337 | 2016-11-28T15:46:58.000Z | 2022-03-01T06:21:25.000Z | lib/cotoami_web/controllers/amishi_controller.ex | cruatta/cotoami | 29b7ef66c053cf4e381c6ff65d5fd599066ebabe | [
"Apache-2.0"
] | 79 | 2017-02-27T05:44:36.000Z | 2021-12-09T00:28:11.000Z | lib/cotoami_web/controllers/amishi_controller.ex | cruatta/cotoami | 29b7ef66c053cf4e381c6ff65d5fd599066ebabe | [
"Apache-2.0"
] | 47 | 2018-02-03T01:32:13.000Z | 2021-11-08T07:54:43.000Z | defmodule CotoamiWeb.AmishiController do
use CotoamiWeb, :controller
require Logger
alias Cotoami.{Amishi, AmishiService, RedisService}
alias CotoamiWeb.AmishiView
def action(conn, _) do
apply(__MODULE__, action_name(conn), [conn, conn.params, conn.assigns.amishi])
end
def show(conn, %{"id" => id}, _amishi) do
AmishiService.get(id) |> render_amishi(conn)
end
def show_by_email(conn, %{"email" => email}, _amishi) do
AmishiService.get_by_email(email) |> render_amishi(conn)
end
defp render_amishi(amishi, conn) do
case amishi do
nil ->
send_resp(conn, :not_found, "")
amishi ->
render(conn, "amishi.json", amishi: amishi)
end
end
def invite(conn, %{"email" => email}, amishi) do
if AmishiService.can_invite_someone?(amishi) do
case AmishiService.get_by_email(email) do
nil ->
token = RedisService.generate_invite_token(email, amishi)
host_url = CotoamiWeb.Router.Helpers.url(conn)
email
|> CotoamiWeb.Email.invitation(token, host_url, amishi)
|> Cotoami.Mailer.deliver_now()
json(conn, "ok")
invitee ->
conn
|> put_status(:conflict)
|> json(Phoenix.View.render_one(invitee, AmishiView, "amishi.json"))
end
else
send_resp(conn, :unauthorized, "Invite limit exceeded")
end
end
def invitees(conn, _params, amishi) do
amishis = AmishiService.invitees(amishi)
render(conn, "amishis.json", %{amishis: amishis})
end
def refresh_email_user_data(conn, _params, %{owner: true}) do
email_users =
Amishi
|> Repo.all()
|> Enum.map(& &1.email)
|> Enum.reject(&is_nil/1)
|> Enum.map(&AmishiService.insert_or_update_by_email!/1)
text(conn, "#{length(email_users)} records updated.")
end
end
| 27.264706 | 82 | 0.645092 |
798e6c03d457a822ee06f58ecb9eafa1f9e7caa8 | 552 | ex | Elixir | exercises/practice/resistor-color-trio/.meta/example.ex | ryanzidago/elixir | d00ca743340fcc328c70ee351274f91b57fd4c8d | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/resistor-color-trio/.meta/example.ex | ryanzidago/elixir | d00ca743340fcc328c70ee351274f91b57fd4c8d | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/resistor-color-trio/.meta/example.ex | ryanzidago/elixir | d00ca743340fcc328c70ee351274f91b57fd4c8d | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule ResistorColorTrio do
@colors %{
black: 0,
brown: 1,
red: 2,
orange: 3,
yellow: 4,
green: 5,
blue: 6,
violet: 7,
grey: 8,
white: 9
}
@doc """
Calculate the resistance value in ohm or kiloohm from resistor colors
"""
@spec label(colors :: [atom]) :: {number, :ohms | :kiloohms}
def label([a, b, c]) do
value = (10 * @colors[a] + @colors[b]) * round(:math.pow(10, @colors[c]))
if value >= 1000 do
{div(value, 1000), :kiloohms}
else
{value, :ohms}
end
end
end
| 19.034483 | 77 | 0.547101 |
798ebddd4fff340a57f47bb36f4d4001a513e784 | 1,003 | ex | Elixir | lib/prometheus_logger.ex | rubberduck203/ex_prometheus_logger | d99c4e2eadde9c94aa14501e05cf7888d05bec44 | [
"MIT"
] | 1 | 2018-11-08T12:15:05.000Z | 2018-11-08T12:15:05.000Z | lib/prometheus_logger.ex | rubberduck203/ex_prometheus_logger | d99c4e2eadde9c94aa14501e05cf7888d05bec44 | [
"MIT"
] | null | null | null | lib/prometheus_logger.ex | rubberduck203/ex_prometheus_logger | d99c4e2eadde9c94aa14501e05cf7888d05bec44 | [
"MIT"
] | null | null | null | defmodule Logger.Backends.Prometheus do
@moduledoc false
@behaviour :gen_event
use Prometheus.Metric
def hostname do
with {:ok, hostname} <- :inet.gethostname() do
hostname
|> to_string()
|> String.trim()
end
end
## Logger implementation
@name :ex_logger
def init(_args) do
Counter.declare(
name: @name,
help: "Logged message count by level.",
labels: [:instance, :level]
)
backend_env = Application.get_env(:logger, __MODULE__, [level: :warn])
{:ok, backend_env}
end
def handle_event({level, _group_leader, {Logger, _message, _timestamp, _metadata}}, state) do
case Logger.compare_levels(level, Keyword.get(state, :level)) do
:lt -> nil
_ -> Counter.inc(name: @name, labels: [hostname(), level])
end
{:ok, state}
end
def handle_event(:flush, state) do
{:ok, state}
end
def handle_call({:configure, options}, state) do
{:ok, :ok, Keyword.merge(state, options)}
end
end
| 20.469388 | 95 | 0.636092 |
798f006a690200875e4dbba1d6927a544577a8c0 | 590 | exs | Elixir | exercises/concept/need-for-speed/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/concept/need-for-speed/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/concept/need-for-speed/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule RemoteControlCar.MixProject do
use Mix.Project
def project do
[
app: :need_for_speed,
version: "0.1.0",
# elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 20.344828 | 87 | 0.586441 |
798f20206317e8829adc614eb4f923788ffe64b0 | 871 | ex | Elixir | app/lib/rocdev/meetup_api.ex | TomFrink/rocdev | 5c4a19c2d099bd8bad3377e6e08dfc9aa7e0bcf5 | [
"MIT"
] | 14 | 2017-10-10T19:11:21.000Z | 2019-04-20T20:11:01.000Z | app/lib/rocdev/meetup_api.ex | TomFrink/rocdev | 5c4a19c2d099bd8bad3377e6e08dfc9aa7e0bcf5 | [
"MIT"
] | 41 | 2017-10-08T03:07:20.000Z | 2018-10-15T12:47:34.000Z | app/lib/rocdev/meetup_api.ex | TomFrink/rocdev | 5c4a19c2d099bd8bad3377e6e08dfc9aa7e0bcf5 | [
"MIT"
] | 7 | 2017-10-18T10:44:04.000Z | 2019-04-15T20:44:49.000Z | defmodule Rocdev.MeetupAPI do
@moduledoc """
Provides access to Meetup API.
"""
@base_url Application.get_env(:rocdev, :meetup_api_base_url)
def past do
HTTPoison.get(
@base_url <> "/events",
[],
[params: [status: "past", page: 3, desc: true]]
)
|> handle_response
end
def upcoming do
HTTPoison.get(
@base_url <> "/events",
[],
[params: [status: "upcoming", page: 3]]
)
|> handle_response
end
def handle_response(response) do
with {:ok, response} <- response,
{:ok, body} <- process_response_body(response.body) do
{:ok, body}
else
{:error, reason} -> {:error, reason}
_ -> {:error, "unknown"}
end
end
def process_response_body(body) do
case Poison.decode(body) do
{:ok, json} -> {:ok, json}
_ -> {:error, body}
end
end
end | 20.738095 | 63 | 0.574053 |
798f3dc7c9ebf9dc073df9c5991a6b41924884ba | 1,276 | ex | Elixir | test/support/conn_case.ex | ScapeGuru/osrs_api_proxy | 7d4cace8e5185ec60a324de274df225a58049b7a | [
"Apache-2.0"
] | null | null | null | test/support/conn_case.ex | ScapeGuru/osrs_api_proxy | 7d4cace8e5185ec60a324de274df225a58049b7a | [
"Apache-2.0"
] | 37 | 2021-04-19T23:56:46.000Z | 2022-02-28T15:14:04.000Z | test/support/conn_case.ex | ScapeGuru/osrs_api_proxy | 7d4cace8e5185ec60a324de274df225a58049b7a | [
"Apache-2.0"
] | null | null | null | defmodule OsrsApiProxyWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use OsrsApiProxyWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import OsrsApiProxyWeb.ConnCase
alias OsrsApiProxyWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint OsrsApiProxyWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(OsrsApiProxy.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(OsrsApiProxy.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 29 | 74 | 0.731975 |
798f4de8ef1aabb30986f2be5988840b8a8a10a7 | 17,920 | ex | Elixir | lib/mix/lib/mix/dep.ex | mszczygiel/elixir | 7dd86ec1f782debcb00d9f078478c3a9509a6375 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/dep.ex | mszczygiel/elixir | 7dd86ec1f782debcb00d9f078478c3a9509a6375 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/dep.ex | mszczygiel/elixir | 7dd86ec1f782debcb00d9f078478c3a9509a6375 | [
"Apache-2.0"
defmodule Mix.Dep do
  @moduledoc false

  @doc """
  The Mix.Dep struct keeps information about your project dependencies.
  It contains:
  * `scm` - a module representing the source code management tool (SCM)
  operations
  * `app` - the application name as an atom
  * `requirement` - a binary or regular expression with the dependency's requirement
  * `status` - the current status of the dependency, check
  `Mix.Dep.format_status/1` for more information
  * `opts` - the options given by the developer
  * `deps` - dependencies of this dependency
  * `top_level` - true if dependency was defined in the top-level project
  * `manager` - the project management, possible values:
  `:rebar` | `:rebar3` | `:mix` | `:make` | `nil`
  * `from` - path to the file where the dependency was defined
  * `extra` - a slot for adding extra configuration based on the manager;
  the information on this field is private to the manager and should not be
  relied on
  * `system_env` - an enumerable of key-value tuples of binaries to be set as environment variables
  when loading or compiling the dependency
  A dependency is in two specific states: loaded and unloaded.
  When a dependency is unloaded, it means Mix only parsed its specification
  and made no attempt to actually load the dependency or validate its
  status. When the dependency is loaded, it means Mix attempted to fetch,
  load and validate it, the status is set in the status field.
  Furthermore, in the `opts` fields, Mix keeps some internal options, which
  can be accessed by SCMs:
  * `:app` - the application name
  * `:dest` - the destination path for the dependency
  * `:lock` - the lock information retrieved from mix.lock
  * `:build` - the build path for the dependency
  """
  defstruct scm: nil,
            app: nil,
            requirement: nil,
            status: nil,
            opts: [],
            deps: [],
            top_level: false,
            extra: [],
            manager: nil,
            from: nil,
            system_env: []

  @type t :: %__MODULE__{
          scm: Mix.SCM.t(),
          app: atom,
          requirement: String.t() | Regex.t() | nil,
          status: atom,
          opts: keyword,
          top_level: boolean,
          manager: :rebar | :rebar3 | :mix | :make | nil,
          from: String.t(),
          extra: term,
          system_env: keyword
        }

  @doc """
  Returns loaded dependencies from the cache for the current environment.
  If dependencies have not been cached yet, they are loaded
  and then cached.
  Because the dependencies are cached during deps.loadpaths,
  their status may be outdated (for example, `:compile` did not
  yet become `:ok`). Therefore it is recommended to not rely
  on their status, also given they haven't been checked
  against the lock.
  """
  def cached() do
    if project = Mix.Project.get() do
      read_cached_deps(project, {Mix.env(), Mix.target()}) || load_and_cache()
    else
      load_and_cache()
    end
  end

  @doc """
  Returns loaded dependencies recursively and caches it.
  The result is cached for future `cached/0` calls.
  ## Exceptions
  This function raises an exception if any of the dependencies
  provided in the project are in the wrong format.
  """
  def load_and_cache() do
    env = Mix.env()
    target = Mix.target()

    case Mix.ProjectStack.top_and_bottom() do
      {%{name: top, config: config}, %{name: bottom}} ->
        write_cached_deps(top, {env, target}, load_and_cache(config, top, bottom, env, target))

      _ ->
        converge(env: env, target: target)
    end
  end

  # Top of the project stack == bottom: we are in the root project,
  # so simply converge from scratch.
  defp load_and_cache(_config, top, top, env, target) do
    converge(env: env, target: target)
  end

  # We are inside a child (e.g. umbrella) project: reuse the deps cached by
  # the bottom (root) project and re-derive this app's view of them.
  defp load_and_cache(config, _top, bottom, _env, _target) do
    {_, deps} =
      Mix.ProjectStack.read_cache({:cached_deps, bottom}) ||
        raise "cannot retrieve dependencies information because dependencies were not loaded. " <>
                "Please invoke one of \"deps.loadpaths\", \"loadpaths\", or \"compile\" Mix task"

    app = Keyword.fetch!(config, :app)
    seen = populate_seen(MapSet.new(), [app])
    # tl/1 drops `app` itself, keeping only its transitive children.
    children = get_deps(deps, tl(Enum.uniq(get_children(deps, seen, [app]))))

    # Map of direct children of `app` to their :optional flag.
    top_level =
      for dep <- deps,
          dep.app == app,
          child <- dep.deps,
          do: {child.app, Keyword.get(child.opts, :optional, false)},
          into: %{}

    Enum.map(children, fn %{app: app, opts: opts} = dep ->
      # optional only matters at the top level. Any non-top level dependency
      # that is optional and is still available means it has been fulfilled.
      case top_level do
        %{^app => optional} ->
          %{dep | top_level: true, opts: Keyword.put(opts, :optional, optional)}

        %{} ->
          %{dep | top_level: false, opts: Keyword.delete(opts, :optional)}
      end
    end)
  end

  # Cache entries are keyed by project module and tagged with {env, target};
  # an entry with a stale tag counts as a miss.
  defp read_cached_deps(project, env_target) do
    case Mix.ProjectStack.read_cache({:cached_deps, project}) do
      {^env_target, deps} -> deps
      _ -> nil
    end
  end

  defp write_cached_deps(project, env_target, deps) do
    Mix.ProjectStack.write_cache({:cached_deps, project}, {env_target, deps})
    deps
  end

  @doc """
  Clears loaded dependencies from the cache for the current environment.
  """
  def clear_cached() do
    if project = Mix.Project.get() do
      key = {:cached_deps, project}
      Mix.ProjectStack.delete_cache(key)
    end
  end

  @doc """
  Returns loaded dependencies recursively on the given environment.
  If no environment is passed, dependencies are loaded across all
  environments. The result is not cached.
  ## Exceptions
  This function raises an exception if any of the dependencies
  provided in the project are in the wrong format.
  """
  def load_on_environment(opts) do
    converge(opts)
  end

  # Runs the full convergence and keeps only the dependency list
  # (first element of the {deps, acc, lock} tuple built by the callback).
  defp converge(opts) do
    Mix.Dep.Converger.converge(nil, nil, opts, &{&1, &2, &3}) |> elem(0)
  end

  @doc """
  Filters the given dependencies by name.
  Raises if any of the names are missing.
  """
  def filter_by_name(given, all_deps, opts \\ []) do
    # Ensure all apps are atoms
    apps = to_app_names(given)

    deps =
      if opts[:include_children] do
        seen = populate_seen(MapSet.new(), apps)
        get_deps(all_deps, Enum.uniq(get_children(all_deps, seen, apps)))
      else
        get_deps(all_deps, apps)
      end

    Enum.each(apps, fn app ->
      unless Enum.any?(all_deps, &(&1.app == app)) do
        Mix.raise("Unknown dependency #{app} for environment #{Mix.env()}")
      end
    end)

    deps
  end

  defp get_deps(all_deps, apps) do
    Enum.filter(all_deps, &(&1.app in apps))
  end

  defp get_children(_all_deps, _seen, []), do: []

  # Recursively collects the app names of all transitive children of `apps`,
  # using `seen` to avoid revisiting apps (and to cut dependency cycles).
  defp get_children(all_deps, seen, apps) do
    children_apps =
      for %{deps: children} <- get_deps(all_deps, apps),
          %{app: app} <- children,
          app not in seen,
          do: app

    apps ++ get_children(all_deps, populate_seen(seen, children_apps), children_apps)
  end

  # Adds every app in `apps` to the `seen` set.
  defp populate_seen(seen, apps) do
    Enum.reduce(apps, seen, &MapSet.put(&2, &1))
  end

  @doc """
  Runs the given `fun` inside the given dependency project by
  changing the current working directory and loading the given
  project onto the project stack.
  It expects a loaded dependency as argument.
  """
  def in_dependency(dep, post_config \\ [], fun)

  def in_dependency(%Mix.Dep{app: app, opts: opts, scm: scm}, config, fun) do
    # Set the app_path to be the one stored in the dependency.
    # This is important because the name of application in the
    # mix.exs file can be different than the actual name and we
    # choose to respect the one in the mix.exs
    config =
      Keyword.merge(Mix.Project.deps_config(), config)
      |> Keyword.put(:app_path, opts[:build])
      |> Keyword.put(:build_scm, scm)

    env = opts[:env] || :prod
    old_env = Mix.env()

    try do
      Mix.env(env)
      Mix.Project.in_project(app, opts[:dest], config, fun)
    after
      # Always restore the caller's Mix env, even if `fun` raised.
      Mix.env(old_env)
    end
  end

  @doc """
  Formats the status of a dependency.
  """
  def format_status(%Mix.Dep{status: {:ok, _vsn}}) do
    "ok"
  end

  def format_status(%Mix.Dep{status: {:noappfile, {path, nil}}}) do
    "could not find an app file at #{inspect(Path.relative_to_cwd(path))}. " <>
      "This may happen if the dependency was not yet compiled " <>
      "or the dependency indeed has no app file (then you can pass app: false as option)"
  end

  def format_status(%Mix.Dep{status: {:noappfile, {path, other_path}}}) do
    other_app = Path.rootname(Path.basename(other_path))

    "could not find an app file at #{inspect(Path.relative_to_cwd(path))}. " <>
      "Another app file was found in the same directory " <>
      "#{inspect(Path.relative_to_cwd(other_path))}, " <>
      "try changing the dependency name to :#{other_app}"
  end

  def format_status(%Mix.Dep{status: {:invalidapp, path}}) do
    "the app file at #{inspect(Path.relative_to_cwd(path))} is invalid"
  end

  def format_status(%Mix.Dep{status: {:invalidvsn, vsn}}) do
    "the app file contains an invalid version: #{inspect(vsn)}"
  end

  def format_status(%Mix.Dep{status: {:nosemver, vsn}, requirement: req}) do
    "the app file specified a non-Semantic Versioning format: #{inspect(vsn)}. Mix can only match the " <>
      "requirement #{inspect(req)} against semantic versions. Please fix the application version " <>
      "or use a regular expression as a requirement to match against any version"
  end

  def format_status(%Mix.Dep{status: {:nomatchvsn, vsn}, requirement: req}) do
    "the dependency does not match the requirement #{inspect(req)}, got #{inspect(vsn)}"
  end

  def format_status(%Mix.Dep{status: {:lockmismatch, _}}) do
    "lock mismatch: the dependency is out of date. To fetch locked version run \"mix deps.get\""
  end

  def format_status(%Mix.Dep{status: :lockoutdated}) do
    "lock outdated: the lock is outdated compared to the options in your mix.exs. To fetch " <>
      "locked version run \"mix deps.get\""
  end

  def format_status(%Mix.Dep{status: :nolock}) do
    "the dependency is not locked. To generate the \"mix.lock\" file run \"mix deps.get\""
  end

  def format_status(%Mix.Dep{status: :compile}) do
    "the dependency build is outdated, please run \"#{mix_env_var()}mix deps.compile\""
  end

  def format_status(%Mix.Dep{app: app, status: {:divergedreq, vsn, other}} = dep) do
    "the dependency #{app} #{vsn}\n" <>
      dep_status(dep) <>
      "\n does not match the requirement specified\n" <>
      dep_status(other) <>
      "\n Ensure they match or specify one of the above in your deps and set \"override: true\""
  end

  def format_status(%Mix.Dep{app: app, status: {:divergedonly, other}} = dep) do
    recommendation =
      if Keyword.has_key?(other.opts, :only) do
        "Ensure you specify at least the same environments in :only in your dep"
      else
        "Remove the :only restriction from your dep"
      end

    "the :only option for dependency #{app}\n" <>
      dep_status(dep) <>
      "\n does not match the :only option calculated for\n" <>
      dep_status(other) <> "\n #{recommendation}"
  end

  def format_status(%Mix.Dep{app: app, status: {:divergedtargets, other}} = dep) do
    recommendation =
      if Keyword.has_key?(other.opts, :targets) do
        "Ensure you specify at least the same targets in :targets in your dep"
      else
        "Remove the :targets restriction from your dep"
      end

    "the :targets option for dependency #{app}\n" <>
      dep_status(dep) <>
      "\n does not match the :targets option calculated for\n" <>
      dep_status(other) <> "\n #{recommendation}"
  end

  def format_status(%Mix.Dep{app: app, status: {:diverged, other}} = dep) do
    "different specs were given for the #{app} app:\n" <>
      "#{dep_status(dep)}#{dep_status(other)}\n " <> override_diverge_recommendation(dep, other)
  end

  def format_status(%Mix.Dep{app: app, status: {:overridden, other}} = dep) do
    "the dependency #{app} in #{Path.relative_to_cwd(dep.from)} is overriding a child dependency:\n" <>
      "#{dep_status(dep)}#{dep_status(other)}\n " <> override_diverge_recommendation(dep, other)
  end

  def format_status(%Mix.Dep{status: {:unavailable, _}, scm: scm}) do
    if scm.fetchable?() do
      "the dependency is not available, run \"mix deps.get\""
    else
      "the dependency is not available"
    end
  end

  def format_status(%Mix.Dep{status: {:elixirlock, _}}) do
    "the dependency was built with an out-of-date Elixir version, run \"#{mix_env_var()}mix deps.compile\""
  end

  def format_status(%Mix.Dep{status: {:scmlock, _}}) do
    "the dependency was built with another SCM, run \"#{mix_env_var()}mix deps.compile\""
  end

  # Umbrella siblings cannot use override: true, so suggest editing the
  # conflicting definition instead.
  defp override_diverge_recommendation(dep, other) do
    if dep.opts[:from_umbrella] || other.opts[:from_umbrella] do
      "Please remove the conflicting options from your definition"
    else
      "Ensure they match or specify one of the above in your deps and set \"override: true\""
    end
  end

  # Renders one dependency as an indented "> In <file>: <spec>" bullet for
  # diagnostic messages, hiding internal opts (:dest, :build, :lock,
  # :manager, :checkout) and surfacing manager/system_env when set.
  defp dep_status(%Mix.Dep{} = dep) do
    %{
      app: app,
      requirement: req,
      manager: manager,
      opts: opts,
      from: from,
      system_env: system_env
    } = dep

    opts =
      []
      |> Kernel.++(if manager, do: [manager: manager], else: [])
      |> Kernel.++(if system_env != [], do: [system_env: system_env], else: [])
      |> Kernel.++(opts)
      |> Keyword.drop([:dest, :build, :lock, :manager, :checkout])

    info = if req, do: {app, req, opts}, else: {app, opts}
    "\n > In #{Path.relative_to_cwd(from)}:\n #{inspect(info)}\n"
  end

  @doc """
  Checks the lock for the given dependency and update its status accordingly.
  """
  def check_lock(%Mix.Dep{scm: scm, opts: opts} = dep) do
    if available?(dep) do
      case scm.lock_status(opts) do
        :mismatch ->
          status = if rev = opts[:lock], do: {:lockmismatch, rev}, else: :nolock
          %{dep | status: status}

        :outdated ->
          # Don't include the lock in the dependency if it is outdated
          %{dep | status: :lockoutdated}

        :ok ->
          check_manifest(dep, opts[:build])
      end
    else
      dep
    end
  end

  # Compares the build manifest (".mix" in the build path) against the
  # current Elixir/OTP version and SCM to decide whether a rebuild is needed.
  defp check_manifest(%{scm: scm} = dep, build_path) do
    vsn = {System.version(), :erlang.system_info(:otp_release)}

    case Mix.Dep.ElixirSCM.read(Path.join(build_path, ".mix")) do
      {:ok, old_vsn, _} when old_vsn != vsn ->
        %{dep | status: {:elixirlock, old_vsn}}

      {:ok, _, old_scm} when old_scm != scm ->
        %{dep | status: {:scmlock, old_scm}}

      _ ->
        dep
    end
  end

  @doc """
  Returns `true` if the dependency is ok.
  """
  def ok?(%Mix.Dep{status: {:ok, _}}), do: true
  def ok?(%Mix.Dep{}), do: false

  @doc """
  Checks if a dependency is available.
  Available dependencies are the ones that can be loaded.
  """
  def available?(%Mix.Dep{status: {:unavailable, _}}), do: false
  def available?(dep), do: not diverged?(dep)

  @doc """
  Checks if a dependency has diverged.
  """
  def diverged?(%Mix.Dep{status: {:overridden, _}}), do: true
  def diverged?(%Mix.Dep{status: {:diverged, _}}), do: true
  def diverged?(%Mix.Dep{status: {:divergedreq, _}}), do: true
  def diverged?(%Mix.Dep{status: {:divergedonly, _}}), do: true
  def diverged?(%Mix.Dep{status: {:divergedtargets, _}}), do: true
  def diverged?(%Mix.Dep{}), do: false

  @doc """
  Returns `true` if the dependency is compilable.
  """
  def compilable?(%Mix.Dep{status: {:elixirlock, _}}), do: true
  def compilable?(%Mix.Dep{status: {:noappfile, {_, _}}}), do: true
  def compilable?(%Mix.Dep{status: {:scmlock, _}}), do: true
  def compilable?(%Mix.Dep{status: :compile}), do: true
  def compilable?(_), do: false

  @doc """
  Formats a dependency for printing.
  """
  def format_dep(%Mix.Dep{scm: scm, app: app, status: status, opts: opts}) do
    version =
      case status do
        {:ok, vsn} when vsn != nil -> "#{vsn} "
        _ -> ""
      end

    "#{app} #{version}(#{scm.format(opts)})"
  end

  @doc """
  Returns all load paths for the given dependency.
  Automatically derived from source paths.
  """
  def load_paths(%Mix.Dep{opts: opts} = dep) do
    build_path = Path.dirname(opts[:build])

    Enum.map(source_paths(dep), fn {_, base} ->
      Path.join([build_path, base, "ebin"])
    end)
  end

  @doc """
  Returns all source paths.
  Source paths are the directories that contain ebin files for a given
  dependency. All managers, except `:rebar`, have only one source path.
  """
  def source_paths(%Mix.Dep{manager: :rebar, app: app, opts: opts, extra: extra}) do
    sub_dirs = extra[:sub_dirs] || []
    dest = opts[:dest]

    # Add root dir and all sub dirs with ebin/ directory
    in_sub_dirs =
      for sub_dir <- sub_dirs,
          path <- Path.wildcard(Path.join(dest, sub_dir)),
          File.dir?(Path.join(path, "ebin")),
          do: {path, Path.basename(path)}

    [{opts[:dest], Atom.to_string(app)}] ++ in_sub_dirs
  end

  def source_paths(%Mix.Dep{app: app, opts: opts}) do
    [{opts[:dest], Atom.to_string(app)}]
  end

  @doc """
  Returns `true` if dependency is a Mix project.
  """
  def mix?(%Mix.Dep{manager: manager}) do
    manager == :mix
  end

  @doc """
  Returns `true` if dependency is a Rebar project.
  """
  def rebar?(%Mix.Dep{manager: manager}) do
    manager in [:rebar, :rebar3]
  end

  @doc """
  Returns `true` if dependency is a Make project.
  """
  def make?(%Mix.Dep{manager: manager}) do
    manager == :make
  end

  ## Helpers

  # Prefix for commands suggested in messages: empty in :dev,
  # otherwise "MIX_ENV=<env> ".
  defp mix_env_var do
    if Mix.env() == :dev do
      ""
    else
      "MIX_ENV=#{Mix.env()} "
    end
  end

  # Converts binaries to atoms, leaving any other value (already an atom)
  # untouched.
  defp to_app_names(given) do
    Enum.map(given, fn app ->
      if is_binary(app), do: String.to_atom(app), else: app
    end)
  end
end
| 31.057192 | 107 | 0.636942 |
798f7f90eb52496d3438f6c77ad7160ba149dec9 | 1,213 | exs | Elixir | mix.exs | nshafer/plug_assign | cd25050a9d111ac11a0a8550b7078f8063af9452 | [
"MIT"
] | 5 | 2015-10-15T17:43:45.000Z | 2017-09-07T06:09:39.000Z | mix.exs | nshafer/plug_assign | cd25050a9d111ac11a0a8550b7078f8063af9452 | [
"MIT"
] | 1 | 2018-02-17T14:27:33.000Z | 2018-02-17T14:27:33.000Z | mix.exs | nshafer/plug_assign | cd25050a9d111ac11a0a8550b7078f8063af9452 | [
"MIT"
defmodule Plug.Assign.Mixfile do
  use Mix.Project

  # Mix project definition for the plug_assign Hex package.
  def project do
    [
      app: :plug_assign,
      name: "Plug.Assign",
      version: "1.0.2",
      elixir: "~> 1.1",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description: description(),
      package: package(),
      docs: [extras: ["README.md", "LICENSE.md"], main: "readme"],
      source_url: "https://github.com/nshafer/plug_assign",
      homepage_url: "http://blog.lotech.org/a-phoenix-plug-for-assigning-template-variables.html"
    ]
  end

  # No OTP application callbacks or extra applications are needed.
  def application do
    []
  end

  # Runtime and documentation-only dependencies.
  defp deps do
    [
      {:plug, "~> 1.5"},
      {:ex_doc, "~> 0.21.1", only: :dev}
    ]
  end

  # One-line summary shown on hex.pm.
  defp description do
    """
    A simple plug to allow setting variables in a connection.
    """
  end

  # Hex package metadata: shipped files, maintainers, license and links.
  defp package do
    links = %{
      "GitHub" => "https://github.com/nshafer/plug_assign",
      "Docs" => "http://hexdocs.pm/plug_assign",
      "Howto" => "http://blog.lotech.org/a-phoenix-plug-for-assigning-template-variables.html"
    }

    [
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Nathan Shafer"],
      licenses: ["MIT"],
      links: links
    ]
  end
end
| 23.784314 | 97 | 0.567189 |
798f7fc8d00f1aa6a983d1a8f70836d7bca07f4e | 900 | ex | Elixir | umbrella/apps/sunulator_web/lib/sunulator_web/application.ex | plasticine/sunulator | f202518bae70cad06ea9d38e183c1b9873a77b18 | [
"MIT"
] | 2 | 2019-06-05T23:28:44.000Z | 2019-06-05T23:41:20.000Z | umbrella/apps/sunulator_web/lib/sunulator_web/application.ex | plasticine/sunulator | f202518bae70cad06ea9d38e183c1b9873a77b18 | [
"MIT"
] | 3 | 2021-03-09T12:30:08.000Z | 2021-09-01T19:32:06.000Z | umbrella/apps/sunulator_web/lib/sunulator_web/application.ex | plasticine/sunulator | f202518bae70cad06ea9d38e183c1b9873a77b18 | [
"MIT"
defmodule SunulatorWeb.Application do
  # OTP application callback module: boots the web endpoint under a
  # one-for-one supervisor and forwards runtime config changes to it.
  # See https://hexdocs.pm/elixir/Application.html for details.
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    supervisor_opts = [strategy: :one_for_one, name: SunulatorWeb.Supervisor]
    Supervisor.start_link(children(), supervisor_opts)
  end

  # Child processes supervised by this application. Add workers here,
  # e.g. {SunulatorWeb.Worker, arg}.
  defp children do
    [
      SunulatorWeb.Endpoint
    ]
  end

  # Invoked on hot upgrades so the endpoint picks up new configuration.
  @impl true
  def config_change(changed, _new, removed) do
    SunulatorWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 30 | 71 | 0.727778 |
798f8c7622d2d49173c044b4396ae00256589125 | 492 | ex | Elixir | lib/club_backend/release.ex | ufosc/club-backend-ex | 87b55bc9fcccd9d458ee3277c3153f608f3a8a5e | [
"MIT"
] | null | null | null | lib/club_backend/release.ex | ufosc/club-backend-ex | 87b55bc9fcccd9d458ee3277c3153f608f3a8a5e | [
"MIT"
] | 6 | 2020-07-31T23:02:34.000Z | 2021-02-26T21:10:10.000Z | lib/club_backend/release.ex | ufosc/club-backend-ex | 87b55bc9fcccd9d458ee3277c3153f608f3a8a5e | [
"MIT"
defmodule ClubBackend.Release do
  # Release helpers for running Ecto migrations without Mix
  # (e.g. `bin/club_backend eval "ClubBackend.Release.migrate"`).

  @app :club_backend

  # Runs all pending :up migrations for every configured repo.
  def migrate do
    load_app()

    Enum.map(repos(), fn repo ->
      {:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :up, all: true))
    end)
  end

  # Rolls the given repo back down to `version`.
  def rollback(repo, version) do
    load_app()
    {:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :down, to: version))
  end

  # Repos come from the application's :ecto_repos config.
  defp repos, do: Application.fetch_env!(@app, :ecto_repos)

  # Loads the app so its configuration is available before repos start.
  defp load_app, do: Application.load(@app)
end
| 19.68 | 91 | 0.642276 |
798fa8ea9094347372de2b1eb0b804276f38d5a5 | 372 | ex | Elixir | fixtures/elixir_output/post_json_multiple_headers.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 4,955 | 2015-01-02T09:04:20.000Z | 2021-10-06T03:54:43.000Z | fixtures/elixir_output/post_json_multiple_headers.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 242 | 2015-03-27T05:59:11.000Z | 2021-10-03T08:36:05.000Z | fixtures/elixir_output/post_json_multiple_headers.ex | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
# Sends a POST to /rest/login-sessions with JSON credentials via HTTPoison.
#
# NOTE: TLS certificate verification is disabled (`hackney: [:insecure]`),
# so this should only be pointed at trusted/self-signed test endpoints.
request = %HTTPoison.Request{
  method: :post,
  url: "https://0.0.0.0/rest/login-sessions",
  options: [hackney: [:insecure]],
  headers: [
    {~s|Content-Type|, ~s|application/json|},
    # Fix: removed the trailing comma that followed this tuple — Elixir list
    # literals do not allow trailing commas, so the original did not compile.
    {~s|X-API-Version|, ~s|200|}
  ],
  params: [],
  body: ~s|{"userName":"username123","password":"password123", "authLoginDomain":"local"}|
}

response = HTTPoison.request(request)
798fc10e8d158c19535adb95cbbed581adb85e07 | 123 | exs | Elixir | test/bt_test.exs | fly1ngDream/bt | 81d55ca7a524321fc2af1c255805b9f6826561d8 | [
"MIT"
] | 1 | 2020-07-29T14:39:39.000Z | 2020-07-29T14:39:39.000Z | test/bt_test.exs | yevhenshymotiuk/bt | 81d55ca7a524321fc2af1c255805b9f6826561d8 | [
"MIT"
] | null | null | null | test/bt_test.exs | yevhenshymotiuk/bt | 81d55ca7a524321fc2af1c255805b9f6826561d8 | [
"MIT"
defmodule BtTest do
  use ExUnit.Case
  # Runs the doctest examples embedded in Bt's @doc attributes as tests.
  doctest Bt

  # Smoke test for the generated project skeleton: Bt.hello/0 returns :world.
  test "greets the world" do
    assert Bt.hello() == :world
  end
end
| 13.666667 | 31 | 0.674797 |
798fc46bfc32ed95354174ec220bb463b709afb8 | 1,091 | ex | Elixir | lib/blog_phx_web/channels/user_socket.ex | the-harry/blog_phx | db7604259f2a1cd32b0243bb1039694ef986b3aa | [
"MIT"
] | null | null | null | lib/blog_phx_web/channels/user_socket.ex | the-harry/blog_phx | db7604259f2a1cd32b0243bb1039694ef986b3aa | [
"MIT"
] | null | null | null | lib/blog_phx_web/channels/user_socket.ex | the-harry/blog_phx | db7604259f2a1cd32b0243bb1039694ef986b3aa | [
"MIT"
defmodule BlogPhxWeb.UserSocket do
  use Phoenix.Socket

  ## Channels
  #
  # No channels are declared yet. To route topics to a channel module:
  #
  #     channel "room:*", BlogPhxWeb.RoomChannel

  # Accepts every connection without authentication: the params sent by the
  # client and the connect info are ignored. To authenticate, verify a token
  # here (see `Phoenix.Token`) and put the result into the socket assigns,
  # e.g. {:ok, assign(socket, :user_id, verified_user_id)}; return `:error`
  # to deny the connection.
  @impl true
  def connect(_params, socket, _connect_info), do: {:ok, socket}

  # Socket ids are topics identifying all sockets of a given user, e.g.
  #
  #     def id(socket), do: "user_socket:#{socket.assigns.user_id}"
  #
  # which would allow broadcasting a "disconnect" to terminate all of that
  # user's sockets via
  # BlogPhxWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{}).
  # Returning `nil` keeps this socket anonymous.
  @impl true
  def id(_socket), do: nil
end
| 30.305556 | 83 | 0.694775 |
798fe9507cf50887f7c204d2bb2fddb1f609e04a | 1,747 | ex | Elixir | lib/esi.ex | ajaxify/esi | 04714de809bca83fd84191e08dbaf39ccbad45c8 | [
"MIT"
] | null | null | null | lib/esi.ex | ajaxify/esi | 04714de809bca83fd84191e08dbaf39ccbad45c8 | [
"MIT"
] | null | null | null | lib/esi.ex | ajaxify/esi | 04714de809bca83fd84191e08dbaf39ccbad45c8 | [
"MIT"
defmodule ESI do
  @doc """
  Execute a request.
  ## Arguments
  - `request` -- the request
  - `opts` -- any additional options to set on the request
  """
  # Fix: this spec was misnamed `request!` (copy-paste from the function
  # below), attaching it to the wrong function.
  @spec request(req :: ESI.Request.t(), opts :: ESI.Request.request_opts()) ::
          {:ok, any} | {:error, any}
  def request(req, opts \\ []) do
    req
    |> ESI.Request.options(opts)
    |> ESI.Request.run()
  end

  @doc """
  Execute a request and raise an error if it is not successful.
  """
  @spec request!(req :: ESI.Request.t(), opts :: ESI.Request.request_opts()) :: any
  def request!(req, opts \\ []) do
    case request(req, opts) do
      {:ok, result} ->
        result

      {:error, err} ->
        # Fix: `inspect/1` keeps this from crashing with Protocol.UndefinedError
        # when the error reason does not implement String.Chars (e.g. a tuple).
        raise "Request failed: #{inspect(err)}"
    end
  end

  @doc """
  Generate a stream from a request, supporting automatic pagination.
  ## Examples
  Paginating, without `stream!`; you need to manually handle incrementing the
  `:page` option:
      iex> ESI.API.Universe.groups() |> ESI.request! |> length
      1000
      iex> ESI.API.Universe.groups(page: 2) |> ESI.request! |> length
      447
  Paginating with `stream!`, you don't have to care about `:page`:
      iex> ESI.API.Universe.groups() |> ESI.stream! |> Enum.take(1020) |> length
      1020
  Like any stream, you can use `Enum.to_list/1` to get all the items:
      iex> ESI.API.Universe.groups() |> ESI.stream! |> Enum.to_list |> length
      1447
  It even works for requests that don't paginate:
      iex> ESI.API.Universe.bloodlines() |> ESI.stream! |> Enum.to_list |> length
      18
  """
  @spec stream!(req :: ESI.Request.t(), opts :: ESI.Request.request_opts()) :: any
  def stream!(req, opts \\ []) do
    req
    |> ESI.Request.options(opts)
    |> ESI.Request.stream!()
  end
end
| 25.691176 | 83 | 0.605037 |
799036b48b3221e314307f0ac3f70951a7dcb1ae | 5,509 | ex | Elixir | lib/stripe/webhook_plug.ex | erhlee-bird/stripity_stripe | 8c4c5712f391bf76e0a168125882c85048d3192f | [
"BSD-3-Clause"
] | 555 | 2016-11-29T05:02:27.000Z | 2022-03-30T00:47:59.000Z | lib/stripe/webhook_plug.ex | erhlee-bird/stripity_stripe | 8c4c5712f391bf76e0a168125882c85048d3192f | [
"BSD-3-Clause"
] | 532 | 2016-11-28T18:22:25.000Z | 2022-03-30T17:04:32.000Z | lib/stripe/webhook_plug.ex | erhlee-bird/stripity_stripe | 8c4c5712f391bf76e0a168125882c85048d3192f | [
"BSD-3-Clause"
defmodule Stripe.WebhookPlug do
  @moduledoc """
  Helper `Plug` to process webhook events and send them to a custom handler.
  ## Installation
  To handle webhook events, you must first configure your application's endpoint.
  Add the following to `endpoint.ex`, **before** `Plug.Parsers` is loaded.
  ```elixir
  plug Stripe.WebhookPlug,
    at: "/webhook/stripe",
    handler: MyAppWeb.StripeHandler,
    secret: "whsec_******"
  ```
  If you have not yet added a webhook to your Stripe account, you can do so
  by visiting `Developers > Webhooks` in the Stripe dashboard. Use the route
  you configured in the endpoint above and copy the webhook secret into your
  app's configuration.
  ### Supported options
  - `at`: The URL path your application should listen for Stripe webhooks on.
  Configure this to match whatever you set in the webhook.
  - `handler`: Custom event handler module that accepts `Stripe.Event` structs
  and processes them within your application. You must create this module.
  - `secret`: Webhook secret starting with `whsec_` obtained from the Stripe
  dashboard. This can also be a function or a tuple for runtime configuration.
  - `tolerance`: Maximum age (in seconds) allowed for the webhook event.
  See `Stripe.Webhook.construct_event/4` for more information.
  ## Handling events
  You will need to create a custom event handler module to handle events.
  Your event handler module should implement the `Stripe.WebhookHandler`
  behavior, defining a `handle_event/1` function which takes a `Stripe.Event`
  struct and returns either `{:ok, term}` or `:ok`.
  ### Example
  ```elixir
  # lib/myapp_web/stripe_handler.ex
  defmodule MyAppWeb.StripeHandler do
    @behaviour Stripe.WebhookHandler
    @impl true
    def handle_event(%Stripe.Event{type: "charge.succeeded"} = event) do
      # TODO: handle the charge.succeeded event
    end
    @impl true
    def handle_event(%Stripe.Event{type: "invoice.payment_failed"} = event) do
      # TODO: handle the invoice.payment_failed event
    end
    # Return HTTP 200 for unhandled events
    @impl true
    def handle_event(_event), do: :ok
  end
  ```
  ## Configuration
  You can configure the webhook secret in your app's own config file.
  For example:
  ```elixir
  config :myapp,
    # [...]
    stripe_webhook_secret: "whsec_******"
  ```
  You may then include the secret in your endpoint:
  ```elixir
  plug Stripe.WebhookPlug,
    at: "/webhook/stripe",
    handler: MyAppWeb.StripeHandler,
    secret: Application.get_env(:myapp, :stripe_webhook_secret)
  ```
  ### Runtime configuration
  If you're loading config dynamically at runtime (eg with `runtime.exs`
  or an OTP app) you must pass a tuple or function as the secret.
  ```elixir
  # With a tuple
  plug Stripe.WebhookPlug,
    at: "/webhook/stripe",
    handler: MyAppWeb.StripeHandler,
    secret: {Application, :get_env, [:myapp, :stripe_webhook_secret]}
  # Or, with a function
  plug Stripe.WebhookPlug,
    at: "/webhook/stripe",
    handler: MyAppWeb.StripeHandler,
    secret: fn -> Application.get_env(:myapp, :stripe_webhook_secret) end
  ```
  """
  import Plug.Conn
  alias Plug.Conn

  @behaviour Plug

  # Splits the configured :at path into segments once, so call/2 can
  # pattern-match the incoming conn.path_info directly.
  @impl true
  def init(opts) do
    path_info = String.split(opts[:at], "/", trim: true)

    opts
    |> Enum.into(%{})
    |> Map.put_new(:path_info, path_info)
  end

  # POST on the configured path: verify the Stripe signature, build the
  # event and hand it to the configured handler. Any failure in the chain
  # yields a 400; success halts with a 200.
  @impl true
  def call(
        %Conn{method: "POST", path_info: path_info} = conn,
        %{
          path_info: path_info,
          secret: secret,
          handler: handler
        } = opts
      ) do
    secret = parse_secret!(secret)

    # NOTE(review): `Conn.read_body/1` is matched with `=` rather than `<-`,
    # so a `{:more, ...}` (partial body) or `{:error, ...}` result raises a
    # MatchError instead of falling through to the 400 branch — confirm
    # that is intended.
    with [signature] <- get_req_header(conn, "stripe-signature"),
         {:ok, payload, _} = Conn.read_body(conn),
         {:ok, %Stripe.Event{} = event} <- construct_event(payload, signature, secret, opts),
         :ok <- handle_event!(handler, event) do
      send_resp(conn, 200, "Webhook received.") |> halt()
    else
      _ -> send_resp(conn, 400, "Bad request.") |> halt()
    end
  end

  # Same path but a non-POST method (or the options map did not match the
  # clause above): reject with 400.
  @impl true
  def call(%Conn{path_info: path_info} = conn, %{path_info: path_info}) do
    send_resp(conn, 400, "Bad request.") |> halt()
  end

  # Any other path: pass the connection through untouched.
  @impl true
  def call(conn, _), do: conn

  # Delegates to Stripe.Webhook, forwarding :tolerance (max event age in
  # seconds) when it was configured.
  defp construct_event(payload, signature, secret, %{tolerance: tolerance}) do
    Stripe.Webhook.construct_event(payload, signature, secret, tolerance)
  end

  defp construct_event(payload, signature, secret, _opts) do
    Stripe.Webhook.construct_event(payload, signature, secret)
  end

  # Accepts {:ok, _} or :ok from the handler; anything else raises with a
  # message describing the offending return value and event.
  defp handle_event!(handler, %Stripe.Event{} = event) do
    case handler.handle_event(event) do
      {:ok, _} ->
        :ok

      :ok ->
        :ok

      resp ->
        raise """
        #{inspect(handler)}.handle_event/1 returned an invalid response. Expected {:ok, term} or :ok
        Got: #{inspect(resp)}
        Event data: #{inspect(event)}
        """
    end
  end

  # Resolves the configured secret: an {m, f, a} tuple is applied, a
  # function is called, a binary is used as-is; anything else raises with
  # configuration guidance.
  defp parse_secret!({m, f, a}), do: apply(m, f, a)
  defp parse_secret!(fun) when is_function(fun), do: fun.()
  defp parse_secret!(secret) when is_binary(secret), do: secret

  defp parse_secret!(secret) do
    raise """
    The Stripe webhook secret is invalid. Expected a string, tuple, or function.
    Got: #{inspect(secret)}
    If you're setting the secret at runtime, you need to pass a tuple or function.
    For example:
    plug Stripe.WebhookPlug,
    at: "/webhook/stripe",
    handler: MyAppWeb.StripeHandler,
    secret: {Application, :get_env, [:myapp, :stripe_webhook_secret]}
    """
  end
end
| 28.544041 | 100 | 0.670176 |
799045c121a4e8b09a86f7b77d64722fee6c0f6f | 1,972 | ex | Elixir | lib/cryptozaur/model/account.ex | DenisGorbachev/crypto-cli | 94e5097ff24237fbc5fdd3fea371a5c9a1f727e4 | [
"MIT"
] | 5 | 2018-09-19T09:13:15.000Z | 2021-10-20T23:29:57.000Z | lib/cryptozaur/model/account.ex | DenisGorbachev/crypto-cli | 94e5097ff24237fbc5fdd3fea371a5c9a1f727e4 | [
"MIT"
] | 6 | 2018-07-29T05:33:02.000Z | 2018-09-18T20:42:19.000Z | lib/cryptozaur/model/account.ex | DenisGorbachev/crypto-cli | 94e5097ff24237fbc5fdd3fea371a5c9a1f727e4 | [
"MIT"
defmodule Cryptozaur.Model.Account do
  @moduledoc """
  ## Rationale
  * We want to allow developers to connect their own accounts to our system
  * Also, we want to spread our funds among different accounts to avoid withdrawal limits
  """
  use Ecto.Schema
  import Ecto.Query
  import Ecto.Changeset
  import OK, only: [success: 1, failure: 1]
  alias Cryptozaur.Repo
  alias Cryptozaur.Connector
  alias Cryptozaur.Model.{Balance}

  # API credentials for a single exchange account; balances hang off it.
  schema "accounts" do
    field(:exchange, :string)
    field(:key, :string)
    field(:secret, :string)
    timestamps()
    has_many(:balances, Balance)
  end

  # All fields are both castable and required.
  @fields [:exchange, :key, :secret]
  @required @fields

  # Exposes the castable field list to callers.
  def fields, do: @fields

  # Builds a changeset: casts/requires all fields and validates the
  # exchange slug via exchange_is_supported/2.
  def changeset(account, params \\ %{}) do
    account
    |> cast(params, @fields)
    |> validate_required(@required)
    |> validate_change(:exchange, &exchange_is_supported/2)
  end

  # Custom validator: no errors when Connector resolves the slug,
  # otherwise reports "not supported" on the field.
  def exchange_is_supported(field, value) do
    case Connector.get_exchange_by_slug(value) do
      success(_) -> []
      failure(_) -> [{field, "not supported"}]
    end
  end

  # All accounts for an exchange, oldest (lowest id) first.
  def all_by_exchange(exchange) do
    from(
      o in __MODULE__,
      where: o.exchange == ^exchange,
      order_by: [asc: :id]
    )
    |> Repo.all()
  end

  # Oldest account for an exchange.
  # NOTE(review): there is no `limit: 1`, so `Repo.one!` will raise if more
  # than one account exists for the exchange — confirm that is intended.
  def first_by_exchange(exchange) do
    from(
      o in __MODULE__,
      where: o.exchange == ^exchange,
      order_by: [asc: :id]
    )
    |> Repo.one!()
  end

  # Accounts whose API key is in the given list.
  def all_by_keys(keys) do
    from(
      o in __MODULE__,
      where: o.key in ^keys
    )
    |> Repo.all()
  end

  # Single account by API key, or nil when absent.
  def one_by_key(key) do
    __MODULE__ |> Repo.get_by(key: key)
  end

  # Highest account id; raises if the table is empty (Repo.one!).
  def get_latest_id() do
    from(
      o in __MODULE__,
      select: o.id,
      order_by: [desc: o.id],
      limit: 1
    )
    |> Repo.one!()
  end

  # All accounts projected to plain maps with the given fields.
  def all_as_maps(fields \\ @fields) do
    from(
      o in __MODULE__,
      select: map(o, ^fields),
      # stabilize tests
      order_by: [asc: o.id]
    )
    |> Repo.all()
  end

  # Every account, unordered.
  def all() do
    __MODULE__ |> Repo.all()
  end
end
| 20.122449 | 89 | 0.616633 |
799057e8423a38b0ef5d591f0063557a432ce08c | 1,502 | exs | Elixir | mix.exs | emadurandal/pelemay | 9d0cf707fdf545f7179768309779309dd02277e0 | [
"Apache-2.0"
] | 192 | 2019-08-30T06:53:28.000Z | 2022-02-15T06:08:25.000Z | mix.exs | emadurandal/pelemay | 9d0cf707fdf545f7179768309779309dd02277e0 | [
"Apache-2.0"
] | 121 | 2019-09-02T08:13:25.000Z | 2021-12-03T19:04:16.000Z | mix.exs | emadurandal/pelemay | 9d0cf707fdf545f7179768309779309dd02277e0 | [
"Apache-2.0"
] | 15 | 2019-08-31T12:00:03.000Z | 2021-07-03T04:13:10.000Z | defmodule Pelemay.MixProject do
use Mix.Project
def project do
[
app: :pelemay,
version: "0.0.15",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
description: description(),
package: package(),
deps: deps(),
docs: [
api_reference: false,
main: "Pelemay"
],
elixirc_paths: elixirc_paths(Mix.env())
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:cpu_info, "~> 0.2.1"},
{:ring_logger, "~> 0.6"},
# Docs dependencies
{:ex_doc, ">= 0.0.0", only: :dev},
{:mix_test_watch, "~> 1.0", only: :dev, runtime: false}
]
end
defp description() do
"Pelemay = The Penta (Five) “Elemental Way”: Freedom, Insight, Beauty, Efficiency and Robustness"
end
defp package() do
[
name: "pelemay",
maintainers: [
"Susumu Yamazaki",
"Masakazu Mori",
"Yoshihiro Ueno",
"Hideki Takase",
"Yuki Hisae"
],
licenses: ["Apache 2.0"],
links: %{"GitHub" => "https://github.com/zeam-vm/pelemay"},
files: [
# These are the default files
"lib",
"LICENSE.txt",
"mix.exs",
"README.md"
]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
end
| 21.768116 | 101 | 0.540613 |
79907a9992187fb7f9c4ea93f32c70d7070c79da | 9,436 | ex | Elixir | lib/asteroid/oauth2/client.ex | tanguilp/asteroid | 8e03221d365da7f03f82df192c535d3ba2101f4d | [
"Apache-2.0"
] | 36 | 2019-07-23T20:01:05.000Z | 2021-08-05T00:52:34.000Z | lib/asteroid/oauth2/client.ex | tanguilp/asteroid | 8e03221d365da7f03f82df192c535d3ba2101f4d | [
"Apache-2.0"
] | 19 | 2019-08-23T19:04:50.000Z | 2021-05-07T22:12:25.000Z | lib/asteroid/oauth2/client.ex | tanguilp/asteroid | 8e03221d365da7f03f82df192c535d3ba2101f4d | [
"Apache-2.0"
] | 3 | 2019-09-06T10:47:20.000Z | 2020-09-09T03:43:31.000Z | defmodule Asteroid.OAuth2.Client do
alias Asteroid.Client
alias Asteroid.OAuth2
alias OAuth2Utils.Scope
import Asteroid.Utils
@moduledoc """
Util function to work with OAuth2 clients
"""
defmodule AuthenticationError do
@moduledoc """
Error raised when an client authentication error occurs
"""
defexception [:reason]
@type t :: %__MODULE__{
reason: :unknown_client | :unauthenticated_request
}
@impl true
def message(%__MODULE__{reason: reason}) do
case astrenv(:api_error_response_verbosity) do
:debug ->
"authentication error: #{String.replace(Atom.to_string(reason), "_", " ")}"
:normal ->
"authentication error: #{String.replace(Atom.to_string(reason), "_", " ")}"
:minimal ->
""
end
end
end
defmodule AuthorizationError do
@moduledoc """
Error raised when an client is not authorized to perform an action
"""
defexception [:reason]
@type t :: %__MODULE__{
reason: :unauthorized_grant_type | :unauthorized_scope
}
def message(%__MODULE__{reason: reason}) do
case astrenv(:api_error_response_verbosity) do
:debug ->
"The client is not authorized to perform this action (reason: #{inspect(reason)})"
:normal ->
"The client is not authorized to perform this action (reason: #{inspect(reason)})"
:minimal ->
""
end
end
end
@typedoc """
Client's type: confidential or public
"""
@type type :: :confidential | :public
@typedoc """
String representation of a client type
Must be the string conversion of a `t:type/0` atom.
"""
@type type_str :: String.t()
@doc """
Returns the authenticated or **unauthenticated** client of a request
To make sure that the client is authenticated, one shall use the `get_authenticated_client/1`
function instead.
"""
@spec get_client(Plug.Conn.t()) ::
{:ok, Client.t()}
| {:error, %AuthenticationError{}}
def get_client(conn) do
case get_authenticated_client(conn) do
{:ok, client} ->
{:ok, client}
{:error, %AuthenticationError{reason: :unkown_client}} = error ->
error
{:error, _} ->
get_unauthenticated_client(conn)
end
end
@doc """
Returns the APIac authenticated client, or an error if none was found
"""
@spec get_authenticated_client(Plug.Conn.t()) ::
{:ok, Client.t()}
| {:error, %AuthenticationError{}}
def get_authenticated_client(conn) do
if APIac.authenticated?(conn) do
case Client.load_from_unique_attribute("client_id", APIac.client(conn)) do
{:ok, client} ->
{:ok, client}
{:error, _} ->
{:error, AuthenticationError.exception(reason: :unkown_client)}
end
else
{:error, AuthenticationError.exception(reason: :unauthenticated_request)}
end
end
@doc """
Returns the unauthenticated client of a request
It does so by reading the `"client_id"` body parameter and trying to find the associated
**public** client in the client's attribute repository. If it is found and it has no
credentials (calling the `has_credentials?/1` function), it returns the client. Otherwise
an error is returned.
"""
@spec get_unauthenticated_client(Plug.Conn.t()) ::
{:ok, Client.t()}
| {:error, %AuthenticationError{}}
def get_unauthenticated_client(conn) do
case conn.body_params["client_id"] do
nil ->
{:error, AuthenticationError.exception(reason: :unauthenticated_request)}
client_id ->
if OAuth2Utils.valid_client_id_param?(client_id) do
case Client.load_from_unique_attribute("client_id", client_id) do
{:ok, client} ->
if public?(client) do
if not has_credentials?(client) do
{:ok, client}
else
{:error,
AuthenticationError.exception(
reason: :public_client_has_credentials_and_must_authenticate
)}
end
else
{:error, AuthenticationError.exception(reason: :unauthenticated_request)}
end
{:error, _} ->
{:error, AuthenticationError.exception(reason: :unkown_client)}
end
else
{:error,
OAuth2.Request.MalformedParamError.exception(
name: "client_id",
value: client_id
)}
end
end
end
@doc """
Returns `true` if the client is allowed to use the grant type, `false` otherwise
To be authorized to use a given grant type, the client's `"grant_types"` attribute
must contain the given `t:Asteroid.OAuth2.grant_type_str/0`.
"""
@spec grant_type_authorized?(Asteroid.Client.t(), Asteroid.OAuth2.grant_type_str()) ::
:ok
| {:error, %AuthorizationError{}}
def grant_type_authorized?(client, grant_type) do
client = Client.fetch_attributes(client, ["grant_types"])
if grant_type in client.attrs["grant_types"] do
:ok
else
{:error, AuthorizationError.exception(reason: :unauthorized_grant_type)}
end
end
@doc """
Returns `true` if the client is allowed to use the response type, `false` otherwise
To be authorized to use a given grant type, the client's `"response_types"` attribute
must contain the given `t:Asteroid.OAuth2.response_type_str/0`.
"""
@spec response_type_authorized?(Asteroid.Client.t(), Asteroid.OAuth2.response_type_str()) ::
:ok
| {:error, %AuthorizationError{}}
def response_type_authorized?(client, response_type) do
client = Client.fetch_attributes(client, ["response_types"])
if response_type in (client.attrs["response_types"] || []) do
:ok
else
{:error, AuthorizationError.exception(reason: :unauthorized_response_type)}
end
end
@doc """
Returns `true` if the client is authorized to use the scopes, `false` otherwise
Checks for each scope of the `Scope.Set.t()` if it's included in the client's `"scope"`
attribute.
"""
@spec scopes_authorized?(Asteroid.Client.t(), Scope.Set.t()) ::
:ok
| {:error, %AuthorizationError{}}
def scopes_authorized?(client, scope_set) do
client = Client.fetch_attributes(client, ["scope"])
if Scope.Set.subset?(scope_set, Scope.Set.new(client.attrs["scope"] || [])) do
:ok
else
{:error, AuthorizationError.exception(reason: :unauthorized_scope)}
end
end
@doc """
Returns `true` if the client is a public client, `false` otherwise
"""
@spec public?(Client.t()) :: boolean()
def public?(client) do
client = Client.fetch_attributes(client, ["client_type"])
client.attrs["client_type"] == "public"
end
@doc """
Returns `true` if the client is a confidential client, `false` otherwise
"""
@spec confidential?(Client.t()) :: boolean()
def confidential?(client) do
client = Client.fetch_attributes(client, ["client_type"])
client.attrs["client_type"] == "confidential"
end
@doc """
Returns `true` if the client has credentials, `false` otherwise
A client that has credentials is a client that has a `client_secret` attribute
"""
@spec has_credentials?(Client.t()) :: boolean()
def has_credentials?(client) do
client = Client.fetch_attributes(client, ["client_secret"])
client.attrs["client_secret"] != nil
end
@doc """
Returns `:ok` is the client is authorized to introspect tokens on the `"/introspect"`
endpoint, `{:error, :unauthorized}` otherwise
An authorized client is a client that has been granted the use of the `"asteroid.introspect"`
scope. See [Configuring clients - Asteroid scopes](configuring-clients.html#asteroid-scopes)
for information on scopes.
"""
@spec endpoint_introspect_authorized?(Client.t()) :: :ok | {:error, :unauthorized}
def endpoint_introspect_authorized?(client) do
client = Client.fetch_attributes(client, ["scope"])
if "asteroid.introspect" in (client.attrs["scope"] || []) do
:ok
else
{:error, :unauthorized}
end
end
@doc """
Returns `true` is the client must use PKCE, `false` otherwise
A client must use PKCE when its
`"__asteroid_oauth2_mandatory_pkce_use"` attribute is set to `true`.
"""
@spec must_use_pkce?(Client.t()) :: boolean()
def must_use_pkce?(client) do
attribute = "__asteroid_oauth2_mandatory_pkce_use"
client = Client.fetch_attributes(client, [attribute])
client.attrs[attribute] == true
end
@doc """
Returns the client secret from the client id of a client
Can be used in as a callback in `APIacAuthBasic` and `APIacAuthClientSecretPost` in the
configuration files:
```elixir
{APIacAuthBasic,
realm: "Asteroid",
callback: &Asteroid.OAuth2.Client.get_client_secret/2,
set_error_response: &APIacAuthBasic.save_authentication_failure_response/3,
error_response_verbosity: :debug}
```
"""
@spec get_client_secret(String.t(), String.t()) :: String.t()
def get_client_secret(_realm, client_id) do
case Client.load_from_unique_attribute("client_id", client_id, attributes: ["client_secret"]) do
{:ok, client} ->
client.attrs["client_secret"]
{:error, _} ->
nil
end
end
end
| 28.083333 | 100 | 0.647944 |
79907cb20a0ef54f78ff12e1d03ac969e5c215d3 | 673 | exs | Elixir | test/prima_auth0_ex/token_provider/token_info_test.exs | primait/auth0_ex | 15ef5d6d91d8fe00ff703a4f58e1cb32bb169a82 | [
"MIT"
] | 5 | 2021-12-01T10:50:40.000Z | 2022-02-15T13:07:02.000Z | test/prima_auth0_ex/token_provider/token_info_test.exs | primait/auth0_ex | 15ef5d6d91d8fe00ff703a4f58e1cb32bb169a82 | [
"MIT"
] | 11 | 2021-12-22T09:19:28.000Z | 2022-03-24T06:15:04.000Z | test/prima_auth0_ex/token_provider/token_info_test.exs | primait/auth0_ex | 15ef5d6d91d8fe00ff703a4f58e1cb32bb169a82 | [
"MIT"
] | null | null | null | defmodule PrimaAuth0Ex.TokenProvider.TokenInfoTest do
use ExUnit.Case, async: true
alias PrimaAuth0Ex.TestSupport.JwtUtils
alias PrimaAuth0Ex.TokenProvider.TokenInfo
test "extracts metadata from token" do
issued_at = Timex.now() |> Timex.shift(hours: -12) |> Timex.to_unix()
expires_at = Timex.now() |> Timex.shift(hours: 12) |> Timex.to_unix()
token = JwtUtils.generate_fake_jwt("some-audience", %{iat: issued_at, exp: expires_at}, %{kid: "my-kid"})
assert %TokenInfo{
jwt: token,
kid: "my-kid",
issued_at: issued_at,
expires_at: expires_at
} == TokenInfo.from_jwt(token)
end
end
| 33.65 | 109 | 0.656761 |
79907d08e1b4e3375e4623982f2d94b36140aaeb | 991 | exs | Elixir | test/json_schema_test_suite/draft7/optional/format/relative_json_pointer_test.exs | hrzndhrn/json_xema | 955eab7b0919d144b38364164d90275201c89474 | [
"MIT"
] | 54 | 2019-03-10T19:51:07.000Z | 2021-12-23T07:31:09.000Z | test/json_schema_test_suite/draft7/optional/format/relative_json_pointer_test.exs | hrzndhrn/json_xema | 955eab7b0919d144b38364164d90275201c89474 | [
"MIT"
] | 36 | 2018-05-20T09:13:20.000Z | 2021-03-14T15:22:03.000Z | test/json_schema_test_suite/draft7/optional/format/relative_json_pointer_test.exs | hrzndhrn/json_xema | 955eab7b0919d144b38364164d90275201c89474 | [
"MIT"
] | 3 | 2019-04-12T09:08:51.000Z | 2019-12-04T01:23:56.000Z | defmodule JsonSchemaTestSuite.Draft7.Optional.Format.RelativeJsonPointerTest do
use ExUnit.Case
import JsonXema, only: [valid?: 2]
describe ~s|validation of Relative JSON Pointers (RJP)| do
setup do
%{schema: JsonXema.new(%{"format" => "relative-json-pointer"})}
end
test ~s|a valid upwards RJP|, %{schema: schema} do
assert valid?(schema, "1")
end
test ~s|a valid downwards RJP|, %{schema: schema} do
assert valid?(schema, "0/foo/bar")
end
test ~s|a valid up and then down RJP, with array index|, %{schema: schema} do
assert valid?(schema, "2/0/baz/1/zip")
end
test ~s|a valid RJP taking the member or index name|, %{schema: schema} do
assert valid?(schema, "0#")
end
test ~s|an invalid RJP that is a valid JSON Pointer|, %{schema: schema} do
refute valid?(schema, "/foo/bar")
end
test ~s|negative prefix|, %{schema: schema} do
refute valid?(schema, "-1/foo/bar")
end
end
end
| 27.527778 | 81 | 0.638749 |
7990a8188871c2a21d0aafcc34c1baa088922a66 | 3,407 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/batch_update_values_by_data_filter_request.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/batch_update_values_by_data_filter_request.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/batch_update_values_by_data_filter_request.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Sheets.V4.Model.BatchUpdateValuesByDataFilterRequest do
  @moduledoc """
  The request for updating more than one range of values in a spreadsheet.

  ## Attributes

  - data ([DataFilterValueRange]): The new values to apply to the spreadsheet. If more than one range is matched by the specified DataFilter the specified values will be applied to all of those ranges. Defaults to: `null`.
  - includeValuesInResponse (boolean()): Determines if the update response should include the values of the cells that were updated. By default, responses do not include the updated values. The `updatedData` field within each of the BatchUpdateValuesResponse.responses will contain the updated values. If the range to write was larger than than the range actually written, the response will include all values in the requested range (excluding trailing empty rows and columns). Defaults to: `null`.
  - responseDateTimeRenderOption (String.t): Determines how dates, times, and durations in the response should be rendered. This is ignored if response_value_render_option is FORMATTED_VALUE. The default dateTime render option is DateTimeRenderOption.SERIAL_NUMBER. Defaults to: `null`.
    - Enum - one of [SERIAL_NUMBER, FORMATTED_STRING]
  - responseValueRenderOption (String.t): Determines how values in the response should be rendered. The default render option is ValueRenderOption.FORMATTED_VALUE. Defaults to: `null`.
    - Enum - one of [FORMATTED_VALUE, UNFORMATTED_VALUE, FORMULA]
  - valueInputOption (String.t): How the input data should be interpreted. Defaults to: `null`.
    - Enum - one of [INPUT_VALUE_OPTION_UNSPECIFIED, RAW, USER_ENTERED]
  """

  # ModelBase provides the `field/1,2` macro plus encode/decode plumbing.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :data => list(GoogleApi.Sheets.V4.Model.DataFilterValueRange.t()),
          :includeValuesInResponse => any(),
          :responseDateTimeRenderOption => any(),
          :responseValueRenderOption => any(),
          :valueInputOption => any()
        }

  # `:data` is the only nested field; the rest are plain scalars.
  field(:data, as: GoogleApi.Sheets.V4.Model.DataFilterValueRange, type: :list)
  field(:includeValuesInResponse)
  field(:responseDateTimeRenderOption)
  field(:responseValueRenderOption)
  field(:valueInputOption)
end
# JSON decoding delegates to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.BatchUpdateValuesByDataFilterRequest do
  def decode(value, options) do
    GoogleApi.Sheets.V4.Model.BatchUpdateValuesByDataFilterRequest.decode(value, options)
  end
end
# JSON encoding delegates to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.BatchUpdateValuesByDataFilterRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 54.079365 | 508 | 0.768418 |
7990b537559574ced4930f6abfd2469e3c725d7f | 1,114 | ex | Elixir | apps/tai/lib/tai/orders/submissions/sell_limit_gtc.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | apps/tai/lib/tai/orders/submissions/sell_limit_gtc.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/tai/lib/tai/orders/submissions/sell_limit_gtc.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | defmodule Tai.Orders.Submissions.SellLimitGtc do
alias __MODULE__
@type venue_id :: Tai.Venue.id()
@type credential_id :: Tai.Venue.credential_id()
@type venue_product_symbol :: Tai.Venues.Product.venue_symbol()
@type product_symbol :: Tai.Venues.Product.symbol()
@type product_type :: Tai.Venues.Product.type()
@type callback :: Tai.Orders.Order.callback()
@type t :: %SellLimitGtc{
venue_id: venue_id,
credential_id: credential_id,
venue_product_symbol: venue_product_symbol,
product_symbol: product_symbol,
product_type: product_type,
price: Decimal.t(),
qty: Decimal.t(),
close: boolean | nil,
post_only: boolean,
order_updated_callback: callback
}
@enforce_keys ~w(
venue_id
credential_id
venue_product_symbol
product_symbol
product_type
price
qty
post_only
)a
defstruct ~w(
venue_id
credential_id
venue_product_symbol
product_symbol
product_type
price
qty
post_only
close
order_updated_callback
)a
end
| 24.217391 | 65 | 0.663375 |
7990d73c993da20f5a663d4a52a8b27a49d50145 | 1,158 | ex | Elixir | lib/mollie/organizations.ex | LostKobrakai/mollie | dd697dfcf376eb1889d6371cc1179a335ef70b59 | [
"MIT"
] | null | null | null | lib/mollie/organizations.ex | LostKobrakai/mollie | dd697dfcf376eb1889d6371cc1179a335ef70b59 | [
"MIT"
] | null | null | null | lib/mollie/organizations.ex | LostKobrakai/mollie | dd697dfcf376eb1889d6371cc1179a335ef70b59 | [
"MIT"
] | null | null | null | defmodule Mollie.Organizations do
import Mollie
alias Mollie.Client
@doc """
Retrieve the currently authenticated organization.
## Example
Mollie.Organizations.me client
More info at: https://docs.mollie.com/reference/v2/organizations-api/current-organization
"""
@spec me(Client.t()) :: Mollie.response()
def me(client) do
get("v2/organizations/me", client)
end
@doc """
Retrieve all organizations that are connected to your partner-account.
This API is only for partners.
## Example
Mollie.Organizations.list client
More info at: https://docs.mollie.com/reference/v2/organizations-api/list-organizations
"""
@spec list(Client.t(), map | list) :: Mollie.response()
def list(client, params \\ %{}) do
get("v2/organizations", client, params)
end
@doc """
Retrieve an organization by its ID.
## Example
Mollie.Organizations.find client, "org_12345678"
More info at: https://docs.mollie.com/reference/v2/organizations-api/get-organization
"""
@spec find(Client.t(), binary) :: Mollie.response()
def find(client, id) do
get("v2/organizations/#{id}", client)
end
end
| 24.125 | 91 | 0.693437 |
799105aae0d87886f517cd7b8c78cbc762728bfc | 948 | exs | Elixir | mix.exs | bonfire-networks/phoenix_gon | 2b8b3d330ac653751d175286eee6f02cb206f2e4 | [
"MIT"
] | null | null | null | mix.exs | bonfire-networks/phoenix_gon | 2b8b3d330ac653751d175286eee6f02cb206f2e4 | [
"MIT"
] | null | null | null | mix.exs | bonfire-networks/phoenix_gon | 2b8b3d330ac653751d175286eee6f02cb206f2e4 | [
"MIT"
] | null | null | null | defmodule PhoenixGon.Mixfile do
use Mix.Project
def project do
[
app: :phoenix_gon,
version: "0.4.1",
elixir: "~> 1.4",
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
description: description(),
package: package(),
deps: deps()
]
end
def application do
[applications: [:logger]]
end
defp description do
"""
PhoenixGon hex - your Phoenix variables in your JavaScript.
"""
end
defp package do
[
name: :phoenix_gon,
files: ~w{lib} ++ ~w{mix.exs README.md},
maintainers: ["Marat Khusnetdinov"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/khusnetdinov/phoenix_gon"}
]
end
defp deps do
[
{:ex_doc, ">= 0.0.0", only: :dev},
{:poison, "~> 3.0", optional: true},
{:phoenix_html, "~> 3.2"},
{:plug, "~> 1.0"},
{:recase, "~> 0.4"}
]
end
end
| 20.170213 | 73 | 0.53692 |
7991139e046a8e8428e9b912be48ff7bf1debf6c | 148 | exs | Elixir | apps/performance_2/config/test.exs | WhiteRookPL/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 14 | 2017-08-09T14:21:47.000Z | 2022-03-11T04:10:49.000Z | apps/performance_2/config/test.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | null | null | null | apps/performance_2/config/test.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 15 | 2017-09-05T15:43:53.000Z | 2020-04-13T16:20:18.000Z | use Mix.Config
config :spell_checker_api, SpellCheckerAPI.Web.Endpoint,
http: [
port: 4001
],
server: false
config :logger, level: :warn | 16.444444 | 56 | 0.709459 |
79911ea42a47aae928916fc186c368b9795f83c2 | 388 | ex | Elixir | lib/postoffice_web/controllers/api/topic_controller.ex | IgorRodriguez/postoffice | 9012193e0780f2403bd3db90b8f6258656780fee | [
"Apache-2.0"
] | 15 | 2020-01-24T10:33:57.000Z | 2020-10-24T07:57:14.000Z | lib/postoffice_web/controllers/api/topic_controller.ex | IgorRodriguez/postoffice | 9012193e0780f2403bd3db90b8f6258656780fee | [
"Apache-2.0"
] | 24 | 2020-01-24T09:52:56.000Z | 2021-02-19T09:15:12.000Z | lib/postoffice_web/controllers/api/topic_controller.ex | IgorRodriguez/postoffice | 9012193e0780f2403bd3db90b8f6258656780fee | [
"Apache-2.0"
] | 5 | 2020-01-25T18:03:44.000Z | 2021-02-23T10:07:03.000Z | defmodule PostofficeWeb.Api.TopicController do
use PostofficeWeb, :controller
alias Postoffice.Messaging.Topic
action_fallback PostofficeWeb.Api.FallbackController
def create(conn, topic_params) do
with {:ok, %Topic{} = topic} <- Postoffice.create_topic(topic_params) do
conn
|> put_status(:created)
|> render("show.json", topic: topic)
end
end
end
| 24.25 | 76 | 0.719072 |
7991242d9fd33b12917e736abecf7a2ffe3c124a | 6,743 | ex | Elixir | lib/tracer/handler_agent.ex | gabiz/tracer | 4659bf3cc9e0cc86e653794b800e9eb5bbe9f0a2 | [
"MIT"
] | 125 | 2017-09-08T06:33:28.000Z | 2022-03-09T10:48:04.000Z | lib/tracer/handler_agent.ex | Gazler/tracer | 4659bf3cc9e0cc86e653794b800e9eb5bbe9f0a2 | [
"MIT"
] | 3 | 2017-09-21T01:56:24.000Z | 2020-10-29T13:27:44.000Z | lib/tracer/handler_agent.ex | Gazler/tracer | 4659bf3cc9e0cc86e653794b800e9eb5bbe9f0a2 | [
"MIT"
] | 6 | 2017-09-08T21:41:32.000Z | 2019-11-16T10:39:16.000Z | defmodule Tracer.HandlerAgent do
@moduledoc """
HandlerAgent takes care of starting and stopping traces in the
NUT (node under test), as well as watching over the event handler
as it processes events.
"""
alias __MODULE__
alias Tracer.PidHandler
import Tracer.Macros
@default_max_tracing_time 30_000
defstruct node: nil,
handler_pid: nil,
timer_ref: nil,
max_tracing_time: @default_max_tracing_time,
pid_handler_opts: [],
start_trace_cmds: [],
stop_trace_cmds: []
def start(opts \\ []) do
initial_state = process_opts(%HandlerAgent{}, opts)
pid = spawn_in_target(initial_state)
send pid, :start
pid
end
def stop(pid) do
send pid, :stop
end
defp process_opts(state, opts) do
state
|> Map.put(:node, Keyword.get(opts, :node, nil))
|> Map.put(:start_trace_cmds, Keyword.get(opts, :start_trace_cmds, []))
|> Map.put(:stop_trace_cmds, Keyword.get(opts, :stop_trace_cmds, []))
|> assign_to(state)
state = if Keyword.get(opts, :max_tracing_time) != nil do
put_in(state.max_tracing_time, Keyword.get(opts, :max_tracing_time))
else
state
end
state = if Keyword.get(opts, :max_message_count) != nil do
put_in(state.pid_handler_opts,
[{:max_message_count, Keyword.get(opts, :max_message_count)}
| state.pid_handler_opts])
else
state
end
state = if Keyword.get(opts, :max_queue_size) != nil do
put_in(state.pid_handler_opts,
[{:max_queue_size, Keyword.get(opts, :max_queue_size)}
| state.pid_handler_opts])
else
state
end
event_callback =
if Keyword.get(opts, :forward_pid) != nil do
{:event_callback, {&__MODULE__.forwarding_handler_callback/2,
Keyword.get(opts, :forward_pid)}}
else
{:event_callback, &__MODULE__.discard_handler_callback/1}
end
if Keyword.get(opts, :event_callback) != nil do
put_in(state.pid_handler_opts,
[{:event_callback, Keyword.get(opts, :event_callback)}
| state.pid_handler_opts])
else
put_in(state.pid_handler_opts,
[event_callback | state.pid_handler_opts])
end
end
defp spawn_in_target(state) do
if state.node != nil do
[__MODULE__, Tracer.PidHandler] |> Enum.each(fn mod ->
ensure_loaded_remote(state.node, mod)
end)
Node.spawn_link(state.node, fn -> process_loop(state) end)
else
spawn_link(fn -> process_loop(state) end)
end
end
defp process_loop(state) do
receive do
:start ->
Process.flag(:trap_exit, true)
state
|> start_handler()
|> stop_tracing()
|> start_tracing()
|> start_timer()
|> process_loop()
{:timeout, _timeref, _} ->
stop_tracing_and_handler(state)
exit({:done_tracing, :tracing_timeout, state.max_tracing_time})
:stop ->
stop_tracing_and_handler(state)
exit({:done_tracing, :stop_command})
{:EXIT, _, :normal} -> # we should be dead by the time this is sent
exit(:normal)
{:EXIT, _, {:message_queue_size, len}} ->
stop_tracing(state)
exit({:done_tracing, :message_queue_size, len})
{:EXIT, _, {:max_message_count, count}} ->
stop_tracing(state)
exit({:done_tracing, :max_message_count, count})
:restart_timer ->
state
|> cancel_timer()
|> start_timer()
|> process_loop()
# testing helpers
{:get_handler_pid, sender_pid} ->
send sender_pid, {:handler_pid, state.handler_pid}
process_loop(state)
{:get_pid_handler_opts, sender_pid} ->
send sender_pid, {:pid_handler_opts, state.pid_handler_opts}
process_loop(state)
_ignore -> process_loop(state)
end
end
defp start_timer(state) do
ref = :erlang.start_timer(state.max_tracing_time, self(), [])
put_in(state.timer_ref, ref)
end
defp cancel_timer(state) do
:erlang.cancel_timer(state.timer_ref, [])
put_in(state.timer_ref, nil)
end
defp stop_tracing_and_handler(state) do
state
|> stop_tracing()
|> stop_handler()
end
defp start_handler(state) do
handler_pid = PidHandler.start(state.pid_handler_opts)
put_in(state.handler_pid, handler_pid)
end
defp stop_handler(state) do
state
|> Map.get(:handler_pid)
|> PidHandler.stop()
put_in(state.handler_pid, nil)
end
def start_tracing(state) do
# TODO store the number of matches, so that it can be send back to admin
# process
trace_fun = &:erlang.trace/3
state.start_trace_cmds
|> Enum.each(fn
[{:fun, ^trace_fun} | args] ->
bare_args = Enum.map(args, fn
# inject tracer option
{:flag_list, flags} -> [{:tracer, state.handler_pid} | flags]
{_other, arg} -> arg
end)
# IO.puts("#{inspect trace_fun} args: #{inspect bare_args}")
_res = apply(trace_fun, bare_args)
# IO.puts("#{inspect trace_fun} args: #{inspect bare_args}" <>
# " = #{inspect res}")
[{:fun, fun} | args] ->
bare_args = Enum.map(args, &(elem(&1, 1)))
_res = apply(fun, bare_args)
# IO.puts("#{inspect fun} args: #{inspect bare_args} = #{inspect res}")
end)
state
end
def stop_tracing(state) do
state.stop_trace_cmds
|> Enum.each(fn [{:fun, fun} | args] ->
bare_args = Enum.map(args, &(elem(&1, 1)))
apply(fun, bare_args)
end)
state
end
# Handler Callbacks
def discard_handler_callback(_event) do
:ok
end
def forwarding_handler_callback(event, pid) do
send pid, event
{:ok, pid}
end
# Remote Loading Helpers
# credit: based on redbug https://github.com/massemanet/redbug
defp ensure_loaded_remote(node, mod) do
case :rpc.call(node, mod, :module_info, [:compile]) do
{:badrpc, {:EXIT, {:undef, _}}} ->
# module was not found
load_remote(node, mod)
ensure_loaded_remote(node, mod)
:ok
{:badrpc , _} -> :ok
info when is_list(info) ->
case {get_ts(info), get_ts(mod.module_info(:compile))} do
{:interpreted, _} -> :ok
{target, host} when target < host -> # old code on target
load_remote(node, mod)
ensure_loaded_remote(node, mod)
_ -> :ok
end
end
end
defp load_remote(node, mod) do
{mod, bin, fun} = :code.get_object_code(mod)
{:module, _mod} = :rpc.call(node, :code, :load_binary, [mod, fun, bin])
end
defp get_ts([]), do: :interpreted
defp get_ts([{:time, time} | _]), do: time
defp get_ts([_ | rest]), do: get_ts(rest)
end
| 28.816239 | 79 | 0.619606 |
79912c713e374df974755e580fabadd91e7b2e8f | 5,232 | ex | Elixir | lib/web/websocket.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:11.000Z | 2020-08-27T18:43:11.000Z | lib/web/websocket.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | null | null | null | lib/web/websocket.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:21.000Z | 2020-08-27T18:43:21.000Z | # Copyright(c) 2015-2020 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule Antikythera.Websocket do
@moduledoc """
Behaviour module for websocket handlers.
Note the naming convention of the websocket-related modules; we use `Websocket`, `WebSocket` is not allowed.
Websocket module of gears must `use` this module as in the example below.
`use Antikythera.Websocket` implicitly invokes `use Antikythera.Controller`, for convenience in implementing `connect/1` callback.
## Example
The following example simply echoes back messages from client:
defmodule MyGear.Websocket do
use Antikythera.Websocket
def init(_conn) do
{%{}, []}
end
def handle_client_message(state, _conn, frame) do
{state, [frame]}
end
def handle_server_message(state, _conn, _msg) do
{state, []}
end
end
## Name registration
Once a websocket connection is established, subsequent bidirectional communication is handled by a dedicated connection process.
To send websocket frames to the connected client,
you should first be able to send messages to the connection process when a particular event occurs somewhere in the cluster.
To this end antikythera provides a process registry mechanism which makes connection processes accessible by "name"s.
To register connection processes, call `Antikythera.Registry.Unique.register/2` and/or `Antikythera.Registry.Group.join/2`
in your `init/1` implementation.
Then, to notify events of connection processes, use `Antikythera.Registry.Unique.send_message/3` or
`Antikythera.Registry.Group.publish/3`.
Finally to send websocket message from a connection process to client, implement `handle_server_message/3` callback
so that it returns an appropriate websocket frame using the message.
See `Antikythera.Registry.Unique` and `Antikythera.Registry.Group` for more detail of the registry.
"""
alias Antikythera.Conn
alias Antikythera.Websocket.{Frame, FrameList}
@type state :: any
@type terminate_reason :: :normal | :stop | :timeout | :remote | {:remote, Frame.close_code, Frame.close_payload} | {:error, any}
@typedoc """
Type of return value of `init/1`, `handle_client_message/3` and `handle_server_message/3` callbacks.
The 1st element of the return value is used as the new state.
The 2nd element of the return value is sent to the client.
To close the connection, include a `:close` frame in the 2nd element of the return value.
Note that the remaining frames after the close frame will not be sent.
"""
@type callback_result :: {state, FrameList.t}
@doc """
Callback function to be used during websocket handshake request.
This callback is implemented in basically the same way as ordinary controller actions.
You can use plugs and controller helper functions.
The only difference is that on success this function returns a `Antikythera.Conn.t` without setting an HTTP status code.
This callback is responsible for authenticating/authorizing the client.
If the client is valid and it's OK to start websocket communication, implementation of this callback must return the given `Antikythera.Conn.t`.
On the other hand if the client is not allowed to open websocket connection, this function must return an error as a usual HTTP response.
`use Antikythera.Websocket` generates a default implementation of this callback, which just returns the given `Antikythera.Conn.t`.
Note that you can use plugs without overriding the default.
"""
@callback connect(Conn.t) :: Conn.t
@doc """
Callback function to be called right after a connection is established.
This callback is responsible for:
1. initialize the process state (1st element of return value)
2. send initial message to client (2nd element of return value)
3. register the process to make it accessible from other processes in the system (see "Name registration" above)
"""
@callback init(Conn.t) :: callback_result
@doc """
Callback function to be called on receipt of a client message.
"""
@callback handle_client_message(state, Conn.t, Frame.t) :: callback_result
@doc """
Callback function to be called on receipt of a message from other process in the cluster.
"""
@callback handle_server_message(state, Conn.t, any) :: callback_result
@doc """
Callback function to clean up resources used by the websocket connection.
For typical use cases you don't need to implement this callback;
`Antikythera.Websocket` generates a default implementation (which does nothing) for you.
"""
@callback terminate(state, Conn.t, terminate_reason) :: any
# Injects the default websocket-handler plumbing into the using module.
defmacro __using__(_) do
quote do
# Compile-time guard: the websocket module must be named <AppName>.Websocket,
# where <AppName> is derived from the Mix project's :app atom
# (e.g. :my_gear -> MyGear.Websocket). Anything else raises at compile time.
expected = Mix.Project.config()[:app] |> Atom.to_string() |> Macro.camelize() |> Module.concat("Websocket")
if __MODULE__ != expected do
raise "invalid module name: expected=#{expected} actual=#{__MODULE__}"
end
@behaviour Antikythera.Websocket
# Pulls in controller helpers/plugs so `connect/1` can be written like an
# ordinary controller action (see the `connect/1` callback doc above).
use Antikythera.Controller
# Default implementations: `connect/1` accepts every handshake as-is and
# `terminate/3` does nothing; both are overridable by the using module.
@impl true
def connect(conn), do: conn
@impl true
def terminate(_state, _conn, _reason), do: :ok
defoverridable [connect: 1, terminate: 3]
end
end
end
| 39.636364 | 146 | 0.734327 |
7991324ccbf70195f87390b543a72dc113b48019 | 793 | ex | Elixir | deps/makeup/lib/makeup/styles/html/pygments/vs.ex | arduino-man/fona_modern | 61845bbbbc46a61a50e59a97c68709f2722078a6 | [
"MIT"
] | null | null | null | deps/makeup/lib/makeup/styles/html/pygments/vs.ex | arduino-man/fona_modern | 61845bbbbc46a61a50e59a97c68709f2722078a6 | [
"MIT"
] | null | null | null | deps/makeup/lib/makeup/styles/html/pygments/vs.ex | arduino-man/fona_modern | 61845bbbbc46a61a50e59a97c68709f2722078a6 | [
"MIT"
] | null | null | null |
defmodule Makeup.Styles.HTML.VisualStudioStyle do
  @moduledoc false

  alias Makeup.Styles.HTML.Style

  # Token-type -> CSS fragment mapping for the "vs" (Visual Studio) theme.
  @styles %{
    :comment => "#008000",
    :comment_preproc => "#0000ff",
    :error => "border:#FF0000",
    :generic_emph => "italic",
    :generic_heading => "bold",
    :generic_prompt => "bold",
    :generic_strong => "bold",
    :generic_subheading => "bold",
    :keyword => "#0000ff",
    :keyword_type => "#2b91af",
    :name_class => "#2b91af",
    :operator_word => "#0000ff",
    :string => "#a31515"
  }

  # The style struct is built once at compile time; `style/0` just returns it.
  @style_struct Style.make_style(
                  short_name: "vs",
                  long_name: "VisualStudio Style",
                  background_color: "#ffffff",
                  highlight_color: "#ffffcc",
                  styles: @styles
                )

  # Returns the precomputed style struct for the "vs" theme.
  def style() do
    @style_struct
  end
end
79913f0e50407ba548a631411ed18010a4994898 | 759 | exs | Elixir | test/r_utils_test.exs | TORIFUKUKaiou/ex-r_enum | 5fd99b61dae3c7fe8fc3838158af342ff308dff1 | [
"Apache-2.0"
] | 24 | 2022-01-13T23:13:11.000Z | 2022-03-27T18:02:39.000Z | test/r_utils_test.exs | TORIFUKUKaiou/ex-r_enum | 5fd99b61dae3c7fe8fc3838158af342ff308dff1 | [
"Apache-2.0"
] | 16 | 2022-01-16T09:18:17.000Z | 2022-02-08T01:10:09.000Z | test/r_utils_test.exs | TORIFUKUKaiou/ex-r_enum | 5fd99b61dae3c7fe8fc3838158af342ff308dff1 | [
"Apache-2.0"
] | 6 | 2022-01-16T04:40:42.000Z | 2022-02-07T14:56:26.000Z | defmodule RUtilsTest do
use ExUnit.Case
doctest RUtils
test "blank?/1" do
assert RUtils.blank?(%{})
assert RUtils.blank?([])
assert RUtils.blank?(nil)
assert RUtils.blank?(false)
assert RUtils.blank?(" ")
refute RUtils.blank?([1])
refute RUtils.blank?(true)
refute RUtils.blank?(1)
refute RUtils.blank?(" a ")
refute RUtils.blank?(%{a: [1]})
end
test "present?/1" do
refute RUtils.present?(%{})
refute RUtils.present?([])
refute RUtils.present?(nil)
refute RUtils.present?(false)
refute RUtils.present?(" ")
assert RUtils.present?([1])
assert RUtils.present?(true)
assert RUtils.present?(1)
assert RUtils.present?(" a ")
assert RUtils.present?(%{a: [1]})
end
end
| 24.483871 | 37 | 0.627141 |
79914a430469c43304357af326c07f7368ff355f | 2,828 | ex | Elixir | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_api_proxy.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_api_proxy.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_api_proxy.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE(review): this module is auto-generated (see file header); keep edits to
# comments only so the generator's output stays reproducible.
defmodule GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1ApiProxy do
@moduledoc """
Metadata describing the API proxy
## Attributes
* `apiProxyType` (*type:* `String.t`, *default:* `nil`) - Output only. The type of the API proxy.
* `labels` (*type:* `map()`, *default:* `nil`) - User labels applied to this API Proxy.
* `latestRevisionId` (*type:* `String.t`, *default:* `nil`) - Output only. The id of the most recently created revision for this api proxy.
* `metaData` (*type:* `GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1EntityMetadata.t`, *default:* `nil`) - Output only. Metadata describing the API proxy.
* `name` (*type:* `String.t`, *default:* `nil`) - Output only. Name of the API proxy.
* `readOnly` (*type:* `boolean()`, *default:* `nil`) - Output only. Whether this proxy is read-only. A read-only proxy cannot have new revisions created through calls to CreateApiProxyRevision. A proxy is read-only if it was generated by an archive.
* `revision` (*type:* `list(String.t)`, *default:* `nil`) - Output only. List of revisions defined for the API proxy.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:apiProxyType => String.t() | nil,
:labels => map() | nil,
:latestRevisionId => String.t() | nil,
:metaData => GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1EntityMetadata.t() | nil,
:name => String.t() | nil,
:readOnly => boolean() | nil,
:revision => list(String.t()) | nil
}
# The `field` macros come from GoogleApi.Gax.ModelBase (pulled in by `use`
# above) and register each attribute for JSON (de)serialization; `:map`,
# `:list`, and `as:` control how the raw JSON value is decoded.
field(:apiProxyType)
field(:labels, type: :map)
field(:latestRevisionId)
field(:metaData, as: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1EntityMetadata)
field(:name)
field(:readOnly)
field(:revision, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1ApiProxy do
  # Delegate JSON decoding to the model module generated above.
  def decode(value, options),
    do: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1ApiProxy.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1ApiProxy do
  # Encoding is shared across all generated models via Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 43.507692 | 253 | 0.701556 |
7991817f9c6ec50e0aaecd95e1416f495a8a23ab | 840 | exs | Elixir | elixir/acronym/acronym_test.exs | ArtemGordinsky/exercism | 74e91e3b48ca920803474ec435c13eac66351b9f | [
"Unlicense"
] | null | null | null | elixir/acronym/acronym_test.exs | ArtemGordinsky/exercism | 74e91e3b48ca920803474ec435c13eac66351b9f | [
"Unlicense"
] | null | null | null | elixir/acronym/acronym_test.exs | ArtemGordinsky/exercism | 74e91e3b48ca920803474ec435c13eac66351b9f | [
"Unlicense"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("acronym.exs", __DIR__)
end
ExUnit.start
ExUnit.configure exclude: :pending, trace: true
defmodule AcronymTest do
  use ExUnit.Case

  # Each test checks Acronym.abbreviate/1 against one input category,
  # using strict equality (===) on the expected acronym.

  test "it produces acronyms from title case" do
    phrase = "Portable Networks Graphic"
    assert Acronym.abbreviate(phrase) === "PNG"
  end

  test "it produces acronyms from lower case" do
    phrase = "Ruby on Rails"
    assert Acronym.abbreviate(phrase) === "ROR"
  end

  test "it produces acronyms from inconsistent case" do
    phrase = "HyperText Markup Language"
    assert Acronym.abbreviate(phrase) === "HTML"
  end

  test "it ignores punctuation" do
    phrase = "First in, First out"
    assert Acronym.abbreviate(phrase) === "FIFO"
  end

  test "it produces acronyms ignoring punctuation and casing" do
    phrase = "Complementary Metal-Oxide semiconductor"
    assert Acronym.abbreviate(phrase) === "CMOS"
  end
end
7991d2f9bceea076eb08cf92cb0493d0d9e70d18 | 1,510 | ex | Elixir | lib/live_view_collection_web/views/error_helpers.ex | sandeshsoni/phoenix-liveview-collection | 6a3ae19f9aec1d2776a38edca2ff8497fc371257 | [
"MIT"
] | 32 | 2019-04-01T02:39:49.000Z | 2020-12-04T02:57:01.000Z | lib/live_view_collection_web/views/error_helpers.ex | sandeshsoni/phoenix-liveview-collection | 6a3ae19f9aec1d2776a38edca2ff8497fc371257 | [
"MIT"
] | 13 | 2020-03-29T19:40:34.000Z | 2021-12-06T22:18:02.000Z | lib/live_view_collection_web/views/error_helpers.ex | sandeshsoni/phoenix-liveview-collection | 6a3ae19f9aec1d2776a38edca2ff8497fc371257 | [
"MIT"
] | 4 | 2019-08-01T16:21:16.000Z | 2020-04-01T17:39:10.000Z | defmodule LiveViewCollectionWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error), class: "help-block")
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(LiveViewCollectionWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(LiveViewCollectionWeb.Gettext, "errors", msg, opts)
end
end
end
| 33.555556 | 87 | 0.677483 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.